diff --git a/.github/file-filters.yml b/.github/file-filters.yml
index 87e635bc..c0b9bb34 100644
--- a/.github/file-filters.yml
+++ b/.github/file-filters.yml
@@ -36,7 +36,8 @@ markdown_all: &markdown_all
infrahub_reference_generated: &infrahub_reference_generated
- "docs/docs/infrahubctl/*.mdx"
- - "docs/docs/python-sdk/reference/config.mdx"
+ - "docs/docs/python-sdk/reference/*.mdx"
+ - "docs/docs/python-sdk/sdk_ref/**/*.mdx"
documentation_all:
- *development_files
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 16d2de2d..faf53368 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -108,7 +108,7 @@ jobs:
- name: "Linting: markdownlint"
uses: DavidAnson/markdownlint-cli2-action@v22
with:
- config: .markdownlint.yaml
+ config: docs/.markdownlint.yaml
globs: |
**/*.{md,mdx}
!changelog/*.md
@@ -176,7 +176,7 @@ jobs:
uses: actions/setup-node@v5
with:
node-version: 20
- cache: 'npm'
+ cache: "npm"
cache-dependency-path: docs/package-lock.json
- name: "Install dependencies"
run: npm install
@@ -207,6 +207,15 @@ jobs:
uses: "actions/checkout@v6"
with:
submodules: true
+ - name: Install NodeJS
+ uses: actions/setup-node@v5
+ with:
+ node-version: 20
+ cache: "npm"
+ cache-dependency-path: docs/package-lock.json
+ - name: "Install npm dependencies"
+ run: npm install
+ working-directory: ./docs
- name: Set up Python
uses: actions/setup-python@v6
with:
@@ -217,9 +226,11 @@ jobs:
version: "${{ needs.prepare-environment.outputs.UV_VERSION }}"
- name: Install dependencies
run: uv sync --all-groups --all-extras
+ - name: Docs unit tests
+ run: npx --no-install vitest run
+ working-directory: ./docs
- name: Validate generated documentation
run: uv run invoke docs-validate
-
validate-documentation-style:
if: |
always() && !cancelled() &&
@@ -236,6 +247,7 @@ jobs:
# The official GitHub Action for Vale doesn't work, installing manually instead:
# https://github.com/errata-ai/vale-action/issues/103
+ # cf -> https://github.com/nf-core/website/pull/3509
- name: Download Vale
run: |
curl -sL "https://github.com/errata-ai/vale/releases/download/v${VALE_VERSION}/vale_${VALE_VERSION}_Linux_64-bit.tar.gz" -o vale.tar.gz
diff --git a/.github/workflows/sync-docs.yml b/.github/workflows/sync-docs.yml
index 65574ce7..67ff7ac9 100644
--- a/.github/workflows/sync-docs.yml
+++ b/.github/workflows/sync-docs.yml
@@ -8,8 +8,9 @@ on:
- stable
paths:
- 'docs/docs/**'
- - 'docs/sidebars-infrahubctl.ts'
- - 'docs/sidebars-python-sdk.ts'
+ - 'docs/sidebars/sidebars-infrahubctl.ts'
+ - 'docs/sidebars/sidebars-python-sdk.ts'
+ - 'docs/sidebars/sidebar-utils.ts'
jobs:
sync:
@@ -33,8 +34,9 @@ jobs:
rm -f target-repo/docs/sidebars-python-sdk.ts
rm -f target-repo/docs/sidebars-infrahubctl.ts
cp -r source-repo/docs/docs/* target-repo/docs/docs-python-sdk/
- cp source-repo/docs/sidebars-infrahubctl.ts target-repo/docs/
- cp source-repo/docs/sidebars-python-sdk.ts target-repo/docs/
+ cp source-repo/docs/sidebars/sidebars-infrahubctl.ts target-repo/docs/
+ cp source-repo/docs/sidebars/sidebars-python-sdk.ts target-repo/docs/
+ cp source-repo/docs/sidebars/sidebar-utils.ts target-repo/docs/
cd target-repo
git config user.name github-actions
git config user.email github-actions@github.com
diff --git a/.vale.ini b/.vale.ini
index 38608874..6133f5d4 100644
--- a/.vale.ini
+++ b/.vale.ini
@@ -23,3 +23,10 @@ BasedOnStyles =
[*]
BasedOnStyles = Infrahub
+
+# Generated API reference docs: use GeneratedRef spelling (allows snake_case)
+# instead of global Infrahub spelling. Must be last to override [*].
+[docs/docs/python-sdk/sdk_ref/**/*.mdx]
+BasedOnStyles = Infrahub, GeneratedRef
+Infrahub.spelling = NO
+BlockIgnores = (?s) *((import.*?\n)|(```.*?```\n))
diff --git a/.vale/styles/GeneratedRef/spelling.yml b/.vale/styles/GeneratedRef/spelling.yml
new file mode 100644
index 00000000..52aea108
--- /dev/null
+++ b/.vale/styles/GeneratedRef/spelling.yml
@@ -0,0 +1,10 @@
+---
+extends: spelling
+message: "Did you really mean '%s'?"
+level: error
+filters:
+ - '[pP]y.*\b'
+ - '\bimport_.*\b' # Ignore variables starting with 'import_'
+ - '\w+__value' # Skip Infrahub filters in documentation (name__value)
+ - '\b\w+_\w+\b' # Ignore snake_case identifiers in generated API reference docs
+ignore: spelling-exceptions.txt
diff --git a/.vale/styles/Infrahub/sentence-case.yml b/.vale/styles/Infrahub/sentence-case.yml
index 126e18f6..c27cf7a1 100644
--- a/.vale/styles/Infrahub/sentence-case.yml
+++ b/.vale/styles/Infrahub/sentence-case.yml
@@ -52,6 +52,7 @@ exceptions:
- Jinja
- Jinja2
- JWT
+ - MDX
- Namespace
- NATS
- Node
diff --git a/.vale/styles/Infrahub/spelling.yml b/.vale/styles/Infrahub/spelling.yml
index 5fbbbcdd..8f0bfa73 100644
--- a/.vale/styles/Infrahub/spelling.yml
+++ b/.vale/styles/Infrahub/spelling.yml
@@ -4,6 +4,6 @@ message: "Did you really mean '%s'?"
level: error
filters:
- '[pP]y.*\b'
- - '\bimport_.*\b' # New filter to ignore variables starting with 'import_'
- - '\w+__value' # New filter to skip Infrahub filters in documentation (name__value)
+ - '\bimport_.*\b' # Ignore variables starting with 'import_'
+ - '\w+__value' # Skip Infrahub filters in documentation (name__value)
ignore: spelling-exceptions.txt
diff --git a/.vale/styles/spelling-exceptions.txt b/.vale/styles/spelling-exceptions.txt
index 2da24e1f..ecba179f 100644
--- a/.vale/styles/spelling-exceptions.txt
+++ b/.vale/styles/spelling-exceptions.txt
@@ -3,6 +3,7 @@ Alibaba
Ansible
append_git_suffix
APIs
+Args
artifact_definitions
artifact_name
async
@@ -78,6 +79,7 @@ kbps
Keycloak
Loopbacks
markdownlint
+MDX
max_count
memgraph
menu_placement
diff --git a/AGENTS.md b/AGENTS.md
index 00de5ab1..f5896de6 100644
--- a/AGENTS.md
+++ b/AGENTS.md
@@ -7,8 +7,10 @@ Infrahub Python SDK - async/sync client for Infrahub infrastructure management.
```bash
uv sync --all-groups --all-extras # Install all deps
uv run invoke format # Format code
-uv run invoke lint # All linters (code + yamllint + documentation)
+uv run invoke lint # Full pipeline: ruff, yamllint, ty, mypy, markdownlint, vale
uv run invoke lint-code # All linters for Python code
+uv run invoke docs-generate # Generate all docs (CLI + SDK)
+uv run invoke docs-validate # Check generated docs match committed version
uv run pytest tests/unit/ # Unit tests
uv run pytest tests/integration/ # Integration tests
```
@@ -54,7 +56,7 @@ Key rules:
✅ **Always**
- Run `uv run invoke format lint-code` before committing Python code
-- Run `uv run invoke generate-sdk generate-infrahubctl` after changing CLI commands or SDK config
+- Run `uv run invoke docs-generate` after creating, modifying, or deleting CLI commands, SDK config, or Python docstrings
- Run markdownlint before committing markdown changes
- Follow async/sync dual pattern for new features
- Use type hints on all function signatures
diff --git a/CHANGELOG.md b/CHANGELOG.md
index b6c418e0..a3200c39 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -10,6 +10,24 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
This project uses [*towncrier*](https://towncrier.readthedocs.io/) and the changes for the upcoming release can be found in .
+
+## [1.19.0](https://github.com/opsmill/infrahub-sdk-python/tree/v1.19.0) - 2026-03-16
+
+### Added
+
+- Added support for FileObject nodes with file upload and download capabilities. New methods `upload_from_path(path)` and `upload_from_bytes(content, name)` allow setting file content before saving, while `download_file(dest)` enables downloading files to memory or streaming to disk for large files. ([#ihs193](https://github.com/opsmill/infrahub-sdk-python/issues/ihs193))
+- Python SDK API documentation is now generated directly from the docstrings of the classes, functions, and methods contained in the code. ([#201](https://github.com/opsmill/infrahub-sdk-python/issues/201))
+- Added a 'py.typed' file to the project. This is to enable type checking when the Infrahub SDK is imported from other projects. The addition of this file could cause new typing issues in external projects until all typing issues have been resolved. Adding it to the project now to better highlight remaining issues.
+
+### Changed
+
+- Updated branch report command to use node metadata for proposed change creator information instead of the deprecated relationship-based approach. Requires Infrahub 1.7 or above.
+
+### Fixed
+
+- Allow SDK tracking feature to continue after encountering delete errors due to impacted nodes having already been deleted by cascade delete. ([#265](https://github.com/opsmill/infrahub-sdk-python/issues/265))
+- Fixed Python SDK query generation for the `from_pool` generated attribute value ([#497](https://github.com/opsmill/infrahub-sdk-python/issues/497))
+
## [1.18.1](https://github.com/opsmill/infrahub-sdk-python/tree/v1.18.1) - 2026-01-08
### Fixed
diff --git a/changelog/265.fixed.md b/changelog/265.fixed.md
deleted file mode 100644
index 4e3c43a9..00000000
--- a/changelog/265.fixed.md
+++ /dev/null
@@ -1 +0,0 @@
-Allow SDK tracking feature to continue after encountering delete errors due to impacted nodes having already been deleted by cascade delete.
diff --git a/dev/commands/feedback.md b/dev/commands/feedback.md
new file mode 100644
index 00000000..b89896be
--- /dev/null
+++ b/dev/commands/feedback.md
@@ -0,0 +1,92 @@
+# Session Feedback
+
+Analyze this conversation and identify what documentation or context was missing, incomplete, or incorrect. The goal is to continuously improve the project's knowledge base so future conversations are more efficient.
+
+## Step 1: Session Analysis
+
+Reflect on the work done in this conversation. For each area, identify friction points:
+
+1. **Exploration overhead**: What parts of the codebase did you have to discover by searching that should have been documented? (e.g., patterns, conventions, module responsibilities)
+2. **Wrong assumptions**: Did you make incorrect assumptions due to missing or misleading documentation?
+3. **Repeated patterns**: Did you discover recurring patterns or conventions that aren't documented anywhere?
+4. **Missing context**: What background knowledge would have helped you start faster? (e.g., architecture decisions, data flow, naming conventions)
+5. **Tooling gaps**: Were there commands, scripts, or workflows that you had to figure out?
+
+## Step 2: Documentation Audit
+
+For each friction point identified, determine the appropriate fix. Check the existing documentation to avoid duplicating what's already there:
+
+- `AGENTS.md` — Top-level project instructions and component map
+- `CLAUDE.md` — Entry point referencing AGENTS.md
+- `docs/AGENTS.md` — Documentation site guide
+- `infrahub_sdk/ctl/AGENTS.md` — CLI development guide
+- `infrahub_sdk/pytest_plugin/AGENTS.md` — Pytest plugin guide
+- `tests/AGENTS.md` — Testing guide
+
+Read the relevant existing files to understand what's already documented before proposing changes.
+
+## Step 3: Generate Report
+
+Present the feedback as a structured report with the following sections. Only include sections that have content — skip empty sections.
+
+### Format
+
+```markdown
+## Session Feedback Report
+
+### What I Was Working On
+
+
+### Documentation Gaps
+
+
+For each gap:
+
+- **Topic**: What's missing
+- **Where**: Which file should contain this (existing file to update, or new file to create)
+- **Why**: How this would have helped during this conversation
+- **Suggested content**: A draft of what should be added (be specific and actionable)
+
+### Documentation Corrections
+
+
+For each correction:
+
+- **File**: Path to the file
+- **Issue**: What's wrong or misleading
+- **Fix**: What it should say instead
+
+### Discovered Patterns
+
+
+For each pattern:
+
+- **Pattern**: Description of the convention
+- **Evidence**: Where in the code this pattern is used (file paths)
+- **Where to document**: Which AGENTS.md or guide file should capture this
+
+### Memory Updates
+
+
+For each update:
+
+- **Action**: Add / Update / Remove
+- **Content**: What to write
+- **Reason**: Why this is worth remembering across sessions
+```
+
+## Step 4: Apply Changes
+
+After presenting the report, ask the user which changes they want to apply. Present the options:
+
+1. **Apply all** — Create/update all proposed documentation files and memory
+2. **Cherry-pick** — Let the user select which changes to apply
+3. **None** — Just keep the report as reference, don't modify any files
+
+
+For approved changes:
+
+- Edit existing files when updating documentation
+- Create new files only when no appropriate existing file exists
+- Update `MEMORY.md` with approved memory changes
+- Keep all changes minimal and focused — don't over-document
diff --git a/dev/commands/pre-ci.md b/dev/commands/pre-ci.md
new file mode 100644
index 00000000..7f14d70d
--- /dev/null
+++ b/dev/commands/pre-ci.md
@@ -0,0 +1,36 @@
+Run a subset of fast CI checks locally. These are lightweight validations that catch common issues before pushing. Run all steps and report a summary at the end.
+
+## Steps
+
+1. **Format** Python code:
+ ```bash
+ uv run invoke format
+ ```
+
+2. **Lint** (YAML, Ruff, ty, mypy, markdownlint, vale):
+ ```bash
+ uv run invoke lint
+ ```
+
+3. **Python unit tests**:
+ ```bash
+ uv run pytest tests/unit/
+ ```
+
+4. **Docs unit tests** (vitest):
+ ```bash
+ (cd docs && npx --no-install vitest run)
+ ```
+
+5. **Validate generated documentation** (regenerate and check for drift):
+ ```bash
+ uv run invoke docs-validate
+ ```
+
+## Instructions
+
+- Run each step in order using the Bash tool.
+- If a step fails, continue with the remaining steps.
+- At the end, print a summary table of all steps with pass/fail status.
+- Do NOT commit or push anything.
+
diff --git a/.markdownlint.yaml b/docs/.markdownlint.yaml
similarity index 100%
rename from .markdownlint.yaml
rename to docs/.markdownlint.yaml
diff --git a/docs/AGENTS.md b/docs/AGENTS.md
index 36021cbb..43e104c6 100644
--- a/docs/AGENTS.md
+++ b/docs/AGENTS.md
@@ -1,4 +1,4 @@
-# docs/AGENTS.md
+# Documentation agents
Docusaurus documentation following Diataxis framework.
@@ -8,8 +8,10 @@ Docusaurus documentation following Diataxis framework.
cd docs && npm install # Install deps
cd docs && npm start # Dev server at localhost:3000
cd docs && npm run build # Build static site
-uv run invoke docs # Generate auto-docs
-uv run invoke docs-validate # Validate docs are current
+cd docs && npm test # Run sidebar utility tests
+uv run invoke docs # Build documentation website
+uv run invoke docs-generate # Regenerate all docs (infrahubctl CLI + Python SDK)
+uv run invoke docs-validate # Check that generated docs match committed files
```
## Structure
@@ -23,13 +25,21 @@ docs/docs/
└── infrahubctl/ # CLI docs (auto-generated)
```
-## Adding Documentation
+## Sidebars
+
+Sidebar navigation is dynamic: `sidebars-*.ts` files read the filesystem at build time via utility functions in `sidebar-utils.ts`.
+
+- **infrahubctl**: all `.mdx` files are discovered automatically and sorted alphabetically.
+- **python-sdk**: guides, topics, and reference sections preserve a defined display order; new files are appended alphabetically at the end.
+
+No manual sidebar update is needed when adding a new `.mdx` file. However, to control the display order of a new page, add its doc ID to the ordered list in the corresponding `sidebars-*.ts` file.
+
+## Adding documentation
1. Create MDX file in appropriate directory
2. Add frontmatter with `title`
-3. Update `sidebars-*.ts` for navigation
-## MDX Pattern
+## MDX pattern
Use Tabs for async/sync examples, callouts for notes:
@@ -52,9 +62,11 @@ Use callouts for important notes.
✅ **Always**
- Include both async/sync examples using Tabs
-- Run `uv run invoke docs-validate` after code changes
+- Run `uv run invoke docs-validate` after code changes to verify generated docs are up to date
🚫 **Never**
-- Edit `docs/infrahubctl/*.mdx` directly (regenerate with `uv run invoke generate-infrahubctl`)
-- Edit `docs/python-sdk/reference/config.mdx` directly (regenerate with `uv run invoke generate-sdk`)
+- Edit `docs/infrahubctl/*.mdx` directly (regenerate with `uv run invoke docs-generate`)
+- Edit `docs/python-sdk/reference/config.mdx` directly (regenerate with `uv run invoke docs-generate`)
+- Edit `docs/python-sdk/reference/templating.mdx` directly (regenerate with `uv run invoke docs-generate`)
+- Edit `docs/python-sdk/sdk_ref/**/*.mdx` directly (regenerate with `uv run invoke docs-generate`)
diff --git a/docs/__init__.py b/docs/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/docs/_templates/sdk_config.j2 b/docs/_templates/sdk_config.j2
index 7922a363..4fbc9d39 100644
--- a/docs/_templates/sdk_config.j2
+++ b/docs/_templates/sdk_config.j2
@@ -31,8 +31,6 @@ The following settings can be defined in the `Config` class
{% for property in properties %}
## {{ property.name }}
-
-**Property**: {{ property.name }}
**Description**: {% if '\n' in property.description %} {% endif %}{{ property.description }}
**Type**: `{{ property.type }}`
diff --git a/docs/docs/python-sdk/guides/client.mdx b/docs/docs/python-sdk/guides/client.mdx
index 460036a1..90872a0f 100644
--- a/docs/docs/python-sdk/guides/client.mdx
+++ b/docs/docs/python-sdk/guides/client.mdx
@@ -251,7 +251,7 @@ Your client is now configured to use the specified default branch instead of `ma
## Hello world example
-Let's create a simple "Hello World" example to verify your client configuration works correctly. This example will connect to your Infrahub instance and query the available accounts.
+Let's create a "Hello World" example to verify your client configuration works correctly. This example will connect to your Infrahub instance and query the available accounts.
1. Create a new file called `hello_world.py`:
diff --git a/docs/docs/python-sdk/guides/python-typing.mdx b/docs/docs/python-sdk/guides/python-typing.mdx
index 9bc2c323..77780177 100644
--- a/docs/docs/python-sdk/guides/python-typing.mdx
+++ b/docs/docs/python-sdk/guides/python-typing.mdx
@@ -131,7 +131,7 @@ infrahubctl graphql generate-return-types queries/get_tags.gql
### Example workflow
-1. **Create your GraphQL queries** in `.gql` files preferably in a directory (e.g., `queries/`):
+1. **Create your GraphQL queries** in `.gql` files preferably in a directory (for example, `queries/`):
```graphql
# queries/get_tags.gql
diff --git a/docs/docs/python-sdk/reference/config.mdx b/docs/docs/python-sdk/reference/config.mdx
index 320aebb4..1b525389 100644
--- a/docs/docs/python-sdk/reference/config.mdx
+++ b/docs/docs/python-sdk/reference/config.mdx
@@ -30,8 +30,6 @@ The Python SDK (Async or Sync) client can be configured using an instance of the
The following settings can be defined in the `Config` class
## address
-
-**Property**: address
**Description**: The URL to use when connecting to Infrahub.
**Type**: `string`
@@ -39,16 +37,12 @@ The following settings can be defined in the `Config` class
**Environment variable**: `INFRAHUB_ADDRESS`
## api_token
-
-**Property**: api_token
**Description**: API token for authentication against Infrahub.
**Type**: `string`
**Environment variable**: `INFRAHUB_API_TOKEN`
## echo_graphql_queries
-
-**Property**: echo_graphql_queries
**Description**: If set the GraphQL query and variables will be echoed to the screen
**Type**: `boolean`
@@ -56,24 +50,18 @@ The following settings can be defined in the `Config` class
**Environment variable**: `INFRAHUB_ECHO_GRAPHQL_QUERIES`
## username
-
-**Property**: username
**Description**: Username for accessing Infrahub
**Type**: `string`
**Environment variable**: `INFRAHUB_USERNAME`
## password
-
-**Property**: password
**Description**: Password for accessing Infrahub
**Type**: `string`
**Environment variable**: `INFRAHUB_PASSWORD`
## default_branch
-
-**Property**: default_branch
**Description**: Default branch to target if not specified for each request.
**Type**: `string`
@@ -81,8 +69,6 @@ The following settings can be defined in the `Config` class
**Environment variable**: `INFRAHUB_DEFAULT_BRANCH`
## default_branch_from_git
-
-**Property**: default_branch_from_git
**Description**: Indicates if the default Infrahub branch to target should come from the active branch in the local Git repository.
**Type**: `boolean`
@@ -90,16 +76,12 @@ The following settings can be defined in the `Config` class
**Environment variable**: `INFRAHUB_DEFAULT_BRANCH_FROM_GIT`
## identifier
-
-**Property**: identifier
**Description**: Tracker identifier
**Type**: `string`
**Environment variable**: `INFRAHUB_IDENTIFIER`
## insert_tracker
-
-**Property**: insert_tracker
**Description**: Insert a tracker on queries to the server
**Type**: `boolean`
@@ -107,8 +89,6 @@ The following settings can be defined in the `Config` class
**Environment variable**: `INFRAHUB_INSERT_TRACKER`
## max_concurrent_execution
-
-**Property**: max_concurrent_execution
**Description**: Max concurrent execution in batch mode
**Type**: `integer`
@@ -116,16 +96,12 @@ The following settings can be defined in the `Config` class
**Environment variable**: `INFRAHUB_MAX_CONCURRENT_EXECUTION`
## mode
-
-**Property**: mode
**Description**: Default mode for the client
**Type**: `object`
**Environment variable**: `INFRAHUB_MODE`
## pagination_size
-
-**Property**: pagination_size
**Description**: Page size for queries to the server
**Type**: `integer`
@@ -133,8 +109,6 @@ The following settings can be defined in the `Config` class
**Environment variable**: `INFRAHUB_PAGINATION_SIZE`
## retry_delay
-
-**Property**: retry_delay
**Description**: Number of seconds to wait until attempting a retry.
**Type**: `integer`
@@ -142,8 +116,6 @@ The following settings can be defined in the `Config` class
**Environment variable**: `INFRAHUB_RETRY_DELAY`
## retry_on_failure
-
-**Property**: retry_on_failure
**Description**: Retry operation in case of failure
**Type**: `boolean`
@@ -151,8 +123,6 @@ The following settings can be defined in the `Config` class
**Environment variable**: `INFRAHUB_RETRY_ON_FAILURE`
## max_retry_duration
-
-**Property**: max_retry_duration
**Description**: Maximum duration until we stop attempting to retry if enabled.
**Type**: `integer`
@@ -160,8 +130,6 @@ The following settings can be defined in the `Config` class
**Environment variable**: `INFRAHUB_MAX_RETRY_DURATION`
## schema_converge_timeout
-
-**Property**: schema_converge_timeout
**Description**: Number of seconds to wait for schema to have converged
**Type**: `integer`
@@ -169,8 +137,6 @@ The following settings can be defined in the `Config` class
**Environment variable**: `INFRAHUB_SCHEMA_CONVERGE_TIMEOUT`
## timeout
-
-**Property**: timeout
**Description**: Default connection timeout in seconds
**Type**: `integer`
@@ -178,32 +144,24 @@ The following settings can be defined in the `Config` class
**Environment variable**: `INFRAHUB_TIMEOUT`
## transport
-
-**Property**: transport
**Description**: Set an alternate transport using a predefined option
**Type**: `object`
**Environment variable**: `INFRAHUB_TRANSPORT`
## proxy
-
-**Property**: proxy
**Description**: Proxy address
**Type**: `string`
**Environment variable**: `INFRAHUB_PROXY`
## proxy_mounts
-
-**Property**: proxy_mounts
**Description**: Proxy mounts configuration
**Type**: `object`
**Environment variable**: `INFRAHUB_PROXY_MOUNTS`
## update_group_context
-
-**Property**: update_group_context
**Description**: Update GraphQL query groups
**Type**: `boolean`
@@ -211,8 +169,6 @@ The following settings can be defined in the `Config` class
**Environment variable**: `INFRAHUB_UPDATE_GROUP_CONTEXT`
## tls_insecure
-
-**Property**: tls_insecure
**Description**:
Indicates if TLS certificates are verified.
@@ -223,8 +179,6 @@ The following settings can be defined in the `Config` class
**Environment variable**: `INFRAHUB_TLS_INSECURE`
## tls_ca_file
-
-**Property**: tls_ca_file
**Description**: File path to CA cert or bundle in PEM format
**Type**: `string`
diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/client.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/client.mdx
new file mode 100644
index 00000000..7b47c99c
--- /dev/null
+++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/client.mdx
@@ -0,0 +1,942 @@
+---
+title: client
+sidebarTitle: client
+---
+
+# `infrahub_sdk.client`
+
+## Classes
+
+### `InfrahubClient`
+
+GraphQL Client to interact with Infrahub.
+
+**Methods:**
+
+#### `get`
+
+```python
+get(self, kind: type[SchemaType], raise_when_missing: Literal[False], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> SchemaType | None
+```
+
+
+Show 6 other overloads
+
+#### `get`
+
+```python
+get(self, kind: type[SchemaType], raise_when_missing: Literal[True], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> SchemaType
+```
+
+#### `get`
+
+```python
+get(self, kind: type[SchemaType], raise_when_missing: bool = ..., at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> SchemaType
+```
+
+#### `get`
+
+```python
+get(self, kind: str, raise_when_missing: Literal[False], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> InfrahubNode | None
+```
+
+#### `get`
+
+```python
+get(self, kind: str, raise_when_missing: Literal[True], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> InfrahubNode
+```
+
+#### `get`
+
+```python
+get(self, kind: str, raise_when_missing: bool = ..., at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> InfrahubNode
+```
+
+#### `get`
+
+```python
+get(self, kind: str | type[SchemaType], raise_when_missing: bool = True, at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, id: str | None = None, hfid: list[str] | None = None, include: list[str] | None = None, exclude: list[str] | None = None, populate_store: bool = True, fragment: bool = False, prefetch_relationships: bool = False, property: bool = False, include_metadata: bool = False, **kwargs: Any) -> InfrahubNode | SchemaType | None
+```
+
+
+#### `delete`
+
+```python
+delete(self, kind: str | type[SchemaType], id: str, branch: str | None = None) -> None
+```
+
+#### `create`
+
+```python
+create(self, kind: str | type[SchemaType], data: dict | None = None, branch: str | None = None, timeout: int | None = None, **kwargs: Any) -> InfrahubNode | SchemaType
+```
+
+
+Show 2 other overloads
+
+#### `create`
+
+```python
+create(self, kind: str, data: dict | None = ..., branch: str | None = ..., **kwargs: Any) -> InfrahubNode
+```
+
+#### `create`
+
+```python
+create(self, kind: type[SchemaType], data: dict | None = ..., branch: str | None = ..., **kwargs: Any) -> SchemaType
+```
+
+
+#### `get_version`
+
+```python
+get_version(self) -> str
+```
+
+Return the Infrahub version.
+
+#### `get_user`
+
+```python
+get_user(self) -> dict
+```
+
+Return user information
+
+#### `get_user_permissions`
+
+```python
+get_user_permissions(self) -> dict
+```
+
+Return user permissions
+
+#### `count`
+
+```python
+count(self, kind: str | type[SchemaType], at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, partial_match: bool = False, **kwargs: Any) -> int
+```
+
+Return the number of nodes of a given kind.
+
+#### `all`
+
+```python
+all(self, kind: type[SchemaType], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ...) -> list[SchemaType]
+```
+
+
+Show 2 other overloads
+
+#### `all`
+
+```python
+all(self, kind: str, at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ...) -> list[InfrahubNode]
+```
+
+#### `all`
+
+```python
+all(self, kind: str | type[SchemaType], at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, populate_store: bool = True, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, fragment: bool = False, prefetch_relationships: bool = False, property: bool = False, parallel: bool = False, order: Order | None = None, include_metadata: bool = False) -> list[InfrahubNode] | list[SchemaType]
+```
+
+Retrieve all nodes of a given kind
+
+**Args:**
+
+- `kind`: kind of the nodes to query
+- `at`: Time of the query. Defaults to Now.
+- `branch`: Name of the branch to query from. Defaults to default_branch.
+- `populate_store`: Flag to indicate whether to populate the store with the retrieved nodes.
+- `timeout`: Overrides default timeout used when querying the GraphQL API. Specified in seconds.
+- `offset`: The offset for pagination.
+- `limit`: The limit for pagination.
+- `include`: List of attributes or relationships to include in the query.
+- `exclude`: List of attributes or relationships to exclude from the query.
+- `fragment`: Flag to use GraphQL fragments for generic schemas.
+- `prefetch_relationships`: Flag to indicate whether to pre-fetch related node data.
+- `parallel`: Whether to use parallel processing for the query.
+- `order`: Ordering related options. Setting `disable=True` enhances performances.
+- `include_metadata`: If True, includes node_metadata and relationship_metadata in the query.
+
+**Returns:**
+
+- list\[InfrahubNode]: List of Nodes
+
+
+#### `filters`
+
+```python
+filters(self, kind: type[SchemaType], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., partial_match: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ..., **kwargs: Any) -> list[SchemaType]
+```
+
+
+Show 2 other overloads
+
+#### `filters`
+
+```python
+filters(self, kind: str, at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., partial_match: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ..., **kwargs: Any) -> list[InfrahubNode]
+```
+
+#### `filters`
+
+```python
+filters(self, kind: str | type[SchemaType], at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, populate_store: bool = True, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, fragment: bool = False, prefetch_relationships: bool = False, partial_match: bool = False, property: bool = False, parallel: bool = False, order: Order | None = None, include_metadata: bool = False, **kwargs: Any) -> list[InfrahubNode] | list[SchemaType]
+```
+
+Retrieve nodes of a given kind based on provided filters.
+
+**Args:**
+
+- `kind`: kind of the nodes to query
+- `at`: Time of the query. Defaults to Now.
+- `branch`: Name of the branch to query from. Defaults to default_branch.
+- `timeout`: Overrides default timeout used when querying the GraphQL API. Specified in seconds.
+- `populate_store`: Flag to indicate whether to populate the store with the retrieved nodes.
+- `offset`: The offset for pagination.
+- `limit`: The limit for pagination.
+- `include`: List of attributes or relationships to include in the query.
+- `exclude`: List of attributes or relationships to exclude from the query.
+- `fragment`: Flag to use GraphQL fragments for generic schemas.
+- `prefetch_relationships`: Flag to indicate whether to pre-fetch related node data.
+- `partial_match`: Allow partial match of filter criteria for the query.
+- `parallel`: Whether to use parallel processing for the query.
+- `order`: Ordering related options. Setting `disable=True` enhances performances.
+- `include_metadata`: If True, includes node_metadata and relationship_metadata in the query.
+- `**kwargs`: Additional filter criteria for the query.
+
+**Returns:**
+
+- list\[InfrahubNode]: List of Nodes that match the given filters.
+
+
+#### `clone`
+
+```python
+clone(self, branch: str | None = None) -> InfrahubClient
+```
+
+Return a cloned version of the client using the same configuration
+
+#### `execute_graphql`
+
+```python
+execute_graphql(self, query: str, variables: dict | None = None, branch_name: str | None = None, at: str | Timestamp | None = None, timeout: int | None = None, raise_for_error: bool | None = None, tracker: str | None = None) -> dict
+```
+
+Execute a GraphQL query (or mutation).
+If retry_on_failure is True, the query will retry until the server becomes reachable.
+
+**Args:**
+
+- `query`: GraphQL Query to execute, can be a query or a mutation
+- `variables`: Variables to pass along with the GraphQL query. Defaults to None.
+- `branch_name`: Name of the branch on which the query will be executed. Defaults to None.
+- `at`: Time when the query should be executed. Defaults to None.
+- `timeout`: Timeout in second for the query. Defaults to None.
+- `raise_for_error`: Deprecated. Controls only HTTP status handling.
+- None (default) or True\: HTTP errors raise via `resp.raise_for_status()`.
+- False\: HTTP errors are not automatically raised.
+GraphQL errors always raise `GraphQLError`. Defaults to None.
+
+**Raises:**
+
+- `GraphQLError`: When the GraphQL response contains errors.
+
+**Returns:**
+
+- The GraphQL data payload (`response["data"]`).
+
+#### `refresh_login`
+
+```python
+refresh_login(self) -> None
+```
+
+#### `login`
+
+```python
+login(self, refresh: bool = False) -> None
+```
+
+#### `query_gql_query`
+
+```python
+query_gql_query(self, name: str, variables: dict | None = None, update_group: bool = False, subscribers: list[str] | None = None, params: dict | None = None, branch_name: str | None = None, at: str | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> dict
+```
+
+#### `create_diff`
+
+```python
+create_diff(self, branch: str, name: str, from_time: datetime, to_time: datetime, wait_until_completion: bool = True) -> bool | str
+```
+
+#### `get_diff_summary`
+
+```python
+get_diff_summary(self, branch: str, name: str | None = None, from_time: datetime | None = None, to_time: datetime | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> list[NodeDiff]
+```
+
+#### `get_diff_tree`
+
+```python
+get_diff_tree(self, branch: str, name: str | None = None, from_time: datetime | None = None, to_time: datetime | None = None, timeout: int | None = None, tracker: str | None = None) -> DiffTreeData | None
+```
+
+Get complete diff tree with metadata and nodes.
+
+Returns None if no diff exists.
+
+#### `allocate_next_ip_address`
+
+```python
+allocate_next_ip_address(self, resource_pool: CoreNode, kind: type[SchemaType], identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> SchemaType
+```
+
+
+Show 6 other overloads
+
+#### `allocate_next_ip_address`
+
+```python
+allocate_next_ip_address(self, resource_pool: CoreNode, kind: type[SchemaType], identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> SchemaType | None
+```
+
+#### `allocate_next_ip_address`
+
+```python
+allocate_next_ip_address(self, resource_pool: CoreNode, kind: type[SchemaType], identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> SchemaType
+```
+
+#### `allocate_next_ip_address`
+
+```python
+allocate_next_ip_address(self, resource_pool: CoreNode, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> CoreNode
+```
+
+#### `allocate_next_ip_address`
+
+```python
+allocate_next_ip_address(self, resource_pool: CoreNode, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> CoreNode | None
+```
+
+#### `allocate_next_ip_address`
+
+```python
+allocate_next_ip_address(self, resource_pool: CoreNode, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> CoreNode | None
+```
+
+#### `allocate_next_ip_address`
+
+```python
+allocate_next_ip_address(self, resource_pool: CoreNode, kind: type[SchemaType] | None = None, identifier: str | None = None, prefix_length: int | None = None, address_type: str | None = None, data: dict[str, Any] | None = None, branch: str | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> CoreNode | SchemaType | None
+```
+
+Allocate a new IP address by using the provided resource pool.
+
+**Args:**
+
+- `resource_pool`: Node corresponding to the pool to allocate resources from.
+- `identifier`: Value to perform idempotent allocation, the same resource will be returned for a given identifier.
+- `prefix_length`: Length of the prefix to set on the address to allocate.
+- `address_type`: Kind of the address to allocate.
+- `data`: A key/value map to use to set attributes values on the allocated address.
+- `branch`: Name of the branch to allocate from. Defaults to default_branch.
+- `timeout`: Overrides default timeout used when querying the GraphQL API. Specified in seconds.
+- `tracker`: Tracker identifier to attach to the query.
+- `raise_for_error`: Deprecated, raise an error if the HTTP status is not 2XX.
+
+**Returns:**
+
+- Node corresponding to the allocated resource.
+
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: type[SchemaType], identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> SchemaType
+```
+
+
+Show 6 other overloads
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: type[SchemaType], identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> SchemaType | None
+```
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: type[SchemaType], identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> SchemaType
+```
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> CoreNode
+```
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> CoreNode | None
+```
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> CoreNode | None
+```
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: type[SchemaType] | None = None, identifier: str | None = None, prefix_length: int | None = None, member_type: str | None = None, prefix_type: str | None = None, data: dict[str, Any] | None = None, branch: str | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> CoreNode | SchemaType | None
+```
+
+Allocate a new IP prefix by using the provided resource pool.
+
+**Args:**
+
+- `resource_pool`: Node corresponding to the pool to allocate resources from.
+- `identifier`: Value to perform idempotent allocation, the same resource will be returned for a given identifier.
+- `prefix_length`: Length of the prefix to allocate.
+- `member_type`: Member type of the prefix to allocate.
+- `prefix_type`: Kind of the prefix to allocate.
+- `data`: A key/value map to use to set attributes values on the allocated prefix.
+- `branch`: Name of the branch to allocate from. Defaults to default_branch.
+- `timeout`: Overrides default timeout used when querying the GraphQL API. Specified in seconds.
+- `tracker`: Tracker identifier to attach to the query.
+- `raise_for_error`: Deprecated, raise an error if the HTTP status is not 2XX.
+
+**Returns:**
+
+- Node corresponding to the allocated resource.
+
+
+#### `create_batch`
+
+```python
+create_batch(self, return_exceptions: bool = False) -> InfrahubBatch
+```
+
+#### `get_list_repositories`
+
+```python
+get_list_repositories(self, branches: dict[str, BranchData] | None = None, kind: str = 'CoreGenericRepository') -> dict[str, RepositoryData]
+```
+
+#### `repository_update_commit`
+
+```python
+repository_update_commit(self, branch_name: str, repository_id: str, commit: str, is_read_only: bool = False) -> bool
+```
+
+#### `convert_object_type`
+
+```python
+convert_object_type(self, node_id: str, target_kind: str, branch: str | None = None, fields_mapping: dict[str, ConversionFieldInput] | None = None) -> InfrahubNode
+```
+
+Convert a given node to another kind on a given branch. `fields_mapping` keys are target fields names
+and its values indicate how to fill in these fields. Any mandatory field not having an equivalent field
+in the source kind should be specified in this mapping. See https://docs.infrahub.app/guides/object-convert-type
+for more information.
+
+### `InfrahubClientSync`
+
+**Methods:**
+
+#### `get`
+
+```python
+get(self, kind: type[SchemaTypeSync], raise_when_missing: Literal[False], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> SchemaTypeSync | None
+```
+
+
+Show 6 other overloads
+
+#### `get`
+
+```python
+get(self, kind: type[SchemaTypeSync], raise_when_missing: Literal[True], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> SchemaTypeSync
+```
+
+#### `get`
+
+```python
+get(self, kind: type[SchemaTypeSync], raise_when_missing: bool = ..., at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> SchemaTypeSync
+```
+
+#### `get`
+
+```python
+get(self, kind: str, raise_when_missing: Literal[False], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> InfrahubNodeSync | None
+```
+
+#### `get`
+
+```python
+get(self, kind: str, raise_when_missing: Literal[True], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> InfrahubNodeSync
+```
+
+#### `get`
+
+```python
+get(self, kind: str, raise_when_missing: bool = ..., at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> InfrahubNodeSync
+```
+
+#### `get`
+
+```python
+get(self, kind: str | type[SchemaTypeSync], raise_when_missing: bool = True, at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, id: str | None = None, hfid: list[str] | None = None, include: list[str] | None = None, exclude: list[str] | None = None, populate_store: bool = True, fragment: bool = False, prefetch_relationships: bool = False, property: bool = False, include_metadata: bool = False, **kwargs: Any) -> InfrahubNodeSync | SchemaTypeSync | None
+```
+
+
+#### `delete`
+
+```python
+delete(self, kind: str | type[SchemaTypeSync], id: str, branch: str | None = None) -> None
+```
+
+#### `create`
+
+```python
+create(self, kind: str | type[SchemaTypeSync], data: dict | None = None, branch: str | None = None, timeout: int | None = None, **kwargs: Any) -> InfrahubNodeSync | SchemaTypeSync
+```
+
+
+Show 2 other overloads
+
+#### `create`
+
+```python
+create(self, kind: str, data: dict | None = ..., branch: str | None = ..., **kwargs: Any) -> InfrahubNodeSync
+```
+
+#### `create`
+
+```python
+create(self, kind: type[SchemaTypeSync], data: dict | None = ..., branch: str | None = ..., **kwargs: Any) -> SchemaTypeSync
+```
+
+
+#### `get_version`
+
+```python
+get_version(self) -> str
+```
+
+Return the Infrahub version.
+
+#### `get_user`
+
+```python
+get_user(self) -> dict
+```
+
+Return user information
+
+#### `get_user_permissions`
+
+```python
+get_user_permissions(self) -> dict
+```
+
+Return user permissions
+
+#### `clone`
+
+```python
+clone(self, branch: str | None = None) -> InfrahubClientSync
+```
+
+Return a cloned version of the client using the same configuration
+
+#### `execute_graphql`
+
+```python
+execute_graphql(self, query: str, variables: dict | None = None, branch_name: str | None = None, at: str | Timestamp | None = None, timeout: int | None = None, raise_for_error: bool | None = None, tracker: str | None = None) -> dict
+```
+
+Execute a GraphQL query (or mutation).
+If retry_on_failure is True, the query will retry until the server becomes reachable.
+
+**Args:**
+
+- `query`: GraphQL Query to execute, can be a query or a mutation
+- `variables`: Variables to pass along with the GraphQL query. Defaults to None.
+- `branch_name`: Name of the branch on which the query will be executed. Defaults to None.
+- `at`: Time when the query should be executed. Defaults to None.
+- `timeout`: Timeout in second for the query. Defaults to None.
+- `raise_for_error`: Deprecated. Controls only HTTP status handling.
+- None (default) or True\: HTTP errors raise via `resp.raise_for_status()`.
+- False\: HTTP errors are not automatically raised.
+GraphQL errors always raise `GraphQLError`. Defaults to None.
+
+**Raises:**
+
+- `GraphQLError`: When the GraphQL response contains errors.
+
+**Returns:**
+
+- The GraphQL data payload (`response["data"]`).
+
+#### `count`
+
+```python
+count(self, kind: str | type[SchemaType], at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, partial_match: bool = False, **kwargs: Any) -> int
+```
+
+Return the number of nodes of a given kind.
+
+#### `all`
+
+```python
+all(self, kind: type[SchemaTypeSync], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ...) -> list[SchemaTypeSync]
+```
+
+
+Show 2 other overloads
+
+#### `all`
+
+```python
+all(self, kind: str, at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ...) -> list[InfrahubNodeSync]
+```
+
+#### `all`
+
+```python
+all(self, kind: str | type[SchemaTypeSync], at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, populate_store: bool = True, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, fragment: bool = False, prefetch_relationships: bool = False, property: bool = False, parallel: bool = False, order: Order | None = None, include_metadata: bool = False) -> list[InfrahubNodeSync] | list[SchemaTypeSync]
+```
+
+Retrieve all nodes of a given kind
+
+**Args:**
+
+- `kind`: kind of the nodes to query
+- `at`: Time of the query. Defaults to Now.
+- `branch`: Name of the branch to query from. Defaults to default_branch.
+- `timeout`: Overrides default timeout used when querying the GraphQL API. Specified in seconds.
+- `populate_store`: Flag to indicate whether to populate the store with the retrieved nodes.
+- `offset`: The offset for pagination.
+- `limit`: The limit for pagination.
+- `include`: List of attributes or relationships to include in the query.
+- `exclude`: List of attributes or relationships to exclude from the query.
+- `fragment`: Flag to use GraphQL fragments for generic schemas.
+- `prefetch_relationships`: Flag to indicate whether to pre-fetch related node data.
+- `parallel`: Whether to use parallel processing for the query.
+- `order`: Ordering related options. Setting `disable=True` enhances performances.
+- `include_metadata`: If True, includes node_metadata and relationship_metadata in the query.
+
+**Returns:**
+
+- list\[InfrahubNodeSync]: List of Nodes
+
+
+#### `filters`
+
+```python
+filters(self, kind: type[SchemaTypeSync], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., partial_match: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ..., **kwargs: Any) -> list[SchemaTypeSync]
+```
+
+
+Show 2 other overloads
+
+#### `filters`
+
+```python
+filters(self, kind: str, at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., partial_match: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ..., **kwargs: Any) -> list[InfrahubNodeSync]
+```
+
+#### `filters`
+
+```python
+filters(self, kind: str | type[SchemaTypeSync], at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, populate_store: bool = True, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, fragment: bool = False, prefetch_relationships: bool = False, partial_match: bool = False, property: bool = False, parallel: bool = False, order: Order | None = None, include_metadata: bool = False, **kwargs: Any) -> list[InfrahubNodeSync] | list[SchemaTypeSync]
+```
+
+Retrieve nodes of a given kind based on provided filters.
+
+**Args:**
+
+- `kind`: kind of the nodes to query
+- `at`: Time of the query. Defaults to Now.
+- `branch`: Name of the branch to query from. Defaults to default_branch.
+- `timeout`: Overrides default timeout used when querying the GraphQL API. Specified in seconds.
+- `populate_store`: Flag to indicate whether to populate the store with the retrieved nodes.
+- `offset`: The offset for pagination.
+- `limit`: The limit for pagination.
+- `include`: List of attributes or relationships to include in the query.
+- `exclude`: List of attributes or relationships to exclude from the query.
+- `fragment`: Flag to use GraphQL fragments for generic schemas.
+- `prefetch_relationships`: Flag to indicate whether to pre-fetch related node data.
+- `partial_match`: Allow partial match of filter criteria for the query.
+- `parallel`: Whether to use parallel processing for the query.
+- `order`: Ordering related options. Setting `disable=True` enhances performance.
+- `include_metadata`: If True, includes node_metadata and relationship_metadata in the query.
+- `**kwargs`: Additional filter criteria for the query.
+
+**Returns:**
+
+- list\[InfrahubNodeSync]: List of Nodes that match the given filters.
+
+
+#### `create_batch`
+
+```python
+create_batch(self, return_exceptions: bool = False) -> InfrahubBatchSync
+```
+
+Create a batch to execute multiple queries concurrently.
+
+Executing the batch will be performed using a thread pool, meaning it cannot guarantee the execution order. It is not recommended to use such
+a batch to manipulate objects that depend on each other.
+
+#### `get_list_repositories`
+
+```python
+get_list_repositories(self, branches: dict[str, BranchData] | None = None, kind: str = 'CoreGenericRepository') -> dict[str, RepositoryData]
+```
+
+#### `query_gql_query`
+
+```python
+query_gql_query(self, name: str, variables: dict | None = None, update_group: bool = False, subscribers: list[str] | None = None, params: dict | None = None, branch_name: str | None = None, at: str | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> dict
+```
+
+#### `create_diff`
+
+```python
+create_diff(self, branch: str, name: str, from_time: datetime, to_time: datetime, wait_until_completion: bool = True) -> bool | str
+```
+
+#### `get_diff_summary`
+
+```python
+get_diff_summary(self, branch: str, name: str | None = None, from_time: datetime | None = None, to_time: datetime | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> list[NodeDiff]
+```
+
+#### `get_diff_tree`
+
+```python
+get_diff_tree(self, branch: str, name: str | None = None, from_time: datetime | None = None, to_time: datetime | None = None, timeout: int | None = None, tracker: str | None = None) -> DiffTreeData | None
+```
+
+Get complete diff tree with metadata and nodes.
+
+Returns None if no diff exists.
+
+#### `allocate_next_ip_address`
+
+```python
+allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync], identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> SchemaTypeSync
+```
+
+
+Show 6 other overloads
+
+#### `allocate_next_ip_address`
+
+```python
+allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync], identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> SchemaTypeSync | None
+```
+
+#### `allocate_next_ip_address`
+
+```python
+allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync], identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> SchemaTypeSync
+```
+
+#### `allocate_next_ip_address`
+
+```python
+allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> CoreNodeSync
+```
+
+#### `allocate_next_ip_address`
+
+```python
+allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> CoreNodeSync | None
+```
+
+#### `allocate_next_ip_address`
+
+```python
+allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> CoreNodeSync | None
+```
+
+#### `allocate_next_ip_address`
+
+```python
+allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync] | None = None, identifier: str | None = None, prefix_length: int | None = None, address_type: str | None = None, data: dict[str, Any] | None = None, branch: str | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> CoreNodeSync | SchemaTypeSync | None
+```
+
+Allocate a new IP address by using the provided resource pool.
+
+**Args:**
+
+- `resource_pool`: Node corresponding to the pool to allocate resources from.
+- `identifier`: Value to perform idempotent allocation, the same resource will be returned for a given identifier.
+- `prefix_length`: Length of the prefix to set on the address to allocate.
+- `address_type`: Kind of the address to allocate.
+- `data`: A key/value map to use to set attributes values on the allocated address.
+- `branch`: Name of the branch to allocate from. Defaults to default_branch.
+- `timeout`: Overrides default timeout used when querying the GraphQL API. Specified in seconds.
+- `tracker`: Identifier attached to the request to track the query.
+- `raise_for_error`: Whether to raise an exception if the allocation fails; when False, None may be returned instead.
+
+**Returns:**
+
+- InfrahubNodeSync: Node corresponding to the allocated resource.
+
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync], identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> SchemaTypeSync
+```
+
+
+Show 6 other overloads
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync], identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> SchemaTypeSync | None
+```
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync], identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> SchemaTypeSync
+```
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> CoreNodeSync
+```
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> CoreNodeSync | None
+```
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> CoreNodeSync | None
+```
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync] | None = None, identifier: str | None = None, prefix_length: int | None = None, member_type: str | None = None, prefix_type: str | None = None, data: dict[str, Any] | None = None, branch: str | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> CoreNodeSync | SchemaTypeSync | None
+```
+
+Allocate a new IP prefix by using the provided resource pool.
+
+**Args:**
+
+- `resource_pool`: Node corresponding to the pool to allocate resources from.
+- `identifier`: Value to perform idempotent allocation, the same resource will be returned for a given identifier.
+- `prefix_length`: Length of the prefix to allocate.
+- `member_type`: Member type of the prefix to allocate.
+- `prefix_type`: Kind of the prefix to allocate.
+- `data`: A key/value map to use to set attributes values on the allocated prefix.
+- `branch`: Name of the branch to allocate from. Defaults to default_branch.
+- `timeout`: Overrides default timeout used when querying the GraphQL API. Specified in seconds.
+- `tracker`: Identifier attached to the request to track the query.
+- `raise_for_error`: Whether to raise an exception if the allocation fails; when False, None may be returned instead.
+
+**Returns:**
+
+- InfrahubNodeSync: Node corresponding to the allocated resource.
+
+
+#### `repository_update_commit`
+
+```python
+repository_update_commit(self, branch_name: str, repository_id: str, commit: str, is_read_only: bool = False) -> bool
+```
+
+#### `refresh_login`
+
+```python
+refresh_login(self) -> None
+```
+
+#### `login`
+
+```python
+login(self, refresh: bool = False) -> None
+```
+
+#### `convert_object_type`
+
+```python
+convert_object_type(self, node_id: str, target_kind: str, branch: str | None = None, fields_mapping: dict[str, ConversionFieldInput] | None = None) -> InfrahubNodeSync
+```
+
+Convert a given node to another kind on a given branch. `fields_mapping` keys are target field names
+and their values indicate how to fill in these fields. Any mandatory field not having an equivalent field
+in the source kind should be specified in this mapping. See https://docs.infrahub.app/guides/object-convert-type
+for more information.
+
+### `ProcessRelationsNode`
+
+### `ProxyConfig`
+
+### `ProxyConfigSync`
+
+### `ProcessRelationsNodeSync`
+
+### `BaseClient`
+
+Base class for InfrahubClient and InfrahubClientSync
+
+**Methods:**
+
+#### `request_context`
+
+```python
+request_context(self) -> RequestContext | None
+```
+
+#### `request_context`
+
+```python
+request_context(self, request_context: RequestContext) -> None
+```
+
+#### `start_tracking`
+
+```python
+start_tracking(self, identifier: str | None = None, params: dict[str, Any] | None = None, delete_unused_nodes: bool = False, group_type: str | None = None, group_params: dict[str, Any] | None = None, branch: str | None = None) -> Self
+```
+
+#### `set_context_properties`
+
+```python
+set_context_properties(self, identifier: str, params: dict[str, str] | None = None, delete_unused_nodes: bool = True, reset: bool = True, group_type: str | None = None, group_params: dict[str, Any] | None = None, branch: str | None = None) -> None
+```
+
+## Functions
+
+### `handle_relogin`
+
+```python
+handle_relogin(func: Callable[..., Coroutine[Any, Any, httpx.Response]]) -> Callable[..., Coroutine[Any, Any, httpx.Response]]
+```
+
+### `handle_relogin_sync`
+
+```python
+handle_relogin_sync(func: Callable[..., httpx.Response]) -> Callable[..., httpx.Response]
+```
+
+### `raise_for_error_deprecation_warning`
+
+```python
+raise_for_error_deprecation_warning(value: bool | None) -> None
+```
diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/attribute.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/attribute.mdx
new file mode 100644
index 00000000..d08c7fc5
--- /dev/null
+++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/attribute.mdx
@@ -0,0 +1,38 @@
+---
+title: attribute
+sidebarTitle: attribute
+---
+
+# `infrahub_sdk.node.attribute`
+
+## Classes
+
+### `Attribute`
+
+Represents an attribute of a Node, including its schema, value, and properties.
+
+**Methods:**
+
+#### `value`
+
+```python
+value(self) -> Any
+```
+
+#### `value`
+
+```python
+value(self, value: Any) -> None
+```
+
+#### `is_from_pool_attribute`
+
+```python
+is_from_pool_attribute(self) -> bool
+```
+
+Check whether this attribute's value is sourced from a resource pool.
+
+**Returns:**
+
+- True if the attribute value is a resource pool node or was explicitly allocated from a pool.
diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/constants.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/constants.mdx
new file mode 100644
index 00000000..290f923e
--- /dev/null
+++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/constants.mdx
@@ -0,0 +1,8 @@
+---
+title: constants
+sidebarTitle: constants
+---
+
+# `infrahub_sdk.node.constants`
+
+*This module is empty or contains only private/internal implementations.*
diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/metadata.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/metadata.mdx
new file mode 100644
index 00000000..6175236f
--- /dev/null
+++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/metadata.mdx
@@ -0,0 +1,16 @@
+---
+title: metadata
+sidebarTitle: metadata
+---
+
+# `infrahub_sdk.node.metadata`
+
+## Classes
+
+### `NodeMetadata`
+
+Represents metadata about a node (created_at, created_by, updated_at, updated_by).
+
+### `RelationshipMetadata`
+
+Represents metadata about a relationship edge (updated_at, updated_by).
diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/node.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/node.mdx
new file mode 100644
index 00000000..e23120dd
--- /dev/null
+++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/node.mdx
@@ -0,0 +1,526 @@
+---
+title: node
+sidebarTitle: node
+---
+
+# `infrahub_sdk.node.node`
+
+## Classes
+
+### `InfrahubNode`
+
+Represents an Infrahub node in an asynchronous context.
+
+**Methods:**
+
+#### `from_graphql`
+
+```python
+from_graphql(cls, client: InfrahubClient, branch: str, data: dict, schema: MainSchemaTypesAPI | None = None, timeout: int | None = None) -> Self
+```
+
+#### `generate`
+
+```python
+generate(self, nodes: list[str] | None = None) -> None
+```
+
+#### `artifact_generate`
+
+```python
+artifact_generate(self, name: str) -> None
+```
+
+#### `artifact_fetch`
+
+```python
+artifact_fetch(self, name: str) -> str | dict[str, Any]
+```
+
+#### `download_file`
+
+```python
+download_file(self, dest: Path | None = None) -> bytes | int
+```
+
+Download the file content from this FileObject node.
+
+This method is only available for nodes that inherit from CoreFileObject.
+The node must have been saved (have an id) before calling this method.
+
+**Args:**
+
+- `dest`: Optional destination path. If provided, the file will be streamed
+ directly to this path (memory-efficient for large files) and the
+ number of bytes written will be returned. If not provided, the
+ file content will be returned as bytes.
+
+**Returns:**
+
+- If ``dest`` is None: The file content as bytes.
+- If ``dest`` is provided: The number of bytes written to the file.
+
+**Raises:**
+
+- `FeatureNotSupportedError`: If this node doesn't inherit from CoreFileObject.
+- `ValueError`: If the node hasn't been saved yet or file not found.
+- `AuthenticationError`: If authentication fails.
+
+**Examples:**
+
+```python
+>>> # Download to memory
+>>> content = await contract.download_file()
+>>> # Stream to file (memory-efficient for large files)
+>>> bytes_written = await contract.download_file(dest=Path("/tmp/contract.pdf"))
+```
+
+#### `delete`
+
+```python
+delete(self, timeout: int | None = None, request_context: RequestContext | None = None) -> None
+```
+
+#### `save`
+
+```python
+save(self, allow_upsert: bool = False, update_group_context: bool | None = None, timeout: int | None = None, request_context: RequestContext | None = None) -> None
+```
+
+#### `generate_query_data`
+
+```python
+generate_query_data(self, filters: dict[str, Any] | None = None, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, fragment: bool = False, prefetch_relationships: bool = False, partial_match: bool = False, property: bool = False, order: Order | None = None, include_metadata: bool = False) -> dict[str, Any | dict]
+```
+
+#### `generate_query_data_node`
+
+```python
+generate_query_data_node(self, include: list[str] | None = None, exclude: list[str] | None = None, inherited: bool = True, insert_alias: bool = False, prefetch_relationships: bool = False, property: bool = False, include_metadata: bool = False) -> dict[str, Any | dict]
+```
+
+Generate the node part of a GraphQL Query with attributes and nodes.
+
+**Args:**
+
+- `include`: List of attributes or relationships to include. Defaults to None.
+- `exclude`: List of attributes or relationships to exclude. Defaults to None.
+- `inherited`: Indicates whether the attributes and relationships inherited from generics should be included as well.
+  Defaults to True.
+- `insert_alias`: If True, inserts aliases in the query for each attribute or relationship.
+- `prefetch_relationships`: If True, pre-fetches relationship data as part of the query.
+- `include_metadata`: If True, includes node_metadata and relationship_metadata in the query.
+
+**Returns:**
+
+- dict\[str, Union\[Any, Dict]]: GraphQL query in dictionary format
+
+#### `add_relationships`
+
+```python
+add_relationships(self, relation_to_update: str, related_nodes: list[str]) -> None
+```
+
+#### `remove_relationships`
+
+```python
+remove_relationships(self, relation_to_update: str, related_nodes: list[str]) -> None
+```
+
+#### `create`
+
+```python
+create(self, allow_upsert: bool = False, timeout: int | None = None, request_context: RequestContext | None = None) -> None
+```
+
+#### `update`
+
+```python
+update(self, do_full_update: bool = False, timeout: int | None = None, request_context: RequestContext | None = None) -> None
+```
+
+#### `get_pool_allocated_resources`
+
+```python
+get_pool_allocated_resources(self, resource: InfrahubNode) -> list[InfrahubNode]
+```
+
+Fetch all nodes that were allocated for the pool and a given resource.
+
+**Args:**
+
+- `resource`: The resource from which the nodes were allocated.
+
+**Returns:**
+
+- list\[InfrahubNode]: The allocated nodes.
+
+#### `get_pool_resources_utilization`
+
+```python
+get_pool_resources_utilization(self) -> list[dict[str, Any]]
+```
+
+Fetch the utilization of each resource for the pool.
+
+**Returns:**
+
+- list\[dict\[str, Any]]: A list containing the allocation numbers for each resource of the pool.
+
+#### `get_flat_value`
+
+```python
+get_flat_value(self, key: str, separator: str = '__') -> Any
+```
+
+Query recursively a value defined in a flat notation (string), on a hierarchy of objects
+
+**Examples:**
+
+name__value
+module.object.value
+
+#### `extract`
+
+```python
+extract(self, params: dict[str, str]) -> dict[str, Any]
+```
+
+Extract some data points defined in a flat notation.
+
+### `InfrahubNodeSync`
+
+Represents an Infrahub node in a synchronous context.
+
+**Methods:**
+
+#### `from_graphql`
+
+```python
+from_graphql(cls, client: InfrahubClientSync, branch: str, data: dict, schema: MainSchemaTypesAPI | None = None, timeout: int | None = None) -> Self
+```
+
+#### `generate`
+
+```python
+generate(self, nodes: list[str] | None = None) -> None
+```
+
+#### `artifact_generate`
+
+```python
+artifact_generate(self, name: str) -> None
+```
+
+#### `artifact_fetch`
+
+```python
+artifact_fetch(self, name: str) -> str | dict[str, Any]
+```
+
+#### `download_file`
+
+```python
+download_file(self, dest: Path | None = None) -> bytes | int
+```
+
+Download the file content from this FileObject node.
+
+This method is only available for nodes that inherit from CoreFileObject.
+The node must have been saved (have an id) before calling this method.
+
+**Args:**
+
+- `dest`: Optional destination path. If provided, the file will be streamed
+ directly to this path (memory-efficient for large files) and the
+ number of bytes written will be returned. If not provided, the
+ file content will be returned as bytes.
+
+**Returns:**
+
+- If ``dest`` is None: The file content as bytes.
+- If ``dest`` is provided: The number of bytes written to the file.
+
+**Raises:**
+
+- `FeatureNotSupportedError`: If this node doesn't inherit from CoreFileObject.
+- `ValueError`: If the node hasn't been saved yet or file not found.
+- `AuthenticationError`: If authentication fails.
+
+**Examples:**
+
+```python
+>>> # Download to memory
+>>> content = contract.download_file()
+>>> # Stream to file (memory-efficient for large files)
+>>> bytes_written = contract.download_file(dest=Path("/tmp/contract.pdf"))
+```
+
+#### `delete`
+
+```python
+delete(self, timeout: int | None = None, request_context: RequestContext | None = None) -> None
+```
+
+#### `save`
+
+```python
+save(self, allow_upsert: bool = False, update_group_context: bool | None = None, timeout: int | None = None, request_context: RequestContext | None = None) -> None
+```
+
+#### `generate_query_data`
+
+```python
+generate_query_data(self, filters: dict[str, Any] | None = None, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, fragment: bool = False, prefetch_relationships: bool = False, partial_match: bool = False, property: bool = False, order: Order | None = None, include_metadata: bool = False) -> dict[str, Any | dict]
+```
+
+#### `generate_query_data_node`
+
+```python
+generate_query_data_node(self, include: list[str] | None = None, exclude: list[str] | None = None, inherited: bool = True, insert_alias: bool = False, prefetch_relationships: bool = False, property: bool = False, include_metadata: bool = False) -> dict[str, Any | dict]
+```
+
+Generate the node part of a GraphQL Query with attributes and nodes.
+
+**Args:**
+
+- `include`: List of attributes or relationships to include. Defaults to None.
+- `exclude`: List of attributes or relationships to exclude. Defaults to None.
+- `inherited`: Indicates whether the attributes and relationships inherited from generics should be included as well.
+  Defaults to True.
+- `insert_alias`: If True, inserts aliases in the query for each attribute or relationship.
+- `prefetch_relationships`: If True, pre-fetches relationship data as part of the query.
+- `include_metadata`: If True, includes node_metadata and relationship_metadata in the query.
+
+**Returns:**
+
+- dict\[str, Union\[Any, Dict]]: GraphQL query in dictionary format
+
+#### `add_relationships`
+
+```python
+add_relationships(self, relation_to_update: str, related_nodes: list[str]) -> None
+```
+
+#### `remove_relationships`
+
+```python
+remove_relationships(self, relation_to_update: str, related_nodes: list[str]) -> None
+```
+
+#### `create`
+
+```python
+create(self, allow_upsert: bool = False, timeout: int | None = None, request_context: RequestContext | None = None) -> None
+```
+
+#### `update`
+
+```python
+update(self, do_full_update: bool = False, timeout: int | None = None, request_context: RequestContext | None = None) -> None
+```
+
+#### `get_pool_allocated_resources`
+
+```python
+get_pool_allocated_resources(self, resource: InfrahubNodeSync) -> list[InfrahubNodeSync]
+```
+
+Fetch all nodes that were allocated for the pool and a given resource.
+
+**Args:**
+
+- `resource`: The resource from which the nodes were allocated.
+
+**Returns:**
+
+- list\[InfrahubNodeSync]: The allocated nodes.
+
+#### `get_pool_resources_utilization`
+
+```python
+get_pool_resources_utilization(self) -> list[dict[str, Any]]
+```
+
+Fetch the utilization of each resource for the pool.
+
+**Returns:**
+
+- list\[dict\[str, Any]]: A list containing the allocation numbers for each resource of the pool.
+
+#### `get_flat_value`
+
+```python
+get_flat_value(self, key: str, separator: str = '__') -> Any
+```
+
+Query recursively a value defined in a flat notation (string), on a hierarchy of objects
+
+**Examples:**
+
+name__value
+module.object.value
+
+#### `extract`
+
+```python
+extract(self, params: dict[str, str]) -> dict[str, Any]
+```
+
+Extract some data points defined in a flat notation.
+
+### `InfrahubNodeBase`
+
+Base class for InfrahubNode and InfrahubNodeSync
+
+**Methods:**
+
+#### `get_branch`
+
+```python
+get_branch(self) -> str
+```
+
+#### `get_path_value`
+
+```python
+get_path_value(self, path: str) -> Any
+```
+
+#### `get_human_friendly_id`
+
+```python
+get_human_friendly_id(self) -> list[str] | None
+```
+
+#### `get_human_friendly_id_as_string`
+
+```python
+get_human_friendly_id_as_string(self, include_kind: bool = False) -> str | None
+```
+
+#### `hfid`
+
+```python
+hfid(self) -> list[str] | None
+```
+
+#### `hfid_str`
+
+```python
+hfid_str(self) -> str | None
+```
+
+#### `get_node_metadata`
+
+```python
+get_node_metadata(self) -> NodeMetadata | None
+```
+
+Returns the node metadata (created_at, created_by, updated_at, updated_by) if fetched.
+
+#### `get_kind`
+
+```python
+get_kind(self) -> str
+```
+
+#### `get_all_kinds`
+
+```python
+get_all_kinds(self) -> list[str]
+```
+
+#### `is_ip_prefix`
+
+```python
+is_ip_prefix(self) -> bool
+```
+
+#### `is_ip_address`
+
+```python
+is_ip_address(self) -> bool
+```
+
+#### `is_resource_pool`
+
+```python
+is_resource_pool(self) -> bool
+```
+
+#### `is_file_object`
+
+```python
+is_file_object(self) -> bool
+```
+
+Check if this node inherits from CoreFileObject and supports file uploads.
+
+#### `upload_from_path`
+
+```python
+upload_from_path(self, path: Path) -> None
+```
+
+Set a file from disk to be uploaded when saving this FileObject node.
+
+The file will be streamed during upload, avoiding loading the entire file into memory.
+
+**Args:**
+
+- `path`: Path to the file on disk.
+
+**Raises:**
+
+- `FeatureNotSupportedError`: If this node doesn't inherit from CoreFileObject.
+
+#### `upload_from_bytes`
+
+```python
+upload_from_bytes(self, content: bytes | BinaryIO, name: str) -> None
+```
+
+Set content to be uploaded when saving this FileObject node.
+
+The content can be provided as bytes or a file-like object.
+Using BinaryIO is recommended for large content to stream during upload.
+
+**Args:**
+
+- `content`: The file content as bytes or a file-like object.
+- `name`: The filename to use for the uploaded file.
+
+**Raises:**
+
+- `FeatureNotSupportedError`: If this node doesn't inherit from CoreFileObject.
+
+**Examples:**
+
+```python
+>>> # Using bytes (for small files)
+>>> node.upload_from_bytes(content=b"file content", name="example.txt")
+>>> # Using file-like object (for large files)
+>>> with open("/path/to/file.bin", "rb") as f:
+... node.upload_from_bytes(content=f, name="file.bin")
+```
+
+#### `clear_file`
+
+```python
+clear_file(self) -> None
+```
+
+Clear any pending file content.
+
+#### `get_raw_graphql_data`
+
+```python
+get_raw_graphql_data(self) -> dict | None
+```
+
+#### `generate_query_data_init`
+
+```python
+generate_query_data_init(self, filters: dict[str, Any] | None = None, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, partial_match: bool = False, order: Order | None = None, include_metadata: bool = False) -> dict[str, Any | dict]
+```
diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/parsers.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/parsers.mdx
new file mode 100644
index 00000000..f70c6788
--- /dev/null
+++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/parsers.mdx
@@ -0,0 +1,16 @@
+---
+title: parsers
+sidebarTitle: parsers
+---
+
+# `infrahub_sdk.node.parsers`
+
+## Functions
+
+### `parse_human_friendly_id`
+
+```python
+parse_human_friendly_id(hfid: str | list[str]) -> tuple[str | None, list[str]]
+```
+
+Parse a human-friendly ID into a kind and an identifier.
diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/property.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/property.mdx
new file mode 100644
index 00000000..a7400483
--- /dev/null
+++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/property.mdx
@@ -0,0 +1,12 @@
+---
+title: property
+sidebarTitle: property
+---
+
+# `infrahub_sdk.node.property`
+
+## Classes
+
+### `NodeProperty`
+
+Represents a property of a node, typically used for metadata like display labels.
diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/related_node.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/related_node.mdx
new file mode 100644
index 00000000..edc1112c
--- /dev/null
+++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/related_node.mdx
@@ -0,0 +1,126 @@
+---
+title: related_node
+sidebarTitle: related_node
+---
+
+# `infrahub_sdk.node.related_node`
+
+## Classes
+
+### `RelatedNodeBase`
+
+Base class for representing a related node in a relationship.
+
+**Methods:**
+
+#### `id`
+
+```python
+id(self) -> str | None
+```
+
+#### `hfid`
+
+```python
+hfid(self) -> list[Any] | None
+```
+
+#### `hfid_str`
+
+```python
+hfid_str(self) -> str | None
+```
+
+#### `is_resource_pool`
+
+```python
+is_resource_pool(self) -> bool
+```
+
+#### `initialized`
+
+```python
+initialized(self) -> bool
+```
+
+#### `display_label`
+
+```python
+display_label(self) -> str | None
+```
+
+#### `typename`
+
+```python
+typename(self) -> str | None
+```
+
+#### `kind`
+
+```python
+kind(self) -> str | None
+```
+
+#### `is_from_profile`
+
+```python
+is_from_profile(self) -> bool
+```
+
+Return whether this relationship was set from a profile. Done by checking if the source is of a profile kind.
+
+#### `get_relationship_metadata`
+
+```python
+get_relationship_metadata(self) -> RelationshipMetadata | None
+```
+
+Returns the relationship metadata (updated_at, updated_by) if fetched.
+
+### `RelatedNode`
+
+Represents a related node in an asynchronous context.
+
+**Methods:**
+
+#### `fetch`
+
+```python
+fetch(self, timeout: int | None = None) -> None
+```
+
+#### `peer`
+
+```python
+peer(self) -> InfrahubNode
+```
+
+#### `get`
+
+```python
+get(self) -> InfrahubNode
+```
+
+### `RelatedNodeSync`
+
+Represents a related node in a synchronous context.
+
+**Methods:**
+
+#### `fetch`
+
+```python
+fetch(self, timeout: int | None = None) -> None
+```
+
+#### `peer`
+
+```python
+peer(self) -> InfrahubNodeSync
+```
+
+#### `get`
+
+```python
+get(self) -> InfrahubNodeSync
+```
diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/relationship.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/relationship.mdx
new file mode 100644
index 00000000..567b7c8d
--- /dev/null
+++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/relationship.mdx
@@ -0,0 +1,114 @@
+---
+title: relationship
+sidebarTitle: relationship
+---
+
+# `infrahub_sdk.node.relationship`
+
+## Classes
+
+### `RelationshipManagerBase`
+
+Base class for RelationshipManager and RelationshipManagerSync
+
+**Methods:**
+
+#### `peer_ids`
+
+```python
+peer_ids(self) -> list[str]
+```
+
+#### `peer_hfids`
+
+```python
+peer_hfids(self) -> list[list[Any]]
+```
+
+#### `peer_hfids_str`
+
+```python
+peer_hfids_str(self) -> list[str]
+```
+
+#### `has_update`
+
+```python
+has_update(self) -> bool
+```
+
+#### `is_from_profile`
+
+```python
+is_from_profile(self) -> bool
+```
+
+Return whether this relationship was set from a profile. All its peers must be from a profile.
+
+### `RelationshipManager`
+
+Manages relationships of a node in an asynchronous context.
+
+**Methods:**
+
+#### `fetch`
+
+```python
+fetch(self) -> None
+```
+
+#### `add`
+
+```python
+add(self, data: str | RelatedNode | dict) -> None
+```
+
+Add a new peer to this relationship.
+
+#### `extend`
+
+```python
+extend(self, data: Iterable[str | RelatedNode | dict]) -> None
+```
+
+Add new peers to this relationship.
+
+#### `remove`
+
+```python
+remove(self, data: str | RelatedNode | dict) -> None
+```
+
+### `RelationshipManagerSync`
+
+Manages relationships of a node in a synchronous context.
+
+**Methods:**
+
+#### `fetch`
+
+```python
+fetch(self) -> None
+```
+
+#### `add`
+
+```python
+add(self, data: str | RelatedNodeSync | dict) -> None
+```
+
+Add a new peer to this relationship.
+
+#### `extend`
+
+```python
+extend(self, data: Iterable[str | RelatedNodeSync | dict]) -> None
+```
+
+Add new peers to this relationship.
+
+#### `remove`
+
+```python
+remove(self, data: str | RelatedNodeSync | dict) -> None
+```
diff --git a/docs/docs/python-sdk/topics/object_file.mdx b/docs/docs/python-sdk/topics/object_file.mdx
index 751599f0..75744d89 100644
--- a/docs/docs/python-sdk/topics/object_file.mdx
+++ b/docs/docs/python-sdk/topics/object_file.mdx
@@ -68,13 +68,13 @@ spec:
> Multiple documents in a single YAML file are also supported, each document will be loaded separately. Documents are separated by `---`
-### Data Processing Parameters
+### Data processing parameters
The `parameters` field controls how the data in the object file is processed before loading into Infrahub:
-| Parameter | Description | Default |
-| -------------- | ------------------------------------------------------------------------------------------------------- | ------- |
-| `expand_range` | When set to `true`, range patterns (e.g., `[1-5]`) in string fields are expanded into multiple objects. | `false` |
+| Parameter | Description | Default |
+| -------------- | -------------------------------------------------------------------------------------------------------------- | ------- |
+| `expand_range` | When set to `true`, range patterns (for example, `[1-5]`) in string fields are expanded into multiple objects. | `false` |
When `expand_range` is not specified, it defaults to `false`.
@@ -208,9 +208,9 @@ Metadata support is planned for future releases. Currently, the Object file does
3. Validate object files before loading them into production environments.
4. Use comments in your YAML files to document complex relationships or dependencies.
-## Range Expansion in Object Files
+## Range expansion in object files
-The Infrahub Python SDK supports **range expansion** for string fields in object files when the `parameters > expand_range` is set to `true`. This feature allows you to specify a range pattern (e.g., `[1-5]`) in any string value, and the SDK will automatically expand it into multiple objects during validation and processing.
+The Infrahub Python SDK supports **range expansion** for string fields in object files when the `parameters > expand_range` is set to `true`. This feature allows you to specify a range pattern (for example, `[1-5]`) in any string value, and the SDK will automatically expand it into multiple objects during validation and processing.
```yaml
---
@@ -225,7 +225,7 @@ spec:
type: Country
```
-### How Range Expansion Works
+### How range expansion works
- Any string field containing a pattern like `[1-5]`, `[10-15]`, or `[1,3,5]` will be expanded into multiple objects.
- If multiple fields in the same object use range expansion, **all expanded lists must have the same length**. If not, validation will fail.
@@ -233,7 +233,7 @@ spec:
### Examples
-#### Single Field Expansion
+#### Single field expansion
```yaml
spec:
@@ -256,7 +256,7 @@ This will expand to:
type: Country
```
-#### Multiple Field Expansion (Matching Lengths)
+#### Multiple field expansion (matching lengths)
```yaml
spec:
@@ -283,7 +283,7 @@ This will expand to:
type: Country
```
-#### Error: Mismatched Range Lengths
+#### Error: mismatched range lengths
If you use ranges of different lengths in multiple fields:
diff --git a/docs/docs_generation/__init__.py b/docs/docs_generation/__init__.py
new file mode 100644
index 00000000..4b462a71
--- /dev/null
+++ b/docs/docs_generation/__init__.py
@@ -0,0 +1,26 @@
+from __future__ import annotations
+
+from .content_gen_methods import (
+ ACommand,
+ ADocContentGenMethod,
+ CommandOutputDocContentGenMethod,
+ FilePrintingDocContentGenMethod,
+ Jinja2DocContentGenMethod,
+ MdxCodeDocumentation,
+ TyperGroupCommand,
+ TyperSingleCommand,
+)
+from .pages import DocPage, MDXDocPage
+
+__all__ = [
+ "ACommand",
+ "ADocContentGenMethod",
+ "CommandOutputDocContentGenMethod",
+ "DocPage",
+ "FilePrintingDocContentGenMethod",
+ "Jinja2DocContentGenMethod",
+ "MDXDocPage",
+ "MdxCodeDocumentation",
+ "TyperGroupCommand",
+ "TyperSingleCommand",
+]
diff --git a/docs/docs_generation/content_gen_methods/__init__.py b/docs/docs_generation/content_gen_methods/__init__.py
new file mode 100644
index 00000000..792539be
--- /dev/null
+++ b/docs/docs_generation/content_gen_methods/__init__.py
@@ -0,0 +1,28 @@
+from __future__ import annotations
+
+from docs.docs_generation.content_gen_methods.command.command import ACommand
+from docs.docs_generation.content_gen_methods.command.typer_command import TyperGroupCommand, TyperSingleCommand
+from docs.docs_generation.content_gen_methods.mdx.mdx_code_doc import ACodeDocumentation, MdxCodeDocumentation
+from docs.docs_generation.content_gen_methods.mdx.mdx_collapsed_overload_code_doc import (
+ CollapsedOverloadCodeDocumentation,
+)
+from docs.docs_generation.content_gen_methods.mdx.mdx_ordered_code_doc import OrderedMdxCodeDocumentation
+
+from .base import ADocContentGenMethod
+from .command_output_method import CommandOutputDocContentGenMethod
+from .file_printing_method import FilePrintingDocContentGenMethod
+from .jinja2_method import Jinja2DocContentGenMethod
+
+__all__ = [
+ "ACodeDocumentation",
+ "ACommand",
+ "ADocContentGenMethod",
+ "CollapsedOverloadCodeDocumentation",
+ "CommandOutputDocContentGenMethod",
+ "FilePrintingDocContentGenMethod",
+ "Jinja2DocContentGenMethod",
+ "MdxCodeDocumentation",
+ "OrderedMdxCodeDocumentation",
+ "TyperGroupCommand",
+ "TyperSingleCommand",
+]
diff --git a/docs/docs_generation/content_gen_methods/base.py b/docs/docs_generation/content_gen_methods/base.py
new file mode 100644
index 00000000..ddab2dde
--- /dev/null
+++ b/docs/docs_generation/content_gen_methods/base.py
@@ -0,0 +1,15 @@
+from __future__ import annotations
+
+from abc import ABC, abstractmethod
+
+
+class ADocContentGenMethod(ABC):
+ """Strategy for producing documentation content as a string.
+
+ Each subclass implements ``apply()`` for a specific content source
+ (Jinja2 template, CLI command, pre-generated file, ...).
+ """
+
+ @abstractmethod
+ def apply(self) -> str:
+ """Generate the documentation content."""
diff --git a/docs/docs_generation/content_gen_methods/command/__init__.py b/docs/docs_generation/content_gen_methods/command/__init__.py
new file mode 100644
index 00000000..a1bc4e0a
--- /dev/null
+++ b/docs/docs_generation/content_gen_methods/command/__init__.py
@@ -0,0 +1,7 @@
+from __future__ import annotations
+
+from .command import ACommand
+
+__all__ = [
+ "ACommand",
+]
diff --git a/docs/docs_generation/content_gen_methods/command/command.py b/docs/docs_generation/content_gen_methods/command/command.py
new file mode 100644
index 00000000..83a646c2
--- /dev/null
+++ b/docs/docs_generation/content_gen_methods/command/command.py
@@ -0,0 +1,11 @@
+from __future__ import annotations
+
+from abc import ABC, abstractmethod
+
+
+class ACommand(ABC):
+ """Abstract base for building a shell command string."""
+
+ @abstractmethod
+ def build(self) -> str:
+ """Return the full command string to execute."""
diff --git a/docs/docs_generation/content_gen_methods/command/typer_command.py b/docs/docs_generation/content_gen_methods/command/typer_command.py
new file mode 100644
index 00000000..180f9b57
--- /dev/null
+++ b/docs/docs_generation/content_gen_methods/command/typer_command.py
@@ -0,0 +1,26 @@
+from __future__ import annotations
+
+from abc import ABC
+
+from .command import ACommand
+
+
+class ATyperCommand(ACommand, ABC):
+ def __init__(self, name: str) -> None:
+ self.name = name
+
+
+class TyperSingleCommand(ATyperCommand):
+ """A single (non-group) infrahubctl command."""
+
+ def build(self) -> str:
+ return (
+ f'uv run typer --func {self.name} infrahub_sdk.ctl.cli_commands utils docs --name "infrahubctl {self.name}"'
+ )
+
+
+class TyperGroupCommand(ATyperCommand):
+ """An infrahubctl command group (e.g. ``branch``, ``schema``)."""
+
+ def build(self) -> str:
+ return f'uv run typer infrahub_sdk.ctl.{self.name} utils docs --name "infrahubctl {self.name}"'
diff --git a/docs/docs_generation/content_gen_methods/command_output_method.py b/docs/docs_generation/content_gen_methods/command_output_method.py
new file mode 100644
index 00000000..bbb32f04
--- /dev/null
+++ b/docs/docs_generation/content_gen_methods/command_output_method.py
@@ -0,0 +1,51 @@
+from __future__ import annotations
+
+import tempfile
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from invoke import Context
+
+ from .command import ACommand
+
+from .base import ADocContentGenMethod
+
+
+class CommandOutputDocContentGenMethod(ADocContentGenMethod):
+ """Run a command and return the content it writes to a temporary file.
+
+    ``--output <file>`` is appended to the command automatically.
+
+ Args:
+ context: Invoke execution context.
+ working_directory: Directory in which the command is executed.
+ command: An ``ACommand`` whose ``build()`` returns the base command string.
+
+ Example::
+
+ method = CommandOutputDocContentGenMethod(
+ context=ctx,
+ working_directory=project_root,
+            command=TyperSingleCommand(name="dump"),
+ )
+ content = method.apply()
+ """
+
+ def __init__(self, context: Context, working_directory: Path, command: ACommand) -> None:
+ self.context = context
+ self.working_directory = working_directory
+ self.command = command
+
+ def apply(self) -> str:
+ with tempfile.NamedTemporaryFile(mode="w", suffix=".mdx", delete=False, encoding="utf-8") as tmp:
+ tmp_path = Path(tmp.name)
+
+ try:
+ full_cmd = f"{self.command.build()} --output {tmp_path}"
+ with self.context.cd(self.working_directory):
+ self.context.run(full_cmd)
+
+ return tmp_path.read_text(encoding="utf-8")
+ finally:
+ tmp_path.unlink(missing_ok=True)
diff --git a/docs/docs_generation/content_gen_methods/file_printing_method.py b/docs/docs_generation/content_gen_methods/file_printing_method.py
new file mode 100644
index 00000000..1b07aa20
--- /dev/null
+++ b/docs/docs_generation/content_gen_methods/file_printing_method.py
@@ -0,0 +1,22 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from .base import ADocContentGenMethod
+
+if TYPE_CHECKING:
+ from docs.docs_generation.content_gen_methods.mdx.mdx_code_doc import MdxFile
+
+
+class FilePrintingDocContentGenMethod(ADocContentGenMethod):
+ """Return the content of an already-generated file as-is.
+
+ Args:
+ file: The ``MdxFile`` whose content will be returned.
+ """
+
+ def __init__(self, file: MdxFile) -> None:
+ self.file = file
+
+ def apply(self) -> str:
+ return self.file.content
diff --git a/docs/docs_generation/content_gen_methods/jinja2_method.py b/docs/docs_generation/content_gen_methods/jinja2_method.py
new file mode 100644
index 00000000..c91c0d7c
--- /dev/null
+++ b/docs/docs_generation/content_gen_methods/jinja2_method.py
@@ -0,0 +1,39 @@
+from __future__ import annotations
+
+import asyncio
+from typing import TYPE_CHECKING, Any
+
+from .base import ADocContentGenMethod
+
+if TYPE_CHECKING:
+ from infrahub_sdk.template import Jinja2Template
+
+
+class Jinja2DocContentGenMethod(ADocContentGenMethod):
+ """Render a template using a ``Jinja2Template``.
+
+ The template engine is async; rendering is run synchronously via ``asyncio.run``.
+
+ Args:
+ template: A ``Jinja2Template`` instance.
+ template_variables: Variables passed to the template during rendering.
+
+ Example::
+
+ template = Jinja2Template(
+ template=Path("sdk_template_reference.j2"),
+ template_directory=docs_dir / "_templates",
+ )
+ method = Jinja2DocContentGenMethod(
+ template=template,
+ template_variables={"builtin": BUILTIN_FILTERS},
+ )
+ content = method.apply()
+ """
+
+ def __init__(self, template: Jinja2Template, template_variables: dict[str, Any]) -> None:
+ self.template = template
+ self.template_variables = template_variables
+
+ def apply(self) -> str:
+ return asyncio.run(self.template.render(variables=self.template_variables))
diff --git a/docs/docs_generation/content_gen_methods/mdx/__init__.py b/docs/docs_generation/content_gen_methods/mdx/__init__.py
new file mode 100644
index 00000000..04032f7c
--- /dev/null
+++ b/docs/docs_generation/content_gen_methods/mdx/__init__.py
@@ -0,0 +1,19 @@
+from __future__ import annotations
+
+from .mdx_code_doc import ACodeDocumentation, MdxCodeDocumentation, MdxFile
+from .mdx_collapsed_overload_code_doc import CollapsedOverloadCodeDocumentation
+from .mdx_collapsed_overload_section import CollapsedOverloadSection
+from .mdx_ordered_code_doc import OrderedMdxCodeDocumentation
+from .mdx_ordered_section import OrderedMdxSection
+from .mdx_section import MdxSection
+
+__all__ = [
+ "ACodeDocumentation",
+ "CollapsedOverloadCodeDocumentation",
+ "CollapsedOverloadSection",
+ "MdxCodeDocumentation",
+ "MdxFile",
+ "MdxSection",
+ "OrderedMdxCodeDocumentation",
+ "OrderedMdxSection",
+]
diff --git a/docs/docs_generation/content_gen_methods/mdx/mdx_code_doc.py b/docs/docs_generation/content_gen_methods/mdx/mdx_code_doc.py
new file mode 100644
index 00000000..1fb3e8c9
--- /dev/null
+++ b/docs/docs_generation/content_gen_methods/mdx/mdx_code_doc.py
@@ -0,0 +1,127 @@
+from __future__ import annotations
+
+import tempfile
+from abc import ABC, abstractmethod
+from dataclasses import dataclass
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from invoke import Context
+
+
+def _wrap_doctest_examples(content: str) -> str:
+ """Wrap bare ``>>>`` doctest blocks in fenced code blocks for MDX compatibility.
+
+ mdxify does not fence doctest examples, so curly braces and brackets
+ in those lines cause MDX/acorn parse errors.
+ """
+ lines = content.split("\n")
+ result: list[str] = []
+ in_fence = False
+ in_doctest = False
+
+ for line in lines:
+ if line.startswith("```"):
+ if in_doctest:
+ result.append("```")
+ in_doctest = False
+ in_fence = not in_fence
+ result.append(line)
+ continue
+
+ if in_fence:
+ result.append(line)
+ continue
+
+ if line.startswith(">>>"):
+ if not in_doctest:
+ result.append("```python")
+ in_doctest = True
+ result.append(line)
+ elif in_doctest:
+ if not line.strip() or line.startswith("#"):
+ result.append("```")
+ in_doctest = False
+ result.append(line)
+ else:
+ result.append(line)
+ else:
+ result.append(line)
+
+ if in_doctest:
+ result.append("```")
+
+ return "\n".join(result)
+
+
+def _source_path_from_mdx_name(mdx_filename: str) -> Path:
+ """Derive the Python source file path from an mdxify output filename.
+
+ mdxify names output files using ``-`` as a path separator, e.g.
+ ``infrahub_sdk-node-node.mdx`` comes from ``infrahub_sdk/node/node.py``.
+ """
+ stem = Path(mdx_filename).stem
+ return Path(stem.replace("-", "/")).with_suffix(".py")
+
+
+@dataclass
+class MdxFile:
+ """Content of a single ``.mdx`` file produced by mdxify."""
+
+ name: str
+ content: str
+ source_path: Path
+
+
+class ACodeDocumentation(ABC):
+ """Abstract base for code documentation generators."""
+
+ @abstractmethod
+ def generate(self, context: Context, modules_to_document: list[str]) -> dict[str, MdxFile]: ...
+
+
+class MdxCodeDocumentation(ACodeDocumentation):
+ """Run mdxify once and cache the resulting files.
+
+ Args:
+ file_filters: Substrings to exclude from output filenames.
+ Defaults to ``["__init__"]``.
+
+ Example::
+
+ doc = MdxCodeDocumentation()
+ files = doc.generate(context=ctx, modules_to_document=["infrahub_sdk.node"])
+ """
+
+ def __init__(
+ self,
+ file_filters: list[str] | None = None,
+ ) -> None:
+ self.file_filters = file_filters or ["__init__"]
+ self._cache: dict[frozenset[str], dict[str, MdxFile]] = {}
+
+ def generate(self, context: Context, modules_to_document: list[str]) -> dict[str, MdxFile]:
+ """Return mdxify results, re-running the tool when *modules_to_document* changes."""
+ key = frozenset(modules_to_document)
+ if key not in self._cache:
+ self._cache[key] = self._execute_mdxify(context, modules_to_document)
+ return self._cache[key]
+
+ def _execute_mdxify(self, context: Context, modules_to_document: list[str]) -> dict[str, MdxFile]:
+ with tempfile.TemporaryDirectory() as tmp_dir:
+ exec_cmd = f"mdxify {' '.join(modules_to_document)} --output-dir {tmp_dir}"
+ context.run(exec_cmd, pty=True)
+
+ results: dict[str, MdxFile] = {}
+ for mdx_file in Path(tmp_dir).glob("*.mdx"):
+ if any(f.lower() in mdx_file.name for f in self.file_filters):
+ continue
+ content = _wrap_doctest_examples(mdx_file.read_text(encoding="utf-8"))
+ results[mdx_file.name] = MdxFile(
+ name=mdx_file.name,
+ content=content,
+ source_path=_source_path_from_mdx_name(mdx_file.name),
+ )
+
+ return results
diff --git a/docs/docs_generation/content_gen_methods/mdx/mdx_collapsed_overload_code_doc.py b/docs/docs_generation/content_gen_methods/mdx/mdx_collapsed_overload_code_doc.py
new file mode 100644
index 00000000..1f680d15
--- /dev/null
+++ b/docs/docs_generation/content_gen_methods/mdx/mdx_collapsed_overload_code_doc.py
@@ -0,0 +1,133 @@
+from __future__ import annotations
+
+from dataclasses import dataclass
+from itertools import groupby
+from typing import TYPE_CHECKING
+
+from .mdx_code_doc import ACodeDocumentation, MdxFile
+from .mdx_collapsed_overload_section import CollapsedOverloadSection, MethodSignature
+from .mdx_section import ASection, MdxSection, _parse_sections
+
+if TYPE_CHECKING:
+ from invoke import Context
+
+
+@dataclass
+class CollapsedOverloadCodeDocumentation(ACodeDocumentation):
+ """Decorator around :class:`ACodeDocumentation` that collapses overloaded methods.
+
+ Delegates generation to the wrapped *documentation* instance, then
+ replaces groups of same-name H4 method sections within each class
+ with a :class:`CollapsedOverloadSection` showing the primary overload
+ and a collapsible ```` block for the rest.
+ """
+
+ documentation: ACodeDocumentation
+
+ def generate(self, context: Context, modules_to_document: list[str]) -> dict[str, MdxFile]:
+ """Generate MDX files and collapse overloaded methods in each one."""
+ files = self.documentation.generate(context, modules_to_document)
+ return {name: self._collapse_overloads(mdx_file) for name, mdx_file in files.items()}
+
+ def _collapse_overloads(self, mdx_file: MdxFile) -> MdxFile:
+ """Return a copy of *mdx_file* with overloaded methods collapsed."""
+ lines = mdx_file.content.split("\n")
+ parsed_h2 = _parse_sections(lines, heading_level=2)
+
+ processed_h2: list[ASection] = []
+ for h2 in parsed_h2.sections:
+ processed_h3 = self._process_class_sections(h2.content)
+ if processed_h3 is None:
+ # No subsection means no method to manage
+ processed_h2.append(h2)
+ else:
+ h3_parsed = _parse_sections(h2.content, heading_level=3)
+ new_lines = h3_parsed.reassembled(processed_h3)
+ processed_h2.append(
+ MdxSection(name=h2.name, heading_level=h2.heading_level, _lines=[h2.heading, *new_lines])
+ )
+
+ new_content = "\n".join(parsed_h2.reassembled(processed_h2))
+ return MdxFile(name=mdx_file.name, content=new_content, source_path=mdx_file.source_path)
+
+ def _process_class_sections(self, h2_content: list[str]) -> list[ASection] | None:
+ """Collapse overloads inside each H3 class section, or return ``None`` if nothing changed."""
+ h3_parsed = _parse_sections(h2_content, heading_level=3)
+ if not h3_parsed.sections:
+ return None
+
+ any_collapsed = False
+ processed: list[ASection] = []
+ for h3 in h3_parsed.sections:
+ collapsed_methods = self._collapse_methods_in_class(h3.content)
+ if collapsed_methods is None:
+ processed.append(h3)
+ else:
+ any_collapsed = True
+ h4_parsed = _parse_sections(h3.content, heading_level=4)
+ new_lines = h4_parsed.reassembled(collapsed_methods)
+ processed.append(
+ MdxSection(name=h3.name, heading_level=h3.heading_level, _lines=[h3.heading, *new_lines])
+ )
+
+ return processed if any_collapsed else None
+
+ def _collapse_methods_in_class(self, h3_content: list[str]) -> list[ASection] | None:
+ """Collapse consecutive same-name H4 methods, or return ``None`` if no overloads found."""
+ h4_parsed = _parse_sections(h3_content, heading_level=4)
+ if not h4_parsed.sections:
+ return None
+
+ groups = self._group_consecutive_overloads(h4_parsed.sections)
+ has_overloads = any(len(group) > 1 for group in groups)
+ if not has_overloads:
+ return None
+
+ collapsed: list[ASection] = []
+ for group in groups:
+ if len(group) == 1:
+ collapsed.append(group[0])
+ else:
+ accessors, overloads = _split_property_accessors(group)
+ collapsed.extend(accessors)
+ if len(overloads) > 1:
+ collapsed.append(CollapsedOverloadSection.from_overloads(overloads))
+ else:
+ collapsed.extend(overloads)
+ return collapsed
+
+ @staticmethod
+ def _group_consecutive_overloads(sections: list[MdxSection]) -> list[list[MdxSection]]:
+ """Group consecutive sections sharing the same name."""
+ return [list(group) for _, group in groupby(sections, key=lambda s: s.name)]
+
+
+def _split_property_accessors(sections: list[MdxSection]) -> tuple[list[MdxSection], list[MdxSection]]:
+ """Partition *sections* into property accessors and remaining overloads.
+
+ Recognised accessor patterns:
+
+ * **getter** — 0 params, non-``None`` return
+ * **setter** — 1 param, ``None`` return
+ * **deleter** — 0 params, ``None`` return
+ """
+ sigs = [(section, MethodSignature(section)) for section in sections]
+
+ def is_accessor(sig: MethodSignature) -> bool:
+ return getter_sig(sig) or setter_sig(sig) or deleter_sig(sig)
+
+ accessors = [section for section, sig in sigs if is_accessor(sig)]
+ overloads = [section for section, sig in sigs if not is_accessor(sig)]
+ return accessors, overloads
+
+
+def deleter_sig(sig: MethodSignature) -> bool:
+ return sig.param_count() == 0 and sig.return_type() == "None"
+
+
+def setter_sig(sig: MethodSignature) -> bool:
+ return sig.param_count() == 1 and sig.return_type() == "None"
+
+
+def getter_sig(sig: MethodSignature) -> bool:
+ return sig.param_count() == 0 and sig.return_type() != "None"
diff --git a/docs/docs_generation/content_gen_methods/mdx/mdx_collapsed_overload_section.py b/docs/docs_generation/content_gen_methods/mdx/mdx_collapsed_overload_section.py
new file mode 100644
index 00000000..85b6339d
--- /dev/null
+++ b/docs/docs_generation/content_gen_methods/mdx/mdx_collapsed_overload_section.py
@@ -0,0 +1,149 @@
+from __future__ import annotations
+
+import re
+from dataclasses import dataclass, field
+
+from .mdx_section import ASection, MdxSection
+
+
+@dataclass
+class CollapsedOverloadSection(ASection):
+ """Collapses a group of overloaded method sections into one primary entry
+    followed by a collapsible ``<details>`` block with the remaining overloads.
+
+ The *primary* overload is the one with the most parameters (excluding
+ ``self``). On ties, the first in source order wins.
+
+ Example::
+
+ >>> section = CollapsedOverloadSection.from_overloads(overload_sections)
+ >>> section.heading # delegates to primary
+ '#### `get`'
+ """
+
+ primary: MdxSection
+ others: list[MdxSection] = field(default_factory=list)
+
+ @property
+ def heading(self) -> str:
+ """Return the heading of the primary overload."""
+ return self.primary.heading
+
+ @property
+ def content(self) -> list[str]:
+ """Return primary content followed by a ```` block for the other overloads."""
+ if not self.others:
+ return self.primary.content
+
+ result = list(self.primary.content)
+ inner = [line for other in self.others for line in other.lines]
+ count = len(self.others)
+ noun = "overload" if count == 1 else "overloads"
+ result.extend(_HtmlDetailsBlock(f"Show {count} other {noun}", inner).lines())
+ return result
+
+ @classmethod
+ def from_overloads(cls, sections: list[MdxSection]) -> CollapsedOverloadSection:
+ """Create from a list of overloaded :class:`MdxSection` objects.
+
+ Selects the overload with the most parameters as *primary*.
+ On ties, the first in source order wins.
+ """
+ if not sections:
+ raise ValueError("Cannot create CollapsedOverloadSection from an empty list")
+
+ primary = max(sections, key=lambda s: MethodSignature(s).param_count())
+ others = [s for s in sections if s is not primary]
+ return cls(primary=primary, others=others)
+
+
+# --- Private collaborators ---
+
+
+@dataclass(frozen=True)
+class _HtmlDetailsBlock:
+ """A collapsible HTML ```` block."""
+
+ summary: str
+ inner_lines: list[str]
+
+ def lines(self) -> list[str]:
+ """Return the full block as a list of MDX lines."""
+ return ["", "", f"{self.summary}
", "", *self.inner_lines, "", " "]
+
+
+_CODE_FENCE_PATTERN = re.compile(r"^```python\s*$")
+_CODE_FENCE_END = re.compile(r"^```\s*$")
+
+
+def _extract_text(section: MdxSection) -> str:
+ """Extract the signature from the first code fence in *section*."""
+ in_fence = False
+ sig_lines: list[str] = []
+ for line in section.content:
+ if not in_fence and _CODE_FENCE_PATTERN.match(line):
+ in_fence = True
+ continue
+ if in_fence:
+ if _CODE_FENCE_END.match(line):
+ break
+ sig_lines.append(line)
+ return " ".join(sig_lines).strip()
+
+
+def _split_params(text: str) -> list[str]:
+ """Split *text* on commas that are not inside brackets."""
+ depth = 0
+ tokens: list[str] = []
+ current: list[str] = []
+ for char in text:
+ if char in {"[", "(", "{"}:
+ depth += 1
+ current.append(char)
+ elif char in {"]", ")", "}"}:
+ depth -= 1
+ current.append(char)
+ elif char == "," and depth == 0:
+ tokens.append("".join(current))
+ current = []
+ else:
+ current.append(char)
+ if current:
+ tokens.append("".join(current))
+ return tokens
+
+
+class MethodSignature:
+ """A Python method signature extracted from an MDX code fence.
+
+ Parses the raw signature text and counts comma-separated parameters
+ at the top level, respecting bracket nesting for generic types
+ like ``dict[str, int]``.
+ """
+
+ def __init__(self, section: MdxSection) -> None:
+ self._text = _extract_text(section)
+
+ def param_count(self) -> int:
+ """Return the number of parameters excluding ``self``."""
+ params_text = self._extract_params_text()
+ if not params_text.strip():
+ return 0
+ tokens = _split_params(params_text)
+ return len([t for t in tokens if t.strip() and t.strip() != "self"])
+
+ def return_type(self) -> str:
+ """Return the return-type annotation (e.g. ``"None"``), or ``""`` if absent."""
+ _, sep, ret = self._text.rpartition(")")
+ if not sep:
+ return ""
+ _, arrow, after_arrow = ret.partition("->")
+ return after_arrow.strip() if arrow else ""
+
+ def _extract_params_text(self) -> str:
+ """Extract the text between the first ``(`` and its last ``)``."""
+ _, sep, after_open = self._text.partition("(")
+ if not sep:
+ return ""
+ params, _, _ = after_open.rpartition(")")
+ return params
diff --git a/docs/docs_generation/content_gen_methods/mdx/mdx_ordered_code_doc.py b/docs/docs_generation/content_gen_methods/mdx/mdx_ordered_code_doc.py
new file mode 100644
index 00000000..0074c4bb
--- /dev/null
+++ b/docs/docs_generation/content_gen_methods/mdx/mdx_ordered_code_doc.py
@@ -0,0 +1,130 @@
+from __future__ import annotations
+
+from dataclasses import dataclass
+from functools import cached_property
+from typing import TYPE_CHECKING
+
+from .mdx_code_doc import ACodeDocumentation, MdxFile
+from .mdx_ordered_section import OrderedMdxSection
+from .mdx_priority import SectionPriority
+from .mdx_section import ASection, MdxSection, _parse_sections
+
+if TYPE_CHECKING:
+ from invoke import Context
+
+ from .mdx_priority import PagePriority
+
+
+@dataclass
+class PageHeadings:
+ """Heading structure extracted from an :class:`MdxFile`.
+
+ Parses lazily on first access and caches results.
+ """
+
+ mdx_file: MdxFile
+
+ @cached_property
+ def h2_names(self) -> set[str]:
+ return {s.name for s in self._h2_sections}
+
+ @cached_property
+ def h3_names(self) -> set[str]:
+ names, _ = self._h3_structure
+ return names
+
+ @cached_property
+ def h3_to_h4_names(self) -> dict[str, set[str]]:
+ _, mapping = self._h3_structure
+ return mapping
+
+ @cached_property
+ def _h2_sections(self) -> list[MdxSection]:
+ lines = self.mdx_file.content.split("\n")
+ return _parse_sections(lines, heading_level=2).sections
+
+ @cached_property
+ def _h3_structure(self) -> tuple[set[str], dict[str, set[str]]]:
+ h3_names: set[str] = set()
+ h3_to_h4: dict[str, set[str]] = {}
+ for h2 in self._h2_sections:
+ for h3 in _parse_sections(h2.content, heading_level=3).sections:
+ h3_names.add(h3.name)
+ h3_to_h4[h3.name] = {h4.name for h4 in _parse_sections(h3.content, heading_level=4).sections}
+ return h3_names, h3_to_h4
+
+ def reference_errors(self, file_key: str, priority: PagePriority) -> list[str]:
+ """Return error messages for priority references not found in this file's headings."""
+ errors: list[str] = []
+
+ errors.extend(
+ f"Priority section '{section}' not found as heading in '{file_key}'"
+ for section in priority.sections
+ if section not in self.h2_names
+ )
+
+ errors.extend(
+ f"Priority class '{cls}' not found as heading in '{file_key}'"
+ for cls in priority.classes
+ if cls not in self.h3_names
+ )
+
+ for cls_name, methods in priority.methods.items():
+ if cls_name not in self.h3_names:
+ errors.append(f"Priority methods reference unknown class '{cls_name}' in '{file_key}'")
+ continue
+ errors.extend(
+ f"Priority method '{method}' not found under class '{cls_name}' in '{file_key}'"
+ for method in methods
+ if method not in self.h3_to_h4_names.get(cls_name, set())
+ )
+
+ return errors
+
+
+@dataclass
+class OrderedMdxCodeDocumentation(ACodeDocumentation):
+ """Decorator around :class:`ACodeDocumentation` that reorders sections by priority.
+
+ Delegates generation to the wrapped *documentation* instance, then applies
+ :class:`OrderedMdxSection` reordering to pages that have a corresponding
+ :class:`PagePriority` entry.
+ """
+
+ documentation: ACodeDocumentation
+ page_priorities: dict[str, PagePriority]
+
+ def generate(self, context: Context, modules_to_document: list[str]) -> dict[str, MdxFile]:
+ files = self.documentation.generate(context, modules_to_document)
+ self._validate_references(files)
+ return {name: self._apply_priority(name, mdx_file) for name, mdx_file in files.items()}
+
+ def _validate_references(self, files: dict[str, MdxFile]) -> None:
+ errors: list[str] = []
+ for file_key, priority in self.page_priorities.items():
+ if file_key not in files:
+ errors.append(f"Priority references unknown file key '{file_key}'")
+ continue
+ errors.extend(PageHeadings(files[file_key]).reference_errors(file_key, priority))
+ if errors:
+ raise ValueError("Invalid priority configuration:\n" + "\n".join(f" - {e}" for e in errors))
+
+ def _apply_priority(self, name: str, mdx_file: MdxFile) -> MdxFile:
+ if name not in self.page_priorities:
+ return mdx_file
+
+ priority = self.page_priorities[name]
+ if not priority.sections and not priority.classes and not priority.methods:
+ return mdx_file
+
+ lines = mdx_file.content.split("\n")
+ parsed = _parse_sections(lines, heading_level=2).reordered(priority.sections)
+
+ section_priority = SectionPriority(names=priority.classes, sub_priorities=priority.methods)
+ reordered: list[ASection] = [
+ OrderedMdxSection(section=h2, priority=section_priority, child_heading_level=3) for h2 in parsed.sections
+ ]
+
+ return MdxFile(
+ name=mdx_file.name, content="\n".join(parsed.reassembled(reordered)), source_path=mdx_file.source_path
+ )
diff --git a/docs/docs_generation/content_gen_methods/mdx/mdx_ordered_section.py b/docs/docs_generation/content_gen_methods/mdx/mdx_ordered_section.py
new file mode 100644
index 00000000..e8ad4aa0
--- /dev/null
+++ b/docs/docs_generation/content_gen_methods/mdx/mdx_ordered_section.py
@@ -0,0 +1,55 @@
+from __future__ import annotations
+
+from dataclasses import dataclass
+
+from .mdx_priority import SectionPriority
+from .mdx_section import ASection, MdxSection, _parse_sections
+
+
+@dataclass
+class OrderedMdxSection(ASection):
+ """Decorator around MdxSection that reorders child sections by priority.
+
+ Reorders immediate children at ``child_heading_level`` according to
+ ``priority.names``. Children whose name appears in
+ ``priority.sub_priorities`` are themselves wrapped in a nested
+ :class:`OrderedMdxSection` that reorders *their* children one
+ heading level deeper.
+ """
+
+ section: MdxSection
+ priority: SectionPriority
+ child_heading_level: int
+
+ @property
+ def heading(self) -> str:
+ return self.section.heading
+
+ @property
+ def content(self) -> list[str]:
+ if not self.priority.names and not self.priority.sub_priorities:
+ return self.section.content
+ parsed = _parse_sections(self.section.content, heading_level=self.child_heading_level)
+ if not parsed.sections:
+ return self.section.content
+ parsed = parsed.reordered(self.priority.names)
+ children = self._apply_sub_priorities(parsed.sections)
+ return parsed.reassembled(children)
+
+ def _apply_sub_priorities(self, sections: list[MdxSection]) -> list[ASection]:
+ """Wrap children that have sub-priorities in nested :class:`OrderedMdxSection`."""
+ if not self.priority.sub_priorities:
+ return list(sections)
+ result: list[ASection] = []
+ for section in sections:
+ if section.name in self.priority.sub_priorities:
+ result.append(
+ OrderedMdxSection(
+ section=section,
+ priority=SectionPriority(names=self.priority.sub_priorities[section.name]),
+ child_heading_level=self.child_heading_level + 1,
+ )
+ )
+ else:
+ result.append(section)
+ return result
diff --git a/docs/docs_generation/content_gen_methods/mdx/mdx_priority.py b/docs/docs_generation/content_gen_methods/mdx/mdx_priority.py
new file mode 100644
index 00000000..20b55513
--- /dev/null
+++ b/docs/docs_generation/content_gen_methods/mdx/mdx_priority.py
@@ -0,0 +1,52 @@
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+
+
+def _duplicates(items: list[str]) -> list[str]:
+ """Return items that appear more than once, in order of first extra occurrence."""
+ seen: set[str] = set()
+ dupes: list[str] = []
+ for item in items:
+ if item in seen and item not in dupes:
+ dupes.append(item)
+ seen.add(item)
+ return dupes
+
+
+@dataclass(frozen=True)
+class PagePriority:
+ """Priority ordering configuration for a single documentation page.
+
+ Attributes:
+ sections: Ordered list of H2 section names to appear first (e.g. ``"Classes"``).
+ classes: Ordered list of class/function names to appear first on the page.
+ methods: Per-class ordered list of method names to appear first.
+ Key is class name, value is ordered method name list.
+ """
+
+ sections: list[str] = field(default_factory=list)
+ classes: list[str] = field(default_factory=list)
+ methods: dict[str, list[str]] = field(default_factory=dict)
+
+ def __post_init__(self) -> None:
+ errors = [f"Duplicate section '{s}'" for s in _duplicates(self.sections)]
+ errors.extend(f"Duplicate class '{c}'" for c in _duplicates(self.classes))
+ for cls_name, methods in self.methods.items():
+ errors.extend(f"Duplicate method '{m}' for class '{cls_name}'" for m in _duplicates(methods))
+ if errors:
+ raise ValueError("Invalid priority configuration:\n" + "\n".join(f" - {e}" for e in errors))
+
+
+@dataclass(frozen=True)
+class SectionPriority:
+ """Priority configuration for reordering child sections.
+
+ Attributes:
+ names: Ordered list of child section names to appear first.
+ sub_priorities: Per-child priorities for deeper nesting.
+ Key is child name, value is ordered subsection name list.
+ """
+
+ names: list[str] = field(default_factory=list)
+ sub_priorities: dict[str, list[str]] = field(default_factory=dict)
diff --git a/docs/docs_generation/content_gen_methods/mdx/mdx_section.py b/docs/docs_generation/content_gen_methods/mdx/mdx_section.py
new file mode 100644
index 00000000..ed4e25fa
--- /dev/null
+++ b/docs/docs_generation/content_gen_methods/mdx/mdx_section.py
@@ -0,0 +1,150 @@
+from __future__ import annotations
+
+import re
+from abc import ABC, abstractmethod
+from collections.abc import Sequence
+from dataclasses import dataclass, field
+
+
+class ASection(ABC):
+ """Abstract base for MDX section types."""
+
+ @property
+ @abstractmethod
+ def heading(self) -> str: ...
+
+ @property
+ @abstractmethod
+ def content(self) -> list[str]: ...
+
+ @property
+ def lines(self) -> list[str]:
+ return [self.heading, *self.content]
+
+
+@dataclass
+class MdxSection(ASection):
+ """A parsed section of MDX content delimited by a heading.
+
+ Attributes:
+ name: Item name extracted from the heading (e.g. class or method name).
+ heading_level: Markdown heading level (2, 3, or 4).
+ _lines: All lines belonging to this section, including the heading.
+ """
+
+ name: str
+ heading_level: int
+ _lines: list[str] = field(default_factory=list)
+
+ @property
+ def heading(self) -> str:
+ return self._lines[0]
+
+ @property
+ def content(self) -> list[str]:
+ return self._lines[1:]
+
+ @property
+ def lines(self) -> list[str]:
+ return self._lines
+
+
+def _heading_level(line: str) -> int | None:
+ """Return the heading level (1-6) if *line* is a Markdown heading, else ``None``."""
+ match = re.match(r"^(#{1,6})\s", line)
+ return len(match.group(1)) if match else None
+
+
+def _extract_heading_name(line: str) -> str:
+ """Extract the bare name from a heading, stripping backtick quoting.
+
+ Handles ``### `ClassName` `` → ``ClassName`` as well as plain headings.
+ """
+ match = re.match(r"^#{1,6}\s+`([^`]+)`", line)
+ if match:
+ return match.group(1)
+ match = re.match(r"^#{1,6}\s+(.+)", line)
+ if match:
+ return match.group(1).strip()
+ return ""
+
+
+@dataclass
+class ParsedContent:
+ """Result of splitting MDX lines at a heading level.
+
+ Holds the *preamble* (lines before the first heading) and the
+ *sections* found at that level. Provides reordering and reassembly
+ as instance methods so these operations stay with the data they act on.
+ """
+
+ preamble: list[str]
+ sections: list[MdxSection]
+
+ @property
+ def lines(self) -> list[str]:
+ """Reassemble preamble and sections into a flat line list."""
+ return self.reassembled(self.sections)
+
+ def reassembled(self, sections: Sequence[ASection]) -> list[str]:
+ """Combine this preamble with *sections* into a flat line list."""
+ result = list(self.preamble)
+ for section in sections:
+ result.extend(section.lines)
+ return result
+
+ def reordered(self, names: list[str]) -> ParsedContent:
+ """Return a new :class:`ParsedContent` with sections reordered by *names*.
+
+ Priority sections appear in the order given by *names*.
+ Non-priority sections retain their original relative order.
+ Handles overloaded names (multiple sections sharing a name) by grouping them.
+ Names not found in *sections* are silently skipped.
+ """
+ if not names:
+ return self
+ by_name: dict[str, list[MdxSection]] = {}
+ for section in self.sections:
+ by_name.setdefault(section.name, []).append(section)
+ ordered: list[MdxSection] = []
+ used: set[str] = set()
+ for name in names:
+ if name in by_name and name not in used:
+ ordered.extend(by_name[name])
+ used.add(name)
+ ordered.extend(s for s in self.sections if s.name not in used)
+ return ParsedContent(preamble=self.preamble, sections=ordered)
+
+
+def _parse_sections(lines: list[str], heading_level: int) -> ParsedContent:
+ """Split *lines* into a preamble and sections at *heading_level*.
+
+ Returns a :class:`ParsedContent` where *preamble* contains every line
+ before the first heading at the target level, and each
+ :class:`MdxSection` runs from its heading until the next heading at the
+ same level (or the end of the input).
+ """
+ preamble: list[str] = []
+ sections: list[MdxSection] = []
+ current_section: MdxSection | None = None
+
+ for line in lines:
+ level = _heading_level(line)
+ if level == heading_level:
+ if current_section is not None:
+ # current_section is now complete
+ sections.append(current_section)
+ name = _extract_heading_name(line)
+ current_section = MdxSection(name=name, heading_level=heading_level, _lines=[line])
+ elif current_section is not None:
+ # If this not the beginning of a new section of the same heading level, this means this line is a part of
+ # the current section
+ current_section.lines.append(line)
+ else:
+ # There is no current section, these lines are part of preamble
+ preamble.append(line)
+
+ if current_section is not None:
+ sections.append(current_section)
+
+ return ParsedContent(preamble=preamble, sections=sections)
diff --git a/docs/docs_generation/helpers.py b/docs/docs_generation/helpers.py
new file mode 100644
index 00000000..9951a80c
--- /dev/null
+++ b/docs/docs_generation/helpers.py
@@ -0,0 +1,71 @@
+from __future__ import annotations
+
+from collections import defaultdict
+from typing import Any
+
+from pydantic_settings import EnvSettingsSource
+
+from infrahub_sdk.config import ConfigBase
+
+
+def get_env_vars() -> dict[str, list[str]]:
+ """Extract environment variable names for each field of ``ConfigBase``.
+
+ Returns:
+ Mapping of field name to list of upper-cased environment variable names.
+ """
+ env_vars: dict[str, list[str]] = defaultdict(list)
+ settings = ConfigBase()
+ env_settings = EnvSettingsSource(settings.__class__, env_prefix=settings.model_config.get("env_prefix", ""))
+
+ for field_name, field in settings.model_fields.items():
+ for field_key, field_env_name, _ in env_settings._extract_field_info(field, field_name):
+ env_vars[field_key].append(field_env_name.upper())
+
+ return env_vars
+
+
+def _resolve_allof(prop: dict[str, Any], definitions: dict[str, Any]) -> tuple[list[Any], str]:
+ """Resolve an ``allOf`` JSON Schema reference to extract enum choices and type."""
+ if "allOf" not in prop:
+ return [], ""
+ ref_name = prop["allOf"][0]["$ref"].split("/")[-1]
+ ref_def = definitions.get(ref_name, {})
+ return ref_def.get("enum", []), ref_def.get("type", "")
+
+
+def _resolve_anyof_type(prop: dict[str, Any]) -> str:
+ """Resolve an ``anyOf`` to a comma-separated type string, excluding ``null``."""
+ if "anyOf" not in prop:
+ return ""
+ return ", ".join(i["type"] for i in prop["anyOf"] if "type" in i and i["type"] != "null")
+
+
+def build_config_properties() -> list[dict[str, Any]]:
+ """Build the list of configuration properties for SDK config documentation.
+
+ Returns:
+ List of dicts with keys: ``name``, ``description``, ``type``,
+ ``choices``, ``default``, ``env_vars``.
+ """
+ schema = ConfigBase.model_json_schema()
+ env_vars = get_env_vars()
+ definitions = schema.get("$defs", {})
+
+ properties = []
+ for name, prop in schema["properties"].items():
+ choices, kind = _resolve_allof(prop, definitions)
+ composed_type = _resolve_anyof_type(prop)
+
+ properties.append(
+ {
+ "name": name,
+ "description": prop.get("description", ""),
+ "type": prop.get("type", kind) or composed_type or "object",
+ "choices": choices,
+ "default": prop.get("default", ""),
+ "env_vars": env_vars.get(name, []),
+ }
+ )
+
+ return properties
diff --git a/docs/docs_generation/pages/__init__.py b/docs/docs_generation/pages/__init__.py
new file mode 100644
index 00000000..58211bd4
--- /dev/null
+++ b/docs/docs_generation/pages/__init__.py
@@ -0,0 +1,8 @@
+from __future__ import annotations
+
+from .base import DocPage, MDXDocPage
+
+__all__ = [
+ "DocPage",
+ "MDXDocPage",
+]
diff --git a/docs/docs_generation/pages/base.py b/docs/docs_generation/pages/base.py
new file mode 100644
index 00000000..6527d1b8
--- /dev/null
+++ b/docs/docs_generation/pages/base.py
@@ -0,0 +1,50 @@
+from __future__ import annotations
+
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from docs.docs_generation.content_gen_methods import ADocContentGenMethod
+
+
+class DocPage:
+ """A documentation page whose content is produced by an injected generation method.
+
+ Args:
+ content_gen_method: Strategy that produces the page content as a string.
+
+ Example::
+
+ page = DocPage(content_gen_method=Jinja2DocContentGenMethod(...))
+ print(page.content())
+ """
+
+ def __init__(self, content_gen_method: ADocContentGenMethod) -> None:
+ self.content_gen_method = content_gen_method
+
+ def content(self) -> str:
+ return self.content_gen_method.apply()
+
+
+class MDXDocPage:
+ """Decorator which is a documentation page that can be written in an ``.mdx`` file.
+
+ Args:
+ page: The documentation page whose content will be rendered.
+ output_path: File path where the ``.mdx`` output will be written.
+
+ Example::
+
+ mdx = MDXDocPage(page=my_page, output_path=Path("docs/ref/client.mdx"))
+ mdx.to_mdx()
+ """
+
+ def __init__(self, page: DocPage, output_path: Path) -> None:
+ self.page = page
+ self.output_path = output_path
+
+ def to_mdx(self) -> None:
+ rendered = self.page.content()
+ self.output_path.parent.mkdir(parents=True, exist_ok=True)
+ self.output_path.write_text(rendered, encoding="utf-8")
+ print(f"Docs saved to: {self.output_path}")
diff --git a/docs/docusaurus.config.ts b/docs/docusaurus.config.ts
index c9f85753..e3a23cca 100644
--- a/docs/docusaurus.config.ts
+++ b/docs/docusaurus.config.ts
@@ -37,7 +37,7 @@ const config: Config = {
editUrl: "https://github.com/opsmill/infrahub-sdk-python/tree/stable/docs",
path: 'docs/python-sdk',
routeBasePath: 'python-sdk',
- sidebarPath: './sidebars-python-sdk.ts',
+ sidebarPath: './sidebars/sidebars-python-sdk.ts',
sidebarCollapsed: true,
},
blog: false,
@@ -55,7 +55,7 @@ const config: Config = {
path: 'docs/infrahubctl',
routeBasePath: 'infrahubctl',
sidebarCollapsed: false,
- sidebarPath: './sidebars-infrahubctl.ts',
+ sidebarPath: './sidebars/sidebars-infrahubctl.ts',
},
],
],
diff --git a/docs/package-lock.json b/docs/package-lock.json
index 6a594bda..c7241869 100644
--- a/docs/package-lock.json
+++ b/docs/package-lock.json
@@ -8,10 +8,12 @@
"name": "docs",
"version": "0.0.0",
"dependencies": {
- "@docusaurus/core": "^3.8.1",
- "@docusaurus/preset-classic": "^3.8.1",
+ "@docusaurus/core": "^3.9.2",
+ "@docusaurus/preset-classic": "^3.9.2",
+ "@iconify/react": "^6.0.0",
"@mdx-js/react": "^3.0.0",
"clsx": "^2.0.0",
+ "markdownlint-cli2": "^0.20.0",
"prism-react-renderer": "^2.3.0",
"raw-loader": "^4.0.2",
"react": "^18.0.0",
@@ -19,95 +21,26 @@
"react-player": "^3.3.2"
},
"devDependencies": {
- "@docusaurus/module-type-aliases": "^3.8.1",
- "@docusaurus/tsconfig": "^3.8.1",
- "@docusaurus/types": "^3.8.1",
- "typescript": "~5.5.2"
+ "@docusaurus/module-type-aliases": "^3.9.2",
+ "@docusaurus/tsconfig": "^3.9.2",
+ "@docusaurus/types": "^3.9.2",
+ "typescript": "~5.5.2",
+ "vitest": "^4.0.17"
},
"engines": {
"node": ">=18.0"
}
},
- "node_modules/@ai-sdk/gateway": {
- "version": "1.0.33",
- "resolved": "https://registry.npmjs.org/@ai-sdk/gateway/-/gateway-1.0.33.tgz",
- "integrity": "sha512-v9i3GPEo4t3fGcSkQkc07xM6KJN75VUv7C1Mqmmsu2xD8lQwnQfsrgAXyNuWe20yGY0eHuheSPDZhiqsGKtH1g==",
- "license": "Apache-2.0",
- "dependencies": {
- "@ai-sdk/provider": "2.0.0",
- "@ai-sdk/provider-utils": "3.0.10",
- "@vercel/oidc": "^3.0.1"
- },
- "engines": {
- "node": ">=18"
- },
- "peerDependencies": {
- "zod": "^3.25.76 || ^4.1.8"
- }
- },
- "node_modules/@ai-sdk/provider": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/@ai-sdk/provider/-/provider-2.0.0.tgz",
- "integrity": "sha512-6o7Y2SeO9vFKB8lArHXehNuusnpddKPk7xqL7T2/b+OvXMRIXUO1rR4wcv1hAFUAT9avGZshty3Wlua/XA7TvA==",
- "license": "Apache-2.0",
- "dependencies": {
- "json-schema": "^0.4.0"
- },
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/@ai-sdk/provider-utils": {
- "version": "3.0.10",
- "resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-3.0.10.tgz",
- "integrity": "sha512-T1gZ76gEIwffep6MWI0QNy9jgoybUHE7TRaHB5k54K8mF91ciGFlbtCGxDYhMH3nCRergKwYFIDeFF0hJSIQHQ==",
- "license": "Apache-2.0",
- "dependencies": {
- "@ai-sdk/provider": "2.0.0",
- "@standard-schema/spec": "^1.0.0",
- "eventsource-parser": "^3.0.5"
- },
- "engines": {
- "node": ">=18"
- },
- "peerDependencies": {
- "zod": "^3.25.76 || ^4.1.8"
- }
- },
- "node_modules/@ai-sdk/react": {
- "version": "2.0.60",
- "resolved": "https://registry.npmjs.org/@ai-sdk/react/-/react-2.0.60.tgz",
- "integrity": "sha512-Ev0MC0I7eDcCH4FnrHzK48g9bJjyF3F67MMq76qoVsbtcs6fGIO5RjmYgPoFeSo8/yQ5EM6i/14yfcD0oB+moA==",
- "license": "Apache-2.0",
- "dependencies": {
- "@ai-sdk/provider-utils": "3.0.10",
- "ai": "5.0.60",
- "swr": "^2.2.5",
- "throttleit": "2.1.0"
- },
- "engines": {
- "node": ">=18"
- },
- "peerDependencies": {
- "react": "^18 || ^19 || ^19.0.0-rc",
- "zod": "^3.25.76 || ^4.1.8"
- },
- "peerDependenciesMeta": {
- "zod": {
- "optional": true
- }
- }
- },
"node_modules/@algolia/abtesting": {
- "version": "1.5.0",
- "resolved": "https://registry.npmjs.org/@algolia/abtesting/-/abtesting-1.5.0.tgz",
- "integrity": "sha512-W/ohRkbKQsqDWALJg28X15KF7Tcyg53L1MfdOkLgvkcCcofdzGHSimHHeNG05ojjFw9HK8+VPhe/Vwq4MozIJg==",
+ "version": "1.14.0",
+ "resolved": "https://registry.npmjs.org/@algolia/abtesting/-/abtesting-1.14.0.tgz",
+ "integrity": "sha512-cZfj+1Z1dgrk3YPtNQNt0H9Rr67P8b4M79JjUKGS0d7/EbFbGxGgSu6zby5f22KXo3LT0LZa4O2c6VVbupJuDg==",
"license": "MIT",
"dependencies": {
- "@algolia/client-common": "5.39.0",
- "@algolia/requester-browser-xhr": "5.39.0",
- "@algolia/requester-fetch": "5.39.0",
- "@algolia/requester-node-http": "5.39.0"
+ "@algolia/client-common": "5.48.0",
+ "@algolia/requester-browser-xhr": "5.48.0",
+ "@algolia/requester-fetch": "5.48.0",
+ "@algolia/requester-node-http": "5.48.0"
},
"engines": {
"node": ">= 14.0.0"
@@ -146,99 +79,99 @@
}
},
"node_modules/@algolia/client-abtesting": {
- "version": "5.39.0",
- "resolved": "https://registry.npmjs.org/@algolia/client-abtesting/-/client-abtesting-5.39.0.tgz",
- "integrity": "sha512-Vf0ZVe+qo3sHDrCinouJqlg8VoxM4Qo/KxNIqMYybkuctutfnp3kIY9OmESplOQ/9NGBthU9EG+4d5fBibWK/A==",
+ "version": "5.48.0",
+ "resolved": "https://registry.npmjs.org/@algolia/client-abtesting/-/client-abtesting-5.48.0.tgz",
+ "integrity": "sha512-n17WSJ7vazmM6yDkWBAjY12J8ERkW9toOqNgQ1GEZu/Kc4dJDJod1iy+QP5T/UlR3WICgZDi/7a/VX5TY5LAPQ==",
"license": "MIT",
"dependencies": {
- "@algolia/client-common": "5.39.0",
- "@algolia/requester-browser-xhr": "5.39.0",
- "@algolia/requester-fetch": "5.39.0",
- "@algolia/requester-node-http": "5.39.0"
+ "@algolia/client-common": "5.48.0",
+ "@algolia/requester-browser-xhr": "5.48.0",
+ "@algolia/requester-fetch": "5.48.0",
+ "@algolia/requester-node-http": "5.48.0"
},
"engines": {
"node": ">= 14.0.0"
}
},
"node_modules/@algolia/client-analytics": {
- "version": "5.39.0",
- "resolved": "https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-5.39.0.tgz",
- "integrity": "sha512-V16ITZxYIwcv1arNce65JZmn94Ft6vKlBZ//gXw8AvIH32glJz1KcbaVAUr9p7PYlGZ/XVHP6LxDgrpNdtwgcA==",
+ "version": "5.48.0",
+ "resolved": "https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-5.48.0.tgz",
+ "integrity": "sha512-v5bMZMEqW9U2l40/tTAaRyn4AKrYLio7KcRuHmLaJtxuJAhvZiE7Y62XIsF070juz4MN3eyvfQmI+y5+OVbZuA==",
"license": "MIT",
"dependencies": {
- "@algolia/client-common": "5.39.0",
- "@algolia/requester-browser-xhr": "5.39.0",
- "@algolia/requester-fetch": "5.39.0",
- "@algolia/requester-node-http": "5.39.0"
+ "@algolia/client-common": "5.48.0",
+ "@algolia/requester-browser-xhr": "5.48.0",
+ "@algolia/requester-fetch": "5.48.0",
+ "@algolia/requester-node-http": "5.48.0"
},
"engines": {
"node": ">= 14.0.0"
}
},
"node_modules/@algolia/client-common": {
- "version": "5.39.0",
- "resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-5.39.0.tgz",
- "integrity": "sha512-UCJTuwySEQeiKPWV3wruhuI/wHbDYenHzgL9pYsvh6r/u5Z+g61ip1iwdAlFp02CnywzI9O7+AQPh2ManYyHmQ==",
+ "version": "5.48.0",
+ "resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-5.48.0.tgz",
+ "integrity": "sha512-7H3DgRyi7UByScc0wz7EMrhgNl7fKPDjKX9OcWixLwCj7yrRXDSIzwunykuYUUO7V7HD4s319e15FlJ9CQIIFQ==",
"license": "MIT",
"engines": {
"node": ">= 14.0.0"
}
},
"node_modules/@algolia/client-insights": {
- "version": "5.39.0",
- "resolved": "https://registry.npmjs.org/@algolia/client-insights/-/client-insights-5.39.0.tgz",
- "integrity": "sha512-s0ia8M/ZZR+iO2uLNTBrlQdEb6ZMAMcKMHckp5mcoglxrf8gHifL4LmdhGKdAxAn3UIagtqIP0RCnIymHUbm7A==",
+ "version": "5.48.0",
+ "resolved": "https://registry.npmjs.org/@algolia/client-insights/-/client-insights-5.48.0.tgz",
+ "integrity": "sha512-tXmkB6qrIGAXrtRYHQNpfW0ekru/qymV02bjT0w5QGaGw0W91yT+53WB6dTtRRsIrgS30Al6efBvyaEosjZ5uw==",
"license": "MIT",
"dependencies": {
- "@algolia/client-common": "5.39.0",
- "@algolia/requester-browser-xhr": "5.39.0",
- "@algolia/requester-fetch": "5.39.0",
- "@algolia/requester-node-http": "5.39.0"
+ "@algolia/client-common": "5.48.0",
+ "@algolia/requester-browser-xhr": "5.48.0",
+ "@algolia/requester-fetch": "5.48.0",
+ "@algolia/requester-node-http": "5.48.0"
},
"engines": {
"node": ">= 14.0.0"
}
},
"node_modules/@algolia/client-personalization": {
- "version": "5.39.0",
- "resolved": "https://registry.npmjs.org/@algolia/client-personalization/-/client-personalization-5.39.0.tgz",
- "integrity": "sha512-vZPIt7Lw+toNsHZUiPhNIc1Z3vUjDp7nzn6AMOaPC73gEuTq2iLPNvM06CSB6aHePo5eMeJIP5YEKBUQUA/PJA==",
+ "version": "5.48.0",
+ "resolved": "https://registry.npmjs.org/@algolia/client-personalization/-/client-personalization-5.48.0.tgz",
+ "integrity": "sha512-4tXEsrdtcBZbDF73u14Kb3otN+xUdTVGop1tBjict+Rc/FhsJQVIwJIcTrOJqmvhtBfc56Bu65FiVOnpAZCxcw==",
"license": "MIT",
"dependencies": {
- "@algolia/client-common": "5.39.0",
- "@algolia/requester-browser-xhr": "5.39.0",
- "@algolia/requester-fetch": "5.39.0",
- "@algolia/requester-node-http": "5.39.0"
+ "@algolia/client-common": "5.48.0",
+ "@algolia/requester-browser-xhr": "5.48.0",
+ "@algolia/requester-fetch": "5.48.0",
+ "@algolia/requester-node-http": "5.48.0"
},
"engines": {
"node": ">= 14.0.0"
}
},
"node_modules/@algolia/client-query-suggestions": {
- "version": "5.39.0",
- "resolved": "https://registry.npmjs.org/@algolia/client-query-suggestions/-/client-query-suggestions-5.39.0.tgz",
- "integrity": "sha512-jcPQr3iKTWNVli2NYHPv02aNLwixDjPCpOgMp9CZTvEiPI6Ec4jHX+oFr3LDZagOFY9e1xJhc/JrgMGGW1sHnw==",
+ "version": "5.48.0",
+ "resolved": "https://registry.npmjs.org/@algolia/client-query-suggestions/-/client-query-suggestions-5.48.0.tgz",
+ "integrity": "sha512-unzSUwWFpsDrO8935RhMAlyK0Ttua/5XveVIwzfjs5w+GVBsHgIkbOe8VbBJccMU/z1LCwvu1AY3kffuSLAR5Q==",
"license": "MIT",
"dependencies": {
- "@algolia/client-common": "5.39.0",
- "@algolia/requester-browser-xhr": "5.39.0",
- "@algolia/requester-fetch": "5.39.0",
- "@algolia/requester-node-http": "5.39.0"
+ "@algolia/client-common": "5.48.0",
+ "@algolia/requester-browser-xhr": "5.48.0",
+ "@algolia/requester-fetch": "5.48.0",
+ "@algolia/requester-node-http": "5.48.0"
},
"engines": {
"node": ">= 14.0.0"
}
},
"node_modules/@algolia/client-search": {
- "version": "5.39.0",
- "resolved": "https://registry.npmjs.org/@algolia/client-search/-/client-search-5.39.0.tgz",
- "integrity": "sha512-/IYpF10BpthGZEJQZMhMqV4AqWr5avcWfZm/SIKK1RvUDmzGqLoW/+xeJVX9C8ZnNkIC8hivbIQFaNaRw0BFZQ==",
+ "version": "5.48.0",
+ "resolved": "https://registry.npmjs.org/@algolia/client-search/-/client-search-5.48.0.tgz",
+ "integrity": "sha512-RB9bKgYTVUiOcEb5bOcZ169jiiVW811dCsJoLT19DcbbFmU4QaK0ghSTssij35QBQ3SCOitXOUrHcGgNVwS7sQ==",
"license": "MIT",
"dependencies": {
- "@algolia/client-common": "5.39.0",
- "@algolia/requester-browser-xhr": "5.39.0",
- "@algolia/requester-fetch": "5.39.0",
- "@algolia/requester-node-http": "5.39.0"
+ "@algolia/client-common": "5.48.0",
+ "@algolia/requester-browser-xhr": "5.48.0",
+ "@algolia/requester-fetch": "5.48.0",
+ "@algolia/requester-node-http": "5.48.0"
},
"engines": {
"node": ">= 14.0.0"
@@ -251,93 +184,93 @@
"license": "MIT"
},
"node_modules/@algolia/ingestion": {
- "version": "1.39.0",
- "resolved": "https://registry.npmjs.org/@algolia/ingestion/-/ingestion-1.39.0.tgz",
- "integrity": "sha512-IgSHKUiuecqLfBlXiuCSdRTdsO3/yvpmXrMFz8fAJ8M4QmDtHkOuD769dmybRYqsbYMHivw+lir4BgbRGMtOIQ==",
+ "version": "1.48.0",
+ "resolved": "https://registry.npmjs.org/@algolia/ingestion/-/ingestion-1.48.0.tgz",
+ "integrity": "sha512-rhoSoPu+TDzDpvpk3cY/pYgbeWXr23DxnAIH/AkN0dUC+GCnVIeNSQkLaJ+CL4NZ51cjLIjksrzb4KC5Xu+ktw==",
"license": "MIT",
"dependencies": {
- "@algolia/client-common": "5.39.0",
- "@algolia/requester-browser-xhr": "5.39.0",
- "@algolia/requester-fetch": "5.39.0",
- "@algolia/requester-node-http": "5.39.0"
+ "@algolia/client-common": "5.48.0",
+ "@algolia/requester-browser-xhr": "5.48.0",
+ "@algolia/requester-fetch": "5.48.0",
+ "@algolia/requester-node-http": "5.48.0"
},
"engines": {
"node": ">= 14.0.0"
}
},
"node_modules/@algolia/monitoring": {
- "version": "1.39.0",
- "resolved": "https://registry.npmjs.org/@algolia/monitoring/-/monitoring-1.39.0.tgz",
- "integrity": "sha512-8Xnd4+609SKC/hqVsuFc4evFBmvA2765/4NcH+Dpr756SKPbL1BY0X8kVxlmM3YBLNqnduSQxHxpDJUK58imCA==",
+ "version": "1.48.0",
+ "resolved": "https://registry.npmjs.org/@algolia/monitoring/-/monitoring-1.48.0.tgz",
+ "integrity": "sha512-aSe6jKvWt+8VdjOaq2ERtsXp9+qMXNJ3mTyTc1VMhNfgPl7ArOhRMRSQ8QBnY8ZL4yV5Xpezb7lAg8pdGrrulg==",
"license": "MIT",
"dependencies": {
- "@algolia/client-common": "5.39.0",
- "@algolia/requester-browser-xhr": "5.39.0",
- "@algolia/requester-fetch": "5.39.0",
- "@algolia/requester-node-http": "5.39.0"
+ "@algolia/client-common": "5.48.0",
+ "@algolia/requester-browser-xhr": "5.48.0",
+ "@algolia/requester-fetch": "5.48.0",
+ "@algolia/requester-node-http": "5.48.0"
},
"engines": {
"node": ">= 14.0.0"
}
},
"node_modules/@algolia/recommend": {
- "version": "5.39.0",
- "resolved": "https://registry.npmjs.org/@algolia/recommend/-/recommend-5.39.0.tgz",
- "integrity": "sha512-D7Ye2Ss/5xqUkQUxKm/VqEJLt5kARd9IMmjdzlxaKhGgNlOemTay0lwBmOVFuJRp7UODjp5c9+K+B8g0ORObIw==",
+ "version": "5.48.0",
+ "resolved": "https://registry.npmjs.org/@algolia/recommend/-/recommend-5.48.0.tgz",
+ "integrity": "sha512-p9tfI1bimAaZrdiVExL/dDyGUZ8gyiSHsktP1ZWGzt5hXpM3nhv4tSjyHtXjEKtA0UvsaHKwSfFE8aAAm1eIQA==",
"license": "MIT",
"dependencies": {
- "@algolia/client-common": "5.39.0",
- "@algolia/requester-browser-xhr": "5.39.0",
- "@algolia/requester-fetch": "5.39.0",
- "@algolia/requester-node-http": "5.39.0"
+ "@algolia/client-common": "5.48.0",
+ "@algolia/requester-browser-xhr": "5.48.0",
+ "@algolia/requester-fetch": "5.48.0",
+ "@algolia/requester-node-http": "5.48.0"
},
"engines": {
"node": ">= 14.0.0"
}
},
"node_modules/@algolia/requester-browser-xhr": {
- "version": "5.39.0",
- "resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-5.39.0.tgz",
- "integrity": "sha512-mgPte1ZJqpk9dkVs44J3wKAbHATvHZNlSpzhMdjMLIg/3qTycSZyDiomLiSlxE8CLsxyBAOJWnyKRHfom+Z1rg==",
+ "version": "5.48.0",
+ "resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-5.48.0.tgz",
+ "integrity": "sha512-XshyfpsQB7BLnHseMinp3fVHOGlTv6uEHOzNK/3XrEF9mjxoZAcdVfY1OCXObfwRWX5qXZOq8FnrndFd44iVsQ==",
"license": "MIT",
"dependencies": {
- "@algolia/client-common": "5.39.0"
+ "@algolia/client-common": "5.48.0"
},
"engines": {
"node": ">= 14.0.0"
}
},
"node_modules/@algolia/requester-fetch": {
- "version": "5.39.0",
- "resolved": "https://registry.npmjs.org/@algolia/requester-fetch/-/requester-fetch-5.39.0.tgz",
- "integrity": "sha512-LIrCkrxu1WnO3ev1+w6NnZ12JZL/o+2H9w6oWnZAjQZIlA/Ym6M9QHkt+OQ/SwkuoiNkW3DAo+Pi4A2V9FPtqg==",
+ "version": "5.48.0",
+ "resolved": "https://registry.npmjs.org/@algolia/requester-fetch/-/requester-fetch-5.48.0.tgz",
+ "integrity": "sha512-Q4XNSVQU89bKNAPuvzSYqTH9AcbOOiIo6AeYMQTxgSJ2+uvT78CLPMG89RIIloYuAtSfE07s40OLV50++l1Bbw==",
"license": "MIT",
"dependencies": {
- "@algolia/client-common": "5.39.0"
+ "@algolia/client-common": "5.48.0"
},
"engines": {
"node": ">= 14.0.0"
}
},
"node_modules/@algolia/requester-node-http": {
- "version": "5.39.0",
- "resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-5.39.0.tgz",
- "integrity": "sha512-6beG+egPwXmvhAg+m0STCj+ZssDcjrLzf4L05aKm2nGglMXSSPz0cH/rM+kVD9krNfldiMctURd4wjojW1fV0w==",
+ "version": "5.48.0",
+ "resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-5.48.0.tgz",
+ "integrity": "sha512-ZgxV2+5qt3NLeUYBTsi6PLyHcENQWC0iFppFZekHSEDA2wcLdTUjnaJzimTEULHIvJuLRCkUs4JABdhuJktEag==",
"license": "MIT",
"dependencies": {
- "@algolia/client-common": "5.39.0"
+ "@algolia/client-common": "5.48.0"
},
"engines": {
"node": ">= 14.0.0"
}
},
"node_modules/@babel/code-frame": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz",
- "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==",
+ "version": "7.29.0",
+ "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.29.0.tgz",
+ "integrity": "sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw==",
"license": "MIT",
"dependencies": {
- "@babel/helper-validator-identifier": "^7.27.1",
+ "@babel/helper-validator-identifier": "^7.28.5",
"js-tokens": "^4.0.0",
"picocolors": "^1.1.1"
},
@@ -346,29 +279,29 @@
}
},
"node_modules/@babel/compat-data": {
- "version": "7.28.4",
- "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.28.4.tgz",
- "integrity": "sha512-YsmSKC29MJwf0gF8Rjjrg5LQCmyh+j/nD8/eP7f+BeoQTKYqs9RoWbjGOdy0+1Ekr68RJZMUOPVQaQisnIo4Rw==",
+ "version": "7.29.0",
+ "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.29.0.tgz",
+ "integrity": "sha512-T1NCJqT/j9+cn8fvkt7jtwbLBfLC/1y1c7NtCeXFRgzGTsafi68MRv8yzkYSapBnFA6L3U2VSc02ciDzoAJhJg==",
"license": "MIT",
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/core": {
- "version": "7.28.4",
- "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.28.4.tgz",
- "integrity": "sha512-2BCOP7TN8M+gVDj7/ht3hsaO/B/n5oDbiAyyvnRlNOs+u1o+JWNYTQrmpuNp1/Wq2gcFrI01JAW+paEKDMx/CA==",
- "license": "MIT",
- "dependencies": {
- "@babel/code-frame": "^7.27.1",
- "@babel/generator": "^7.28.3",
- "@babel/helper-compilation-targets": "^7.27.2",
- "@babel/helper-module-transforms": "^7.28.3",
- "@babel/helpers": "^7.28.4",
- "@babel/parser": "^7.28.4",
- "@babel/template": "^7.27.2",
- "@babel/traverse": "^7.28.4",
- "@babel/types": "^7.28.4",
+ "version": "7.29.0",
+ "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.29.0.tgz",
+ "integrity": "sha512-CGOfOJqWjg2qW/Mb6zNsDm+u5vFQ8DxXfbM09z69p5Z6+mE1ikP2jUXw+j42Pf1XTYED2Rni5f95npYeuwMDQA==",
+ "license": "MIT",
+ "dependencies": {
+ "@babel/code-frame": "^7.29.0",
+ "@babel/generator": "^7.29.0",
+ "@babel/helper-compilation-targets": "^7.28.6",
+ "@babel/helper-module-transforms": "^7.28.6",
+ "@babel/helpers": "^7.28.6",
+ "@babel/parser": "^7.29.0",
+ "@babel/template": "^7.28.6",
+ "@babel/traverse": "^7.29.0",
+ "@babel/types": "^7.29.0",
"@jridgewell/remapping": "^2.3.5",
"convert-source-map": "^2.0.0",
"debug": "^4.1.0",
@@ -394,13 +327,13 @@
}
},
"node_modules/@babel/generator": {
- "version": "7.28.3",
- "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz",
- "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==",
+ "version": "7.29.1",
+ "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.29.1.tgz",
+ "integrity": "sha512-qsaF+9Qcm2Qv8SRIMMscAvG4O3lJ0F1GuMo5HR/Bp02LopNgnZBC/EkbevHFeGs4ls/oPz9v+Bsmzbkbe+0dUw==",
"license": "MIT",
"dependencies": {
- "@babel/parser": "^7.28.3",
- "@babel/types": "^7.28.2",
+ "@babel/parser": "^7.29.0",
+ "@babel/types": "^7.29.0",
"@jridgewell/gen-mapping": "^0.3.12",
"@jridgewell/trace-mapping": "^0.3.28",
"jsesc": "^3.0.2"
@@ -422,12 +355,12 @@
}
},
"node_modules/@babel/helper-compilation-targets": {
- "version": "7.27.2",
- "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.27.2.tgz",
- "integrity": "sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.28.6.tgz",
+ "integrity": "sha512-JYtls3hqi15fcx5GaSNL7SCTJ2MNmjrkHXg4FSpOA/grxK8KwyZ5bubHsCq8FXCkua6xhuaaBit+3b7+VZRfcA==",
"license": "MIT",
"dependencies": {
- "@babel/compat-data": "^7.27.2",
+ "@babel/compat-data": "^7.28.6",
"@babel/helper-validator-option": "^7.27.1",
"browserslist": "^4.24.0",
"lru-cache": "^5.1.1",
@@ -447,17 +380,17 @@
}
},
"node_modules/@babel/helper-create-class-features-plugin": {
- "version": "7.28.3",
- "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.28.3.tgz",
- "integrity": "sha512-V9f6ZFIYSLNEbuGA/92uOvYsGCJNsuA8ESZ4ldc09bWk/j8H8TKiPw8Mk1eG6olpnO0ALHJmYfZvF4MEE4gajg==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.28.6.tgz",
+ "integrity": "sha512-dTOdvsjnG3xNT9Y0AUg1wAl38y+4Rl4sf9caSQZOXdNqVn+H+HbbJ4IyyHaIqNR6SW9oJpA/RuRjsjCw2IdIow==",
"license": "MIT",
"dependencies": {
"@babel/helper-annotate-as-pure": "^7.27.3",
- "@babel/helper-member-expression-to-functions": "^7.27.1",
+ "@babel/helper-member-expression-to-functions": "^7.28.5",
"@babel/helper-optimise-call-expression": "^7.27.1",
- "@babel/helper-replace-supers": "^7.27.1",
+ "@babel/helper-replace-supers": "^7.28.6",
"@babel/helper-skip-transparent-expression-wrappers": "^7.27.1",
- "@babel/traverse": "^7.28.3",
+ "@babel/traverse": "^7.28.6",
"semver": "^6.3.1"
},
"engines": {
@@ -477,13 +410,13 @@
}
},
"node_modules/@babel/helper-create-regexp-features-plugin": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.27.1.tgz",
- "integrity": "sha512-uVDC72XVf8UbrH5qQTc18Agb8emwjTiZrQE11Nv3CuBEZmVvTwwE9CBUEvHku06gQCAyYf8Nv6ja1IN+6LMbxQ==",
+ "version": "7.28.5",
+ "resolved": "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.28.5.tgz",
+ "integrity": "sha512-N1EhvLtHzOvj7QQOUCCS3NrPJP8c5W6ZXCHDn7Yialuy1iu4r5EmIYkXlKNqT99Ciw+W0mDqWoR6HWMZlFP3hw==",
"license": "MIT",
"dependencies": {
- "@babel/helper-annotate-as-pure": "^7.27.1",
- "regexpu-core": "^6.2.0",
+ "@babel/helper-annotate-as-pure": "^7.27.3",
+ "regexpu-core": "^6.3.1",
"semver": "^6.3.1"
},
"engines": {
@@ -503,16 +436,16 @@
}
},
"node_modules/@babel/helper-define-polyfill-provider": {
- "version": "0.6.5",
- "resolved": "https://registry.npmjs.org/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.6.5.tgz",
- "integrity": "sha512-uJnGFcPsWQK8fvjgGP5LZUZZsYGIoPeRjSF5PGwrelYgq7Q15/Ft9NGFp1zglwgIv//W0uG4BevRuSJRyylZPg==",
+ "version": "0.6.6",
+ "resolved": "https://registry.npmjs.org/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.6.6.tgz",
+ "integrity": "sha512-mOAsxeeKkUKayvZR3HeTYD/fICpCPLJrU5ZjelT/PA6WHtNDBOE436YiaEUvHN454bRM3CebhDsIpieCc4texA==",
"license": "MIT",
"dependencies": {
- "@babel/helper-compilation-targets": "^7.27.2",
- "@babel/helper-plugin-utils": "^7.27.1",
- "debug": "^4.4.1",
+ "@babel/helper-compilation-targets": "^7.28.6",
+ "@babel/helper-plugin-utils": "^7.28.6",
+ "debug": "^4.4.3",
"lodash.debounce": "^4.0.8",
- "resolve": "^1.22.10"
+ "resolve": "^1.22.11"
},
"peerDependencies": {
"@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0"
@@ -528,40 +461,40 @@
}
},
"node_modules/@babel/helper-member-expression-to-functions": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.27.1.tgz",
- "integrity": "sha512-E5chM8eWjTp/aNoVpcbfM7mLxu9XGLWYise2eBKGQomAk/Mb4XoxyqXTZbuTohbsl8EKqdlMhnDI2CCLfcs9wA==",
+ "version": "7.28.5",
+ "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.28.5.tgz",
+ "integrity": "sha512-cwM7SBRZcPCLgl8a7cY0soT1SptSzAlMH39vwiRpOQkJlh53r5hdHwLSCZpQdVLT39sZt+CRpNwYG4Y2v77atg==",
"license": "MIT",
"dependencies": {
- "@babel/traverse": "^7.27.1",
- "@babel/types": "^7.27.1"
+ "@babel/traverse": "^7.28.5",
+ "@babel/types": "^7.28.5"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/helper-module-imports": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.27.1.tgz",
- "integrity": "sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.28.6.tgz",
+ "integrity": "sha512-l5XkZK7r7wa9LucGw9LwZyyCUscb4x37JWTPz7swwFE/0FMQAGpiWUZn8u9DzkSBWEcK25jmvubfpw2dnAMdbw==",
"license": "MIT",
"dependencies": {
- "@babel/traverse": "^7.27.1",
- "@babel/types": "^7.27.1"
+ "@babel/traverse": "^7.28.6",
+ "@babel/types": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/helper-module-transforms": {
- "version": "7.28.3",
- "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.3.tgz",
- "integrity": "sha512-gytXUbs8k2sXS9PnQptz5o0QnpLL51SwASIORY6XaBKF88nsOT0Zw9szLqlSGQDP/4TljBAD5y98p2U1fqkdsw==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.6.tgz",
+ "integrity": "sha512-67oXFAYr2cDLDVGLXTEABjdBJZ6drElUSI7WKp70NrpyISso3plG9SAGEF6y7zbha/wOzUByWWTJvEDVNIUGcA==",
"license": "MIT",
"dependencies": {
- "@babel/helper-module-imports": "^7.27.1",
- "@babel/helper-validator-identifier": "^7.27.1",
- "@babel/traverse": "^7.28.3"
+ "@babel/helper-module-imports": "^7.28.6",
+ "@babel/helper-validator-identifier": "^7.28.5",
+ "@babel/traverse": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
@@ -583,9 +516,9 @@
}
},
"node_modules/@babel/helper-plugin-utils": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.27.1.tgz",
- "integrity": "sha512-1gn1Up5YXka3YYAHGKpbideQ5Yjf1tDa9qYcgysz+cNCXukyLl6DjPXhD3VRwSb8c0J9tA4b2+rHEZtc6R0tlw==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.28.6.tgz",
+ "integrity": "sha512-S9gzZ/bz83GRysI7gAD4wPT/AI3uCnY+9xn+Mx/KPs2JwHJIz1W8PZkg2cqyt3RNOBM8ejcXhV6y8Og7ly/Dug==",
"license": "MIT",
"engines": {
"node": ">=6.9.0"
@@ -609,14 +542,14 @@
}
},
"node_modules/@babel/helper-replace-supers": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.27.1.tgz",
- "integrity": "sha512-7EHz6qDZc8RYS5ElPoShMheWvEgERonFCs7IAonWLLUTXW59DP14bCZt89/GKyreYn8g3S83m21FelHKbeDCKA==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.28.6.tgz",
+ "integrity": "sha512-mq8e+laIk94/yFec3DxSjCRD2Z0TAjhVbEJY3UQrlwVo15Lmt7C2wAUbK4bjnTs4APkwsYLTahXRraQXhb1WCg==",
"license": "MIT",
"dependencies": {
- "@babel/helper-member-expression-to-functions": "^7.27.1",
+ "@babel/helper-member-expression-to-functions": "^7.28.5",
"@babel/helper-optimise-call-expression": "^7.27.1",
- "@babel/traverse": "^7.27.1"
+ "@babel/traverse": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
@@ -648,9 +581,9 @@
}
},
"node_modules/@babel/helper-validator-identifier": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz",
- "integrity": "sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==",
+ "version": "7.28.5",
+ "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz",
+ "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==",
"license": "MIT",
"engines": {
"node": ">=6.9.0"
@@ -666,39 +599,39 @@
}
},
"node_modules/@babel/helper-wrap-function": {
- "version": "7.28.3",
- "resolved": "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.28.3.tgz",
- "integrity": "sha512-zdf983tNfLZFletc0RRXYrHrucBEg95NIFMkn6K9dbeMYnsgHaSBGcQqdsCSStG2PYwRre0Qc2NNSCXbG+xc6g==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.28.6.tgz",
+ "integrity": "sha512-z+PwLziMNBeSQJonizz2AGnndLsP2DeGHIxDAn+wdHOGuo4Fo1x1HBPPXeE9TAOPHNNWQKCSlA2VZyYyyibDnQ==",
"license": "MIT",
"dependencies": {
- "@babel/template": "^7.27.2",
- "@babel/traverse": "^7.28.3",
- "@babel/types": "^7.28.2"
+ "@babel/template": "^7.28.6",
+ "@babel/traverse": "^7.28.6",
+ "@babel/types": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/helpers": {
- "version": "7.28.4",
- "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.4.tgz",
- "integrity": "sha512-HFN59MmQXGHVyYadKLVumYsA9dBFun/ldYxipEjzA4196jpLZd8UjEEBLkbEkvfYreDqJhZxYAWFPtrfhNpj4w==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.6.tgz",
+ "integrity": "sha512-xOBvwq86HHdB7WUDTfKfT/Vuxh7gElQ+Sfti2Cy6yIWNW05P8iUslOVcZ4/sKbE+/jQaukQAdz/gf3724kYdqw==",
"license": "MIT",
"dependencies": {
- "@babel/template": "^7.27.2",
- "@babel/types": "^7.28.4"
+ "@babel/template": "^7.28.6",
+ "@babel/types": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/parser": {
- "version": "7.28.4",
- "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz",
- "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==",
+ "version": "7.29.0",
+ "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.29.0.tgz",
+ "integrity": "sha512-IyDgFV5GeDUVX4YdF/3CPULtVGSXXMLh1xVIgdCgxApktqnQV0r7/8Nqthg+8YLGaAtdyIlo2qIdZrbCv4+7ww==",
"license": "MIT",
"dependencies": {
- "@babel/types": "^7.28.4"
+ "@babel/types": "^7.29.0"
},
"bin": {
"parser": "bin/babel-parser.js"
@@ -708,13 +641,13 @@
}
},
"node_modules/@babel/plugin-bugfix-firefox-class-in-computed-class-key": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-firefox-class-in-computed-class-key/-/plugin-bugfix-firefox-class-in-computed-class-key-7.27.1.tgz",
- "integrity": "sha512-QPG3C9cCVRQLxAVwmefEmwdTanECuUBMQZ/ym5kiw3XKCGA7qkuQLcjWWHcrD/GKbn/WmJwaezfuuAOcyKlRPA==",
+ "version": "7.28.5",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-firefox-class-in-computed-class-key/-/plugin-bugfix-firefox-class-in-computed-class-key-7.28.5.tgz",
+ "integrity": "sha512-87GDMS3tsmMSi/3bWOte1UblL+YUTFMV8SZPZ2eSEL17s74Cw/l63rR6NmGVKMYW2GYi85nE+/d6Hw5N0bEk2Q==",
"license": "MIT",
"dependencies": {
"@babel/helper-plugin-utils": "^7.27.1",
- "@babel/traverse": "^7.27.1"
+ "@babel/traverse": "^7.28.5"
},
"engines": {
"node": ">=6.9.0"
@@ -771,13 +704,13 @@
}
},
"node_modules/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": {
- "version": "7.28.3",
- "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly/-/plugin-bugfix-v8-static-class-fields-redefine-readonly-7.28.3.tgz",
- "integrity": "sha512-b6YTX108evsvE4YgWyQ921ZAFFQm3Bn+CA3+ZXlNVnPhx+UfsVURoPjfGAPCjBgrqo30yX/C2nZGX96DxvR9Iw==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly/-/plugin-bugfix-v8-static-class-fields-redefine-readonly-7.28.6.tgz",
+ "integrity": "sha512-a0aBScVTlNaiUe35UtfxAN7A/tehvvG4/ByO6+46VPKTRSlfnAFsgKy0FUh+qAkQrDTmhDkT+IBOKlOoMUxQ0g==",
"license": "MIT",
"dependencies": {
- "@babel/helper-plugin-utils": "^7.27.1",
- "@babel/traverse": "^7.28.3"
+ "@babel/helper-plugin-utils": "^7.28.6",
+ "@babel/traverse": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
@@ -811,12 +744,12 @@
}
},
"node_modules/@babel/plugin-syntax-import-assertions": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.27.1.tgz",
- "integrity": "sha512-UT/Jrhw57xg4ILHLFnzFpPDlMbcdEicaAtjPQpbj9wa8T4r5KVWCimHcL/460g8Ht0DMxDyjsLgiWSkVjnwPFg==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.28.6.tgz",
+ "integrity": "sha512-pSJUpFHdx9z5nqTSirOCMtYVP2wFgoWhP0p3g8ONK/4IHhLIBd0B9NYqAvIUAhq+OkhO4VM1tENCt0cjlsNShw==",
"license": "MIT",
"dependencies": {
- "@babel/helper-plugin-utils": "^7.27.1"
+ "@babel/helper-plugin-utils": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
@@ -826,12 +759,12 @@
}
},
"node_modules/@babel/plugin-syntax-import-attributes": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.27.1.tgz",
- "integrity": "sha512-oFT0FrKHgF53f4vOsZGi2Hh3I35PfSmVs4IBFLFj4dnafP+hIWDLg3VyKmUHfLoLHlyxY4C7DGtmHuJgn+IGww==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.28.6.tgz",
+ "integrity": "sha512-jiLC0ma9XkQT3TKJ9uYvlakm66Pamywo+qwL+oL8HJOvc6TWdZXVfhqJr8CCzbSGUAbDOzlGHJC1U+vRfLQDvw==",
"license": "MIT",
"dependencies": {
- "@babel/helper-plugin-utils": "^7.27.1"
+ "@babel/helper-plugin-utils": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
@@ -841,12 +774,12 @@
}
},
"node_modules/@babel/plugin-syntax-jsx": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.27.1.tgz",
- "integrity": "sha512-y8YTNIeKoyhGd9O0Jiyzyyqk8gdjnumGTQPsz0xOZOQ2RmkVJeZ1vmmfIvFEKqucBG6axJGBZDE/7iI5suUI/w==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.28.6.tgz",
+ "integrity": "sha512-wgEmr06G6sIpqr8YDwA2dSRTE3bJ+V0IfpzfSY3Lfgd7YWOaAdlykvJi13ZKBt8cZHfgH1IXN+CL656W3uUa4w==",
"license": "MIT",
"dependencies": {
- "@babel/helper-plugin-utils": "^7.27.1"
+ "@babel/helper-plugin-utils": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
@@ -856,12 +789,12 @@
}
},
"node_modules/@babel/plugin-syntax-typescript": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.27.1.tgz",
- "integrity": "sha512-xfYCBMxveHrRMnAWl1ZlPXOZjzkN82THFvLhQhFXFt81Z5HnN+EtUkZhv/zcKpmT3fzmWZB0ywiBrbC3vogbwQ==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.28.6.tgz",
+ "integrity": "sha512-+nDNmQye7nlnuuHDboPbGm00Vqg3oO8niRRL27/4LYHUsHYh0zJ1xWOz0uRwNFmM1Avzk8wZbc6rdiYhomzv/A==",
"license": "MIT",
"dependencies": {
- "@babel/helper-plugin-utils": "^7.27.1"
+ "@babel/helper-plugin-utils": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
@@ -902,14 +835,14 @@
}
},
"node_modules/@babel/plugin-transform-async-generator-functions": {
- "version": "7.28.0",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-generator-functions/-/plugin-transform-async-generator-functions-7.28.0.tgz",
- "integrity": "sha512-BEOdvX4+M765icNPZeidyADIvQ1m1gmunXufXxvRESy/jNNyfovIqUyE7MVgGBjWktCoJlzvFA1To2O4ymIO3Q==",
+ "version": "7.29.0",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-generator-functions/-/plugin-transform-async-generator-functions-7.29.0.tgz",
+ "integrity": "sha512-va0VdWro4zlBr2JsXC+ofCPB2iG12wPtVGTWFx2WLDOM3nYQZZIGP82qku2eW/JR83sD+k2k+CsNtyEbUqhU6w==",
"license": "MIT",
"dependencies": {
- "@babel/helper-plugin-utils": "^7.27.1",
+ "@babel/helper-plugin-utils": "^7.28.6",
"@babel/helper-remap-async-to-generator": "^7.27.1",
- "@babel/traverse": "^7.28.0"
+ "@babel/traverse": "^7.29.0"
},
"engines": {
"node": ">=6.9.0"
@@ -919,13 +852,13 @@
}
},
"node_modules/@babel/plugin-transform-async-to-generator": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.27.1.tgz",
- "integrity": "sha512-NREkZsZVJS4xmTr8qzE5y8AfIPqsdQfRuUiLRTEzb7Qii8iFWCyDKaUV2c0rCuh4ljDZ98ALHP/PetiBV2nddA==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.28.6.tgz",
+ "integrity": "sha512-ilTRcmbuXjsMmcZ3HASTe4caH5Tpo93PkTxF9oG2VZsSWsahydmcEHhix9Ik122RcTnZnUzPbmux4wh1swfv7g==",
"license": "MIT",
"dependencies": {
- "@babel/helper-module-imports": "^7.27.1",
- "@babel/helper-plugin-utils": "^7.27.1",
+ "@babel/helper-module-imports": "^7.28.6",
+ "@babel/helper-plugin-utils": "^7.28.6",
"@babel/helper-remap-async-to-generator": "^7.27.1"
},
"engines": {
@@ -951,12 +884,12 @@
}
},
"node_modules/@babel/plugin-transform-block-scoping": {
- "version": "7.28.4",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.28.4.tgz",
- "integrity": "sha512-1yxmvN0MJHOhPVmAsmoW5liWwoILobu/d/ShymZmj867bAdxGbehIrew1DuLpw2Ukv+qDSSPQdYW1dLNE7t11A==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.28.6.tgz",
+ "integrity": "sha512-tt/7wOtBmwHPNMPu7ax4pdPz6shjFrmHDghvNC+FG9Qvj7D6mJcoRQIF5dy4njmxR941l6rgtvfSB2zX3VlUIw==",
"license": "MIT",
"dependencies": {
- "@babel/helper-plugin-utils": "^7.27.1"
+ "@babel/helper-plugin-utils": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
@@ -966,13 +899,13 @@
}
},
"node_modules/@babel/plugin-transform-class-properties": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-class-properties/-/plugin-transform-class-properties-7.27.1.tgz",
- "integrity": "sha512-D0VcalChDMtuRvJIu3U/fwWjf8ZMykz5iZsg77Nuj821vCKI3zCyRLwRdWbsuJ/uRwZhZ002QtCqIkwC/ZkvbA==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-class-properties/-/plugin-transform-class-properties-7.28.6.tgz",
+ "integrity": "sha512-dY2wS3I2G7D697VHndN91TJr8/AAfXQNt5ynCTI/MpxMsSzHp+52uNivYT5wCPax3whc47DR8Ba7cmlQMg24bw==",
"license": "MIT",
"dependencies": {
- "@babel/helper-create-class-features-plugin": "^7.27.1",
- "@babel/helper-plugin-utils": "^7.27.1"
+ "@babel/helper-create-class-features-plugin": "^7.28.6",
+ "@babel/helper-plugin-utils": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
@@ -982,13 +915,13 @@
}
},
"node_modules/@babel/plugin-transform-class-static-block": {
- "version": "7.28.3",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-class-static-block/-/plugin-transform-class-static-block-7.28.3.tgz",
- "integrity": "sha512-LtPXlBbRoc4Njl/oh1CeD/3jC+atytbnf/UqLoqTDcEYGUPj022+rvfkbDYieUrSj3CaV4yHDByPE+T2HwfsJg==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-class-static-block/-/plugin-transform-class-static-block-7.28.6.tgz",
+ "integrity": "sha512-rfQ++ghVwTWTqQ7w8qyDxL1XGihjBss4CmTgGRCTAC9RIbhVpyp4fOeZtta0Lbf+dTNIVJer6ych2ibHwkZqsQ==",
"license": "MIT",
"dependencies": {
- "@babel/helper-create-class-features-plugin": "^7.28.3",
- "@babel/helper-plugin-utils": "^7.27.1"
+ "@babel/helper-create-class-features-plugin": "^7.28.6",
+ "@babel/helper-plugin-utils": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
@@ -998,17 +931,17 @@
}
},
"node_modules/@babel/plugin-transform-classes": {
- "version": "7.28.4",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.28.4.tgz",
- "integrity": "sha512-cFOlhIYPBv/iBoc+KS3M6et2XPtbT2HiCRfBXWtfpc9OAyostldxIf9YAYB6ypURBBbx+Qv6nyrLzASfJe+hBA==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.28.6.tgz",
+ "integrity": "sha512-EF5KONAqC5zAqT783iMGuM2ZtmEBy+mJMOKl2BCvPZ2lVrwvXnB6o+OBWCS+CoeCCpVRF2sA2RBKUxvT8tQT5Q==",
"license": "MIT",
"dependencies": {
"@babel/helper-annotate-as-pure": "^7.27.3",
- "@babel/helper-compilation-targets": "^7.27.2",
+ "@babel/helper-compilation-targets": "^7.28.6",
"@babel/helper-globals": "^7.28.0",
- "@babel/helper-plugin-utils": "^7.27.1",
- "@babel/helper-replace-supers": "^7.27.1",
- "@babel/traverse": "^7.28.4"
+ "@babel/helper-plugin-utils": "^7.28.6",
+ "@babel/helper-replace-supers": "^7.28.6",
+ "@babel/traverse": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
@@ -1018,13 +951,13 @@
}
},
"node_modules/@babel/plugin-transform-computed-properties": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.27.1.tgz",
- "integrity": "sha512-lj9PGWvMTVksbWiDT2tW68zGS/cyo4AkZ/QTp0sQT0mjPopCmrSkzxeXkznjqBxzDI6TclZhOJbBmbBLjuOZUw==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.28.6.tgz",
+ "integrity": "sha512-bcc3k0ijhHbc2lEfpFHgx7eYw9KNXqOerKWfzbxEHUGKnS3sz9C4CNL9OiFN1297bDNfUiSO7DaLzbvHQQQ1BQ==",
"license": "MIT",
"dependencies": {
- "@babel/helper-plugin-utils": "^7.27.1",
- "@babel/template": "^7.27.1"
+ "@babel/helper-plugin-utils": "^7.28.6",
+ "@babel/template": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
@@ -1034,13 +967,13 @@
}
},
"node_modules/@babel/plugin-transform-destructuring": {
- "version": "7.28.0",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.28.0.tgz",
- "integrity": "sha512-v1nrSMBiKcodhsyJ4Gf+Z0U/yawmJDBOTpEB3mcQY52r9RIyPneGyAS/yM6seP/8I+mWI3elOMtT5dB8GJVs+A==",
+ "version": "7.28.5",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.28.5.tgz",
+ "integrity": "sha512-Kl9Bc6D0zTUcFUvkNuQh4eGXPKKNDOJQXVyyM4ZAQPMveniJdxi8XMJwLo+xSoW3MIq81bD33lcUe9kZpl0MCw==",
"license": "MIT",
"dependencies": {
"@babel/helper-plugin-utils": "^7.27.1",
- "@babel/traverse": "^7.28.0"
+ "@babel/traverse": "^7.28.5"
},
"engines": {
"node": ">=6.9.0"
@@ -1050,13 +983,13 @@
}
},
"node_modules/@babel/plugin-transform-dotall-regex": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.27.1.tgz",
- "integrity": "sha512-gEbkDVGRvjj7+T1ivxrfgygpT7GUd4vmODtYpbs0gZATdkX8/iSnOtZSxiZnsgm1YjTgjI6VKBGSJJevkrclzw==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.28.6.tgz",
+ "integrity": "sha512-SljjowuNKB7q5Oayv4FoPzeB74g3QgLt8IVJw9ADvWy3QnUb/01aw8I4AVv8wYnPvQz2GDDZ/g3GhcNyDBI4Bg==",
"license": "MIT",
"dependencies": {
- "@babel/helper-create-regexp-features-plugin": "^7.27.1",
- "@babel/helper-plugin-utils": "^7.27.1"
+ "@babel/helper-create-regexp-features-plugin": "^7.28.5",
+ "@babel/helper-plugin-utils": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
@@ -1081,13 +1014,13 @@
}
},
"node_modules/@babel/plugin-transform-duplicate-named-capturing-groups-regex": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-duplicate-named-capturing-groups-regex/-/plugin-transform-duplicate-named-capturing-groups-regex-7.27.1.tgz",
- "integrity": "sha512-hkGcueTEzuhB30B3eJCbCYeCaaEQOmQR0AdvzpD4LoN0GXMWzzGSuRrxR2xTnCrvNbVwK9N6/jQ92GSLfiZWoQ==",
+ "version": "7.29.0",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-duplicate-named-capturing-groups-regex/-/plugin-transform-duplicate-named-capturing-groups-regex-7.29.0.tgz",
+ "integrity": "sha512-zBPcW2lFGxdiD8PUnPwJjag2J9otbcLQzvbiOzDxpYXyCuYX9agOwMPGn1prVH0a4qzhCKu24rlH4c1f7yA8rw==",
"license": "MIT",
"dependencies": {
- "@babel/helper-create-regexp-features-plugin": "^7.27.1",
- "@babel/helper-plugin-utils": "^7.27.1"
+ "@babel/helper-create-regexp-features-plugin": "^7.28.5",
+ "@babel/helper-plugin-utils": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
@@ -1112,13 +1045,13 @@
}
},
"node_modules/@babel/plugin-transform-explicit-resource-management": {
- "version": "7.28.0",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-explicit-resource-management/-/plugin-transform-explicit-resource-management-7.28.0.tgz",
- "integrity": "sha512-K8nhUcn3f6iB+P3gwCv/no7OdzOZQcKchW6N389V6PD8NUWKZHzndOd9sPDVbMoBsbmjMqlB4L9fm+fEFNVlwQ==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-explicit-resource-management/-/plugin-transform-explicit-resource-management-7.28.6.tgz",
+ "integrity": "sha512-Iao5Konzx2b6g7EPqTy40UZbcdXE126tTxVFr/nAIj+WItNxjKSYTEw3RC+A2/ZetmdJsgueL1KhaMCQHkLPIg==",
"license": "MIT",
"dependencies": {
- "@babel/helper-plugin-utils": "^7.27.1",
- "@babel/plugin-transform-destructuring": "^7.28.0"
+ "@babel/helper-plugin-utils": "^7.28.6",
+ "@babel/plugin-transform-destructuring": "^7.28.5"
},
"engines": {
"node": ">=6.9.0"
@@ -1128,12 +1061,12 @@
}
},
"node_modules/@babel/plugin-transform-exponentiation-operator": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.27.1.tgz",
- "integrity": "sha512-uspvXnhHvGKf2r4VVtBpeFnuDWsJLQ6MF6lGJLC89jBR1uoVeqM416AZtTuhTezOfgHicpJQmoD5YUakO/YmXQ==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.28.6.tgz",
+ "integrity": "sha512-WitabqiGjV/vJ0aPOLSFfNY1u9U3R7W36B03r5I2KoNix+a3sOhJ3pKFB3R5It9/UiK78NiO0KE9P21cMhlPkw==",
"license": "MIT",
"dependencies": {
- "@babel/helper-plugin-utils": "^7.27.1"
+ "@babel/helper-plugin-utils": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
@@ -1191,12 +1124,12 @@
}
},
"node_modules/@babel/plugin-transform-json-strings": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-json-strings/-/plugin-transform-json-strings-7.27.1.tgz",
- "integrity": "sha512-6WVLVJiTjqcQauBhn1LkICsR2H+zm62I3h9faTDKt1qP4jn2o72tSvqMwtGFKGTpojce0gJs+76eZ2uCHRZh0Q==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-json-strings/-/plugin-transform-json-strings-7.28.6.tgz",
+ "integrity": "sha512-Nr+hEN+0geQkzhbdgQVPoqr47lZbm+5fCUmO70722xJZd0Mvb59+33QLImGj6F+DkK3xgDi1YVysP8whD6FQAw==",
"license": "MIT",
"dependencies": {
- "@babel/helper-plugin-utils": "^7.27.1"
+ "@babel/helper-plugin-utils": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
@@ -1221,12 +1154,12 @@
}
},
"node_modules/@babel/plugin-transform-logical-assignment-operators": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-logical-assignment-operators/-/plugin-transform-logical-assignment-operators-7.27.1.tgz",
- "integrity": "sha512-SJvDs5dXxiae4FbSL1aBJlG4wvl594N6YEVVn9e3JGulwioy6z3oPjx/sQBO3Y4NwUu5HNix6KJ3wBZoewcdbw==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-logical-assignment-operators/-/plugin-transform-logical-assignment-operators-7.28.6.tgz",
+ "integrity": "sha512-+anKKair6gpi8VsM/95kmomGNMD0eLz1NQ8+Pfw5sAwWH9fGYXT50E55ZpV0pHUHWf6IUTWPM+f/7AAff+wr9A==",
"license": "MIT",
"dependencies": {
- "@babel/helper-plugin-utils": "^7.27.1"
+ "@babel/helper-plugin-utils": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
@@ -1267,13 +1200,13 @@
}
},
"node_modules/@babel/plugin-transform-modules-commonjs": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.27.1.tgz",
- "integrity": "sha512-OJguuwlTYlN0gBZFRPqwOGNWssZjfIUdS7HMYtN8c1KmwpwHFBwTeFZrg9XZa+DFTitWOW5iTAG7tyCUPsCCyw==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.28.6.tgz",
+ "integrity": "sha512-jppVbf8IV9iWWwWTQIxJMAJCWBuuKx71475wHwYytrRGQ2CWiDvYlADQno3tcYpS/T2UUWFQp3nVtYfK/YBQrA==",
"license": "MIT",
"dependencies": {
- "@babel/helper-module-transforms": "^7.27.1",
- "@babel/helper-plugin-utils": "^7.27.1"
+ "@babel/helper-module-transforms": "^7.28.6",
+ "@babel/helper-plugin-utils": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
@@ -1283,15 +1216,15 @@
}
},
"node_modules/@babel/plugin-transform-modules-systemjs": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.27.1.tgz",
- "integrity": "sha512-w5N1XzsRbc0PQStASMksmUeqECuzKuTJer7kFagK8AXgpCMkeDMO5S+aaFb7A51ZYDF7XI34qsTX+fkHiIm5yA==",
+ "version": "7.29.0",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.29.0.tgz",
+ "integrity": "sha512-PrujnVFbOdUpw4UHiVwKvKRLMMic8+eC0CuNlxjsyZUiBjhFdPsewdXCkveh2KqBA9/waD0W1b4hXSOBQJezpQ==",
"license": "MIT",
"dependencies": {
- "@babel/helper-module-transforms": "^7.27.1",
- "@babel/helper-plugin-utils": "^7.27.1",
- "@babel/helper-validator-identifier": "^7.27.1",
- "@babel/traverse": "^7.27.1"
+ "@babel/helper-module-transforms": "^7.28.6",
+ "@babel/helper-plugin-utils": "^7.28.6",
+ "@babel/helper-validator-identifier": "^7.28.5",
+ "@babel/traverse": "^7.29.0"
},
"engines": {
"node": ">=6.9.0"
@@ -1317,13 +1250,13 @@
}
},
"node_modules/@babel/plugin-transform-named-capturing-groups-regex": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.27.1.tgz",
- "integrity": "sha512-SstR5JYy8ddZvD6MhV0tM/j16Qds4mIpJTOd1Yu9J9pJjH93bxHECF7pgtc28XvkzTD6Pxcm/0Z73Hvk7kb3Ng==",
+ "version": "7.29.0",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.29.0.tgz",
+ "integrity": "sha512-1CZQA5KNAD6ZYQLPw7oi5ewtDNxH/2vuCh+6SmvgDfhumForvs8a1o9n0UrEoBD8HU4djO2yWngTQlXl1NDVEQ==",
"license": "MIT",
"dependencies": {
- "@babel/helper-create-regexp-features-plugin": "^7.27.1",
- "@babel/helper-plugin-utils": "^7.27.1"
+ "@babel/helper-create-regexp-features-plugin": "^7.28.5",
+ "@babel/helper-plugin-utils": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
@@ -1348,12 +1281,12 @@
}
},
"node_modules/@babel/plugin-transform-nullish-coalescing-operator": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-nullish-coalescing-operator/-/plugin-transform-nullish-coalescing-operator-7.27.1.tgz",
- "integrity": "sha512-aGZh6xMo6q9vq1JGcw58lZ1Z0+i0xB2x0XaauNIUXd6O1xXc3RwoWEBlsTQrY4KQ9Jf0s5rgD6SiNkaUdJegTA==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-nullish-coalescing-operator/-/plugin-transform-nullish-coalescing-operator-7.28.6.tgz",
+ "integrity": "sha512-3wKbRgmzYbw24mDJXT7N+ADXw8BC/imU9yo9c9X9NKaLF1fW+e5H1U5QjMUBe4Qo4Ox/o++IyUkl1sVCLgevKg==",
"license": "MIT",
"dependencies": {
- "@babel/helper-plugin-utils": "^7.27.1"
+ "@babel/helper-plugin-utils": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
@@ -1363,12 +1296,12 @@
}
},
"node_modules/@babel/plugin-transform-numeric-separator": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-numeric-separator/-/plugin-transform-numeric-separator-7.27.1.tgz",
- "integrity": "sha512-fdPKAcujuvEChxDBJ5c+0BTaS6revLV7CJL08e4m3de8qJfNIuCc2nc7XJYOjBoTMJeqSmwXJ0ypE14RCjLwaw==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-numeric-separator/-/plugin-transform-numeric-separator-7.28.6.tgz",
+ "integrity": "sha512-SJR8hPynj8outz+SlStQSwvziMN4+Bq99it4tMIf5/Caq+3iOc0JtKyse8puvyXkk3eFRIA5ID/XfunGgO5i6w==",
"license": "MIT",
"dependencies": {
- "@babel/helper-plugin-utils": "^7.27.1"
+ "@babel/helper-plugin-utils": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
@@ -1378,16 +1311,16 @@
}
},
"node_modules/@babel/plugin-transform-object-rest-spread": {
- "version": "7.28.4",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-rest-spread/-/plugin-transform-object-rest-spread-7.28.4.tgz",
- "integrity": "sha512-373KA2HQzKhQCYiRVIRr+3MjpCObqzDlyrM6u4I201wL8Mp2wHf7uB8GhDwis03k2ti8Zr65Zyyqs1xOxUF/Ew==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-rest-spread/-/plugin-transform-object-rest-spread-7.28.6.tgz",
+ "integrity": "sha512-5rh+JR4JBC4pGkXLAcYdLHZjXudVxWMXbB6u6+E9lRL5TrGVbHt1TjxGbZ8CkmYw9zjkB7jutzOROArsqtncEA==",
"license": "MIT",
"dependencies": {
- "@babel/helper-compilation-targets": "^7.27.2",
- "@babel/helper-plugin-utils": "^7.27.1",
- "@babel/plugin-transform-destructuring": "^7.28.0",
+ "@babel/helper-compilation-targets": "^7.28.6",
+ "@babel/helper-plugin-utils": "^7.28.6",
+ "@babel/plugin-transform-destructuring": "^7.28.5",
"@babel/plugin-transform-parameters": "^7.27.7",
- "@babel/traverse": "^7.28.4"
+ "@babel/traverse": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
@@ -1413,12 +1346,12 @@
}
},
"node_modules/@babel/plugin-transform-optional-catch-binding": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-optional-catch-binding/-/plugin-transform-optional-catch-binding-7.27.1.tgz",
- "integrity": "sha512-txEAEKzYrHEX4xSZN4kJ+OfKXFVSWKB2ZxM9dpcE3wT7smwkNmXo5ORRlVzMVdJbD+Q8ILTgSD7959uj+3Dm3Q==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-optional-catch-binding/-/plugin-transform-optional-catch-binding-7.28.6.tgz",
+ "integrity": "sha512-R8ja/Pyrv0OGAvAXQhSTmWyPJPml+0TMqXlO5w+AsMEiwb2fg3WkOvob7UxFSL3OIttFSGSRFKQsOhJ/X6HQdQ==",
"license": "MIT",
"dependencies": {
- "@babel/helper-plugin-utils": "^7.27.1"
+ "@babel/helper-plugin-utils": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
@@ -1428,12 +1361,12 @@
}
},
"node_modules/@babel/plugin-transform-optional-chaining": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-optional-chaining/-/plugin-transform-optional-chaining-7.27.1.tgz",
- "integrity": "sha512-BQmKPPIuc8EkZgNKsv0X4bPmOoayeu4F1YCwx2/CfmDSXDbp7GnzlUH+/ul5VGfRg1AoFPsrIThlEBj2xb4CAg==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-optional-chaining/-/plugin-transform-optional-chaining-7.28.6.tgz",
+ "integrity": "sha512-A4zobikRGJTsX9uqVFdafzGkqD30t26ck2LmOzAuLL8b2x6k3TIqRiT2xVvA9fNmFeTX484VpsdgmKNA0bS23w==",
"license": "MIT",
"dependencies": {
- "@babel/helper-plugin-utils": "^7.27.1",
+ "@babel/helper-plugin-utils": "^7.28.6",
"@babel/helper-skip-transparent-expression-wrappers": "^7.27.1"
},
"engines": {
@@ -1459,13 +1392,13 @@
}
},
"node_modules/@babel/plugin-transform-private-methods": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-private-methods/-/plugin-transform-private-methods-7.27.1.tgz",
- "integrity": "sha512-10FVt+X55AjRAYI9BrdISN9/AQWHqldOeZDUoLyif1Kn05a56xVBXb8ZouL8pZ9jem8QpXaOt8TS7RHUIS+GPA==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-private-methods/-/plugin-transform-private-methods-7.28.6.tgz",
+ "integrity": "sha512-piiuapX9CRv7+0st8lmuUlRSmX6mBcVeNQ1b4AYzJxfCMuBfB0vBXDiGSmm03pKJw1v6cZ8KSeM+oUnM6yAExg==",
"license": "MIT",
"dependencies": {
- "@babel/helper-create-class-features-plugin": "^7.27.1",
- "@babel/helper-plugin-utils": "^7.27.1"
+ "@babel/helper-create-class-features-plugin": "^7.28.6",
+ "@babel/helper-plugin-utils": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
@@ -1475,14 +1408,14 @@
}
},
"node_modules/@babel/plugin-transform-private-property-in-object": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-private-property-in-object/-/plugin-transform-private-property-in-object-7.27.1.tgz",
- "integrity": "sha512-5J+IhqTi1XPa0DXF83jYOaARrX+41gOewWbkPyjMNRDqgOCqdffGh8L3f/Ek5utaEBZExjSAzcyjmV9SSAWObQ==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-private-property-in-object/-/plugin-transform-private-property-in-object-7.28.6.tgz",
+ "integrity": "sha512-b97jvNSOb5+ehyQmBpmhOCiUC5oVK4PMnpRvO7+ymFBoqYjeDHIU9jnrNUuwHOiL9RpGDoKBpSViarV+BU+eVA==",
"license": "MIT",
"dependencies": {
- "@babel/helper-annotate-as-pure": "^7.27.1",
- "@babel/helper-create-class-features-plugin": "^7.27.1",
- "@babel/helper-plugin-utils": "^7.27.1"
+ "@babel/helper-annotate-as-pure": "^7.27.3",
+ "@babel/helper-create-class-features-plugin": "^7.28.6",
+ "@babel/helper-plugin-utils": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
@@ -1537,16 +1470,16 @@
}
},
"node_modules/@babel/plugin-transform-react-jsx": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.27.1.tgz",
- "integrity": "sha512-2KH4LWGSrJIkVf5tSiBFYuXDAoWRq2MMwgivCf+93dd0GQi8RXLjKA/0EvRnVV5G0hrHczsquXuD01L8s6dmBw==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.28.6.tgz",
+ "integrity": "sha512-61bxqhiRfAACulXSLd/GxqmAedUSrRZIu/cbaT18T1CetkTmtDN15it7i80ru4DVqRK1WMxQhXs+Lf9kajm5Ow==",
"license": "MIT",
"dependencies": {
- "@babel/helper-annotate-as-pure": "^7.27.1",
- "@babel/helper-module-imports": "^7.27.1",
- "@babel/helper-plugin-utils": "^7.27.1",
- "@babel/plugin-syntax-jsx": "^7.27.1",
- "@babel/types": "^7.27.1"
+ "@babel/helper-annotate-as-pure": "^7.27.3",
+ "@babel/helper-module-imports": "^7.28.6",
+ "@babel/helper-plugin-utils": "^7.28.6",
+ "@babel/plugin-syntax-jsx": "^7.28.6",
+ "@babel/types": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
@@ -1587,12 +1520,12 @@
}
},
"node_modules/@babel/plugin-transform-regenerator": {
- "version": "7.28.4",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.28.4.tgz",
- "integrity": "sha512-+ZEdQlBoRg9m2NnzvEeLgtvBMO4tkFBw5SQIUgLICgTrumLoU7lr+Oghi6km2PFj+dbUt2u1oby2w3BDO9YQnA==",
+ "version": "7.29.0",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.29.0.tgz",
+ "integrity": "sha512-FijqlqMA7DmRdg/aINBSs04y8XNTYw/lr1gJ2WsmBnnaNw1iS43EPkJW+zK7z65auG3AWRFXWj+NcTQwYptUog==",
"license": "MIT",
"dependencies": {
- "@babel/helper-plugin-utils": "^7.27.1"
+ "@babel/helper-plugin-utils": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
@@ -1602,13 +1535,13 @@
}
},
"node_modules/@babel/plugin-transform-regexp-modifiers": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regexp-modifiers/-/plugin-transform-regexp-modifiers-7.27.1.tgz",
- "integrity": "sha512-TtEciroaiODtXvLZv4rmfMhkCv8jx3wgKpL68PuiPh2M4fvz5jhsA7697N1gMvkvr/JTF13DrFYyEbY9U7cVPA==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regexp-modifiers/-/plugin-transform-regexp-modifiers-7.28.6.tgz",
+ "integrity": "sha512-QGWAepm9qxpaIs7UM9FvUSnCGlb8Ua1RhyM4/veAxLwt3gMat/LSGrZixyuj4I6+Kn9iwvqCyPTtbdxanYoWYg==",
"license": "MIT",
"dependencies": {
- "@babel/helper-create-regexp-features-plugin": "^7.27.1",
- "@babel/helper-plugin-utils": "^7.27.1"
+ "@babel/helper-create-regexp-features-plugin": "^7.28.5",
+ "@babel/helper-plugin-utils": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
@@ -1633,13 +1566,13 @@
}
},
"node_modules/@babel/plugin-transform-runtime": {
- "version": "7.28.3",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.28.3.tgz",
- "integrity": "sha512-Y6ab1kGqZ0u42Zv/4a7l0l72n9DKP/MKoKWaUSBylrhNZO2prYuqFOLbn5aW5SIFXwSH93yfjbgllL8lxuGKLg==",
+ "version": "7.29.0",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.29.0.tgz",
+ "integrity": "sha512-jlaRT5dJtMaMCV6fAuLbsQMSwz/QkvaHOHOSXRitGGwSpR1blCY4KUKoyP2tYO8vJcqYe8cEj96cqSztv3uF9w==",
"license": "MIT",
"dependencies": {
- "@babel/helper-module-imports": "^7.27.1",
- "@babel/helper-plugin-utils": "^7.27.1",
+ "@babel/helper-module-imports": "^7.28.6",
+ "@babel/helper-plugin-utils": "^7.28.6",
"babel-plugin-polyfill-corejs2": "^0.4.14",
"babel-plugin-polyfill-corejs3": "^0.13.0",
"babel-plugin-polyfill-regenerator": "^0.6.5",
@@ -1677,12 +1610,12 @@
}
},
"node_modules/@babel/plugin-transform-spread": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.27.1.tgz",
- "integrity": "sha512-kpb3HUqaILBJcRFVhFUs6Trdd4mkrzcGXss+6/mxUd273PfbWqSDHRzMT2234gIg2QYfAjvXLSquP1xECSg09Q==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.28.6.tgz",
+ "integrity": "sha512-9U4QObUC0FtJl05AsUcodau/RWDytrU6uKgkxu09mLR9HLDAtUMoPuuskm5huQsoktmsYpI+bGmq+iapDcriKA==",
"license": "MIT",
"dependencies": {
- "@babel/helper-plugin-utils": "^7.27.1",
+ "@babel/helper-plugin-utils": "^7.28.6",
"@babel/helper-skip-transparent-expression-wrappers": "^7.27.1"
},
"engines": {
@@ -1738,16 +1671,16 @@
}
},
"node_modules/@babel/plugin-transform-typescript": {
- "version": "7.28.0",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.28.0.tgz",
- "integrity": "sha512-4AEiDEBPIZvLQaWlc9liCavE0xRM0dNca41WtBeM3jgFptfUOSG9z0uteLhq6+3rq+WB6jIvUwKDTpXEHPJ2Vg==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.28.6.tgz",
+ "integrity": "sha512-0YWL2RFxOqEm9Efk5PvreamxPME8OyY0wM5wh5lHjF+VtVhdneCWGzZeSqzOfiobVqQaNCd2z0tQvnI9DaPWPw==",
"license": "MIT",
"dependencies": {
"@babel/helper-annotate-as-pure": "^7.27.3",
- "@babel/helper-create-class-features-plugin": "^7.27.1",
- "@babel/helper-plugin-utils": "^7.27.1",
+ "@babel/helper-create-class-features-plugin": "^7.28.6",
+ "@babel/helper-plugin-utils": "^7.28.6",
"@babel/helper-skip-transparent-expression-wrappers": "^7.27.1",
- "@babel/plugin-syntax-typescript": "^7.27.1"
+ "@babel/plugin-syntax-typescript": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
@@ -1772,13 +1705,13 @@
}
},
"node_modules/@babel/plugin-transform-unicode-property-regex": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-property-regex/-/plugin-transform-unicode-property-regex-7.27.1.tgz",
- "integrity": "sha512-uW20S39PnaTImxp39O5qFlHLS9LJEmANjMG7SxIhap8rCHqu0Ik+tLEPX5DKmHn6CsWQ7j3lix2tFOa5YtL12Q==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-property-regex/-/plugin-transform-unicode-property-regex-7.28.6.tgz",
+ "integrity": "sha512-4Wlbdl/sIZjzi/8St0evF0gEZrgOswVO6aOzqxh1kDZOl9WmLrHq2HtGhnOJZmHZYKP8WZ1MDLCt5DAWwRo57A==",
"license": "MIT",
"dependencies": {
- "@babel/helper-create-regexp-features-plugin": "^7.27.1",
- "@babel/helper-plugin-utils": "^7.27.1"
+ "@babel/helper-create-regexp-features-plugin": "^7.28.5",
+ "@babel/helper-plugin-utils": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
@@ -1804,13 +1737,13 @@
}
},
"node_modules/@babel/plugin-transform-unicode-sets-regex": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-sets-regex/-/plugin-transform-unicode-sets-regex-7.27.1.tgz",
- "integrity": "sha512-EtkOujbc4cgvb0mlpQefi4NTPBzhSIevblFevACNLUspmrALgmEBdL/XfnyyITfd8fKBZrZys92zOWcik7j9Tw==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-sets-regex/-/plugin-transform-unicode-sets-regex-7.28.6.tgz",
+ "integrity": "sha512-/wHc/paTUmsDYN7SZkpWxogTOBNnlx7nBQYfy6JJlCT7G3mVhltk3e++N7zV0XfgGsrqBxd4rJQt9H16I21Y1Q==",
"license": "MIT",
"dependencies": {
- "@babel/helper-create-regexp-features-plugin": "^7.27.1",
- "@babel/helper-plugin-utils": "^7.27.1"
+ "@babel/helper-create-regexp-features-plugin": "^7.28.5",
+ "@babel/helper-plugin-utils": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
@@ -1820,80 +1753,80 @@
}
},
"node_modules/@babel/preset-env": {
- "version": "7.28.3",
- "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.28.3.tgz",
- "integrity": "sha512-ROiDcM+GbYVPYBOeCR6uBXKkQpBExLl8k9HO1ygXEyds39j+vCCsjmj7S8GOniZQlEs81QlkdJZe76IpLSiqpg==",
+ "version": "7.29.0",
+ "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.29.0.tgz",
+ "integrity": "sha512-fNEdfc0yi16lt6IZo2Qxk3knHVdfMYX33czNb4v8yWhemoBhibCpQK/uYHtSKIiO+p/zd3+8fYVXhQdOVV608w==",
"license": "MIT",
"dependencies": {
- "@babel/compat-data": "^7.28.0",
- "@babel/helper-compilation-targets": "^7.27.2",
- "@babel/helper-plugin-utils": "^7.27.1",
+ "@babel/compat-data": "^7.29.0",
+ "@babel/helper-compilation-targets": "^7.28.6",
+ "@babel/helper-plugin-utils": "^7.28.6",
"@babel/helper-validator-option": "^7.27.1",
- "@babel/plugin-bugfix-firefox-class-in-computed-class-key": "^7.27.1",
+ "@babel/plugin-bugfix-firefox-class-in-computed-class-key": "^7.28.5",
"@babel/plugin-bugfix-safari-class-field-initializer-scope": "^7.27.1",
"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": "^7.27.1",
"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": "^7.27.1",
- "@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": "^7.28.3",
+ "@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": "^7.28.6",
"@babel/plugin-proposal-private-property-in-object": "7.21.0-placeholder-for-preset-env.2",
- "@babel/plugin-syntax-import-assertions": "^7.27.1",
- "@babel/plugin-syntax-import-attributes": "^7.27.1",
+ "@babel/plugin-syntax-import-assertions": "^7.28.6",
+ "@babel/plugin-syntax-import-attributes": "^7.28.6",
"@babel/plugin-syntax-unicode-sets-regex": "^7.18.6",
"@babel/plugin-transform-arrow-functions": "^7.27.1",
- "@babel/plugin-transform-async-generator-functions": "^7.28.0",
- "@babel/plugin-transform-async-to-generator": "^7.27.1",
+ "@babel/plugin-transform-async-generator-functions": "^7.29.0",
+ "@babel/plugin-transform-async-to-generator": "^7.28.6",
"@babel/plugin-transform-block-scoped-functions": "^7.27.1",
- "@babel/plugin-transform-block-scoping": "^7.28.0",
- "@babel/plugin-transform-class-properties": "^7.27.1",
- "@babel/plugin-transform-class-static-block": "^7.28.3",
- "@babel/plugin-transform-classes": "^7.28.3",
- "@babel/plugin-transform-computed-properties": "^7.27.1",
- "@babel/plugin-transform-destructuring": "^7.28.0",
- "@babel/plugin-transform-dotall-regex": "^7.27.1",
+ "@babel/plugin-transform-block-scoping": "^7.28.6",
+ "@babel/plugin-transform-class-properties": "^7.28.6",
+ "@babel/plugin-transform-class-static-block": "^7.28.6",
+ "@babel/plugin-transform-classes": "^7.28.6",
+ "@babel/plugin-transform-computed-properties": "^7.28.6",
+ "@babel/plugin-transform-destructuring": "^7.28.5",
+ "@babel/plugin-transform-dotall-regex": "^7.28.6",
"@babel/plugin-transform-duplicate-keys": "^7.27.1",
- "@babel/plugin-transform-duplicate-named-capturing-groups-regex": "^7.27.1",
+ "@babel/plugin-transform-duplicate-named-capturing-groups-regex": "^7.29.0",
"@babel/plugin-transform-dynamic-import": "^7.27.1",
- "@babel/plugin-transform-explicit-resource-management": "^7.28.0",
- "@babel/plugin-transform-exponentiation-operator": "^7.27.1",
+ "@babel/plugin-transform-explicit-resource-management": "^7.28.6",
+ "@babel/plugin-transform-exponentiation-operator": "^7.28.6",
"@babel/plugin-transform-export-namespace-from": "^7.27.1",
"@babel/plugin-transform-for-of": "^7.27.1",
"@babel/plugin-transform-function-name": "^7.27.1",
- "@babel/plugin-transform-json-strings": "^7.27.1",
+ "@babel/plugin-transform-json-strings": "^7.28.6",
"@babel/plugin-transform-literals": "^7.27.1",
- "@babel/plugin-transform-logical-assignment-operators": "^7.27.1",
+ "@babel/plugin-transform-logical-assignment-operators": "^7.28.6",
"@babel/plugin-transform-member-expression-literals": "^7.27.1",
"@babel/plugin-transform-modules-amd": "^7.27.1",
- "@babel/plugin-transform-modules-commonjs": "^7.27.1",
- "@babel/plugin-transform-modules-systemjs": "^7.27.1",
+ "@babel/plugin-transform-modules-commonjs": "^7.28.6",
+ "@babel/plugin-transform-modules-systemjs": "^7.29.0",
"@babel/plugin-transform-modules-umd": "^7.27.1",
- "@babel/plugin-transform-named-capturing-groups-regex": "^7.27.1",
+ "@babel/plugin-transform-named-capturing-groups-regex": "^7.29.0",
"@babel/plugin-transform-new-target": "^7.27.1",
- "@babel/plugin-transform-nullish-coalescing-operator": "^7.27.1",
- "@babel/plugin-transform-numeric-separator": "^7.27.1",
- "@babel/plugin-transform-object-rest-spread": "^7.28.0",
+ "@babel/plugin-transform-nullish-coalescing-operator": "^7.28.6",
+ "@babel/plugin-transform-numeric-separator": "^7.28.6",
+ "@babel/plugin-transform-object-rest-spread": "^7.28.6",
"@babel/plugin-transform-object-super": "^7.27.1",
- "@babel/plugin-transform-optional-catch-binding": "^7.27.1",
- "@babel/plugin-transform-optional-chaining": "^7.27.1",
+ "@babel/plugin-transform-optional-catch-binding": "^7.28.6",
+ "@babel/plugin-transform-optional-chaining": "^7.28.6",
"@babel/plugin-transform-parameters": "^7.27.7",
- "@babel/plugin-transform-private-methods": "^7.27.1",
- "@babel/plugin-transform-private-property-in-object": "^7.27.1",
+ "@babel/plugin-transform-private-methods": "^7.28.6",
+ "@babel/plugin-transform-private-property-in-object": "^7.28.6",
"@babel/plugin-transform-property-literals": "^7.27.1",
- "@babel/plugin-transform-regenerator": "^7.28.3",
- "@babel/plugin-transform-regexp-modifiers": "^7.27.1",
+ "@babel/plugin-transform-regenerator": "^7.29.0",
+ "@babel/plugin-transform-regexp-modifiers": "^7.28.6",
"@babel/plugin-transform-reserved-words": "^7.27.1",
"@babel/plugin-transform-shorthand-properties": "^7.27.1",
- "@babel/plugin-transform-spread": "^7.27.1",
+ "@babel/plugin-transform-spread": "^7.28.6",
"@babel/plugin-transform-sticky-regex": "^7.27.1",
"@babel/plugin-transform-template-literals": "^7.27.1",
"@babel/plugin-transform-typeof-symbol": "^7.27.1",
"@babel/plugin-transform-unicode-escapes": "^7.27.1",
- "@babel/plugin-transform-unicode-property-regex": "^7.27.1",
+ "@babel/plugin-transform-unicode-property-regex": "^7.28.6",
"@babel/plugin-transform-unicode-regex": "^7.27.1",
- "@babel/plugin-transform-unicode-sets-regex": "^7.27.1",
+ "@babel/plugin-transform-unicode-sets-regex": "^7.28.6",
"@babel/preset-modules": "0.1.6-no-external-plugins",
- "babel-plugin-polyfill-corejs2": "^0.4.14",
- "babel-plugin-polyfill-corejs3": "^0.13.0",
- "babel-plugin-polyfill-regenerator": "^0.6.5",
- "core-js-compat": "^3.43.0",
+ "babel-plugin-polyfill-corejs2": "^0.4.15",
+ "babel-plugin-polyfill-corejs3": "^0.14.0",
+ "babel-plugin-polyfill-regenerator": "^0.6.6",
+ "core-js-compat": "^3.48.0",
"semver": "^6.3.1"
},
"engines": {
@@ -1903,6 +1836,19 @@
"@babel/core": "^7.0.0-0"
}
},
+ "node_modules/@babel/preset-env/node_modules/babel-plugin-polyfill-corejs3": {
+ "version": "0.14.0",
+ "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.14.0.tgz",
+ "integrity": "sha512-AvDcMxJ34W4Wgy4KBIIePQTAOP1Ie2WFwkQp3dB7FQ/f0lI5+nM96zUnYEOE1P9sEg0es5VCP0HxiWu5fUHZAQ==",
+ "license": "MIT",
+ "dependencies": {
+ "@babel/helper-define-polyfill-provider": "^0.6.6",
+ "core-js-compat": "^3.48.0"
+ },
+ "peerDependencies": {
+ "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0"
+ }
+ },
"node_modules/@babel/preset-env/node_modules/semver": {
"version": "6.3.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
@@ -1927,14 +1873,14 @@
}
},
"node_modules/@babel/preset-react": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/preset-react/-/preset-react-7.27.1.tgz",
- "integrity": "sha512-oJHWh2gLhU9dW9HHr42q0cI0/iHHXTLGe39qvpAZZzagHy0MzYLCnCVV0symeRvzmjHyVU7mw2K06E6u/JwbhA==",
+ "version": "7.28.5",
+ "resolved": "https://registry.npmjs.org/@babel/preset-react/-/preset-react-7.28.5.tgz",
+ "integrity": "sha512-Z3J8vhRq7CeLjdC58jLv4lnZ5RKFUJWqH5emvxmv9Hv3BD1T9R/Im713R4MTKwvFaV74ejZ3sM01LyEKk4ugNQ==",
"license": "MIT",
"dependencies": {
"@babel/helper-plugin-utils": "^7.27.1",
"@babel/helper-validator-option": "^7.27.1",
- "@babel/plugin-transform-react-display-name": "^7.27.1",
+ "@babel/plugin-transform-react-display-name": "^7.28.0",
"@babel/plugin-transform-react-jsx": "^7.27.1",
"@babel/plugin-transform-react-jsx-development": "^7.27.1",
"@babel/plugin-transform-react-pure-annotations": "^7.27.1"
@@ -1947,16 +1893,16 @@
}
},
"node_modules/@babel/preset-typescript": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/preset-typescript/-/preset-typescript-7.27.1.tgz",
- "integrity": "sha512-l7WfQfX0WK4M0v2RudjuQK4u99BS6yLHYEmdtVPP7lKV013zr9DygFuWNlnbvQ9LR+LS0Egz/XAvGx5U9MX0fQ==",
+ "version": "7.28.5",
+ "resolved": "https://registry.npmjs.org/@babel/preset-typescript/-/preset-typescript-7.28.5.tgz",
+ "integrity": "sha512-+bQy5WOI2V6LJZpPVxY+yp66XdZ2yifu0Mc1aP5CQKgjn4QM5IN2i5fAZ4xKop47pr8rpVhiAeu+nDQa12C8+g==",
"license": "MIT",
"dependencies": {
"@babel/helper-plugin-utils": "^7.27.1",
"@babel/helper-validator-option": "^7.27.1",
"@babel/plugin-syntax-jsx": "^7.27.1",
"@babel/plugin-transform-modules-commonjs": "^7.27.1",
- "@babel/plugin-transform-typescript": "^7.27.1"
+ "@babel/plugin-transform-typescript": "^7.28.5"
},
"engines": {
"node": ">=6.9.0"
@@ -1975,43 +1921,43 @@
}
},
"node_modules/@babel/runtime-corejs3": {
- "version": "7.28.4",
- "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.28.4.tgz",
- "integrity": "sha512-h7iEYiW4HebClDEhtvFObtPmIvrd1SSfpI9EhOeKk4CtIK/ngBWFpuhCzhdmRKtg71ylcue+9I6dv54XYO1epQ==",
+ "version": "7.29.0",
+ "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.29.0.tgz",
+ "integrity": "sha512-TgUkdp71C9pIbBcHudc+gXZnihEDOjUAmXO1VO4HHGES7QLZcShR0stfKIxLSNIYx2fqhmJChOjm/wkF8wv4gA==",
"license": "MIT",
"dependencies": {
- "core-js-pure": "^3.43.0"
+ "core-js-pure": "^3.48.0"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/template": {
- "version": "7.27.2",
- "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz",
- "integrity": "sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==",
+ "version": "7.28.6",
+ "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.28.6.tgz",
+ "integrity": "sha512-YA6Ma2KsCdGb+WC6UpBVFJGXL58MDA6oyONbjyF/+5sBgxY/dwkhLogbMT2GXXyU84/IhRw/2D1Os1B/giz+BQ==",
"license": "MIT",
"dependencies": {
- "@babel/code-frame": "^7.27.1",
- "@babel/parser": "^7.27.2",
- "@babel/types": "^7.27.1"
+ "@babel/code-frame": "^7.28.6",
+ "@babel/parser": "^7.28.6",
+ "@babel/types": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/traverse": {
- "version": "7.28.4",
- "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz",
- "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==",
+ "version": "7.29.0",
+ "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.29.0.tgz",
+ "integrity": "sha512-4HPiQr0X7+waHfyXPZpWPfWL/J7dcN1mx9gL6WdQVMbPnF3+ZhSMs8tCxN7oHddJE9fhNE7+lxdnlyemKfJRuA==",
"license": "MIT",
"dependencies": {
- "@babel/code-frame": "^7.27.1",
- "@babel/generator": "^7.28.3",
+ "@babel/code-frame": "^7.29.0",
+ "@babel/generator": "^7.29.0",
"@babel/helper-globals": "^7.28.0",
- "@babel/parser": "^7.28.4",
- "@babel/template": "^7.27.2",
- "@babel/types": "^7.28.4",
+ "@babel/parser": "^7.29.0",
+ "@babel/template": "^7.28.6",
+ "@babel/types": "^7.29.0",
"debug": "^4.3.1"
},
"engines": {
@@ -2019,13 +1965,13 @@
}
},
"node_modules/@babel/types": {
- "version": "7.28.4",
- "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz",
- "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==",
+ "version": "7.29.0",
+ "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.29.0.tgz",
+ "integrity": "sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A==",
"license": "MIT",
"dependencies": {
"@babel/helper-string-parser": "^7.27.1",
- "@babel/helper-validator-identifier": "^7.27.1"
+ "@babel/helper-validator-identifier": "^7.28.5"
},
"engines": {
"node": ">=6.9.0"
@@ -2275,9 +2221,9 @@
}
},
"node_modules/@csstools/postcss-cascade-layers/node_modules/postcss-selector-parser": {
- "version": "7.1.0",
- "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.0.tgz",
- "integrity": "sha512-8sLjZwK0R+JlxlYcTuVnyT2v+htpdrjDOKuMcOVdYjt52Lh8hWRYpxBPoKx/Zg+bcjc3wx6fmQevMmUztS/ccA==",
+ "version": "7.1.1",
+ "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.1.tgz",
+ "integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==",
"license": "MIT",
"dependencies": {
"cssesc": "^3.0.0",
@@ -2696,9 +2642,9 @@
}
},
"node_modules/@csstools/postcss-is-pseudo-class/node_modules/postcss-selector-parser": {
- "version": "7.1.0",
- "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.0.tgz",
- "integrity": "sha512-8sLjZwK0R+JlxlYcTuVnyT2v+htpdrjDOKuMcOVdYjt52Lh8hWRYpxBPoKx/Zg+bcjc3wx6fmQevMmUztS/ccA==",
+ "version": "7.1.1",
+ "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.1.tgz",
+ "integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==",
"license": "MIT",
"dependencies": {
"cssesc": "^3.0.0",
@@ -2935,9 +2881,9 @@
}
},
"node_modules/@csstools/postcss-normalize-display-values": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/@csstools/postcss-normalize-display-values/-/postcss-normalize-display-values-4.0.0.tgz",
- "integrity": "sha512-HlEoG0IDRoHXzXnkV4in47dzsxdsjdz6+j7MLjaACABX2NfvjFS6XVAnpaDyGesz9gK2SC7MbNwdCHusObKJ9Q==",
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/@csstools/postcss-normalize-display-values/-/postcss-normalize-display-values-4.0.1.tgz",
+ "integrity": "sha512-TQUGBuRvxdc7TgNSTevYqrL8oItxiwPDixk20qCB5me/W8uF7BPbhRrAvFuhEoywQp/woRsUZ6SJ+sU5idZAIA==",
"funding": [
{
"type": "github",
@@ -2988,6 +2934,28 @@
"postcss": "^8.4"
}
},
+ "node_modules/@csstools/postcss-position-area-property": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/@csstools/postcss-position-area-property/-/postcss-position-area-property-1.0.0.tgz",
+ "integrity": "sha512-fUP6KR8qV2NuUZV3Cw8itx0Ep90aRjAZxAEzC3vrl6yjFv+pFsQbR18UuQctEKmA72K9O27CoYiKEgXxkqjg8Q==",
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/csstools"
+ },
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/csstools"
+ }
+ ],
+ "license": "MIT-0",
+ "engines": {
+ "node": ">=18"
+ },
+ "peerDependencies": {
+ "postcss": "^8.4"
+ }
+ },
"node_modules/@csstools/postcss-progressive-custom-properties": {
"version": "4.2.1",
"resolved": "https://registry.npmjs.org/@csstools/postcss-progressive-custom-properties/-/postcss-progressive-custom-properties-4.2.1.tgz",
@@ -3013,6 +2981,32 @@
"postcss": "^8.4"
}
},
+ "node_modules/@csstools/postcss-property-rule-prelude-list": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/@csstools/postcss-property-rule-prelude-list/-/postcss-property-rule-prelude-list-1.0.0.tgz",
+ "integrity": "sha512-IxuQjUXq19fobgmSSvUDO7fVwijDJaZMvWQugxfEUxmjBeDCVaDuMpsZ31MsTm5xbnhA+ElDi0+rQ7sQQGisFA==",
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/csstools"
+ },
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/csstools"
+ }
+ ],
+ "license": "MIT-0",
+ "dependencies": {
+ "@csstools/css-parser-algorithms": "^3.0.5",
+ "@csstools/css-tokenizer": "^3.0.4"
+ },
+ "engines": {
+ "node": ">=18"
+ },
+ "peerDependencies": {
+ "postcss": "^8.4"
+ }
+ },
"node_modules/@csstools/postcss-random-function": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/@csstools/postcss-random-function/-/postcss-random-function-2.0.1.tgz",
@@ -3095,9 +3089,9 @@
}
},
"node_modules/@csstools/postcss-scope-pseudo-class/node_modules/postcss-selector-parser": {
- "version": "7.1.0",
- "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.0.tgz",
- "integrity": "sha512-8sLjZwK0R+JlxlYcTuVnyT2v+htpdrjDOKuMcOVdYjt52Lh8hWRYpxBPoKx/Zg+bcjc3wx6fmQevMmUztS/ccA==",
+ "version": "7.1.1",
+ "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.1.tgz",
+ "integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==",
"license": "MIT",
"dependencies": {
"cssesc": "^3.0.0",
@@ -3161,10 +3155,10 @@
"postcss": "^8.4"
}
},
- "node_modules/@csstools/postcss-text-decoration-shorthand": {
- "version": "4.0.3",
- "resolved": "https://registry.npmjs.org/@csstools/postcss-text-decoration-shorthand/-/postcss-text-decoration-shorthand-4.0.3.tgz",
- "integrity": "sha512-KSkGgZfx0kQjRIYnpsD7X2Om9BUXX/Kii77VBifQW9Ih929hK0KNjVngHDH0bFB9GmfWcR9vJYJJRvw/NQjkrA==",
+ "node_modules/@csstools/postcss-syntax-descriptor-syntax-production": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/@csstools/postcss-syntax-descriptor-syntax-production/-/postcss-syntax-descriptor-syntax-production-1.0.1.tgz",
+ "integrity": "sha512-GneqQWefjM//f4hJ/Kbox0C6f2T7+pi4/fqTqOFGTL3EjnvOReTqO1qUQ30CaUjkwjYq9qZ41hzarrAxCc4gow==",
"funding": [
{
"type": "github",
@@ -3177,8 +3171,7 @@
],
"license": "MIT-0",
"dependencies": {
- "@csstools/color-helpers": "^5.1.0",
- "postcss-value-parser": "^4.2.0"
+ "@csstools/css-tokenizer": "^3.0.4"
},
"engines": {
"node": ">=18"
@@ -3187,10 +3180,10 @@
"postcss": "^8.4"
}
},
- "node_modules/@csstools/postcss-trigonometric-functions": {
- "version": "4.0.9",
- "resolved": "https://registry.npmjs.org/@csstools/postcss-trigonometric-functions/-/postcss-trigonometric-functions-4.0.9.tgz",
- "integrity": "sha512-Hnh5zJUdpNrJqK9v1/E3BbrQhaDTj5YiX7P61TOvUhoDHnUmsNNxcDAgkQ32RrcWx9GVUvfUNPcUkn8R3vIX6A==",
+ "node_modules/@csstools/postcss-system-ui-font-family": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/@csstools/postcss-system-ui-font-family/-/postcss-system-ui-font-family-1.0.0.tgz",
+ "integrity": "sha512-s3xdBvfWYfoPSBsikDXbuorcMG1nN1M6GdU0qBsGfcmNR0A/qhloQZpTxjA3Xsyrk1VJvwb2pOfiOT3at/DuIQ==",
"funding": [
{
"type": "github",
@@ -3203,7 +3196,6 @@
],
"license": "MIT-0",
"dependencies": {
- "@csstools/css-calc": "^2.1.4",
"@csstools/css-parser-algorithms": "^3.0.5",
"@csstools/css-tokenizer": "^3.0.4"
},
@@ -3214,10 +3206,10 @@
"postcss": "^8.4"
}
},
- "node_modules/@csstools/postcss-unset-value": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/@csstools/postcss-unset-value/-/postcss-unset-value-4.0.0.tgz",
- "integrity": "sha512-cBz3tOCI5Fw6NIFEwU3RiwK6mn3nKegjpJuzCndoGq3BZPkUjnsq7uQmIeMNeMbMk7YD2MfKcgCpZwX5jyXqCA==",
+ "node_modules/@csstools/postcss-text-decoration-shorthand": {
+ "version": "4.0.3",
+ "resolved": "https://registry.npmjs.org/@csstools/postcss-text-decoration-shorthand/-/postcss-text-decoration-shorthand-4.0.3.tgz",
+ "integrity": "sha512-KSkGgZfx0kQjRIYnpsD7X2Om9BUXX/Kii77VBifQW9Ih929hK0KNjVngHDH0bFB9GmfWcR9vJYJJRvw/NQjkrA==",
"funding": [
{
"type": "github",
@@ -3229,6 +3221,10 @@
}
],
"license": "MIT-0",
+ "dependencies": {
+ "@csstools/color-helpers": "^5.1.0",
+ "postcss-value-parser": "^4.2.0"
+ },
"engines": {
"node": ">=18"
},
@@ -3236,10 +3232,59 @@
"postcss": "^8.4"
}
},
- "node_modules/@csstools/utilities": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/@csstools/utilities/-/utilities-2.0.0.tgz",
- "integrity": "sha512-5VdOr0Z71u+Yp3ozOx8T11N703wIFGVRgOWbOZMKgglPJsWA54MRIoMNVMa7shUToIhx5J8vX4sOZgD2XiihiQ==",
+ "node_modules/@csstools/postcss-trigonometric-functions": {
+ "version": "4.0.9",
+ "resolved": "https://registry.npmjs.org/@csstools/postcss-trigonometric-functions/-/postcss-trigonometric-functions-4.0.9.tgz",
+ "integrity": "sha512-Hnh5zJUdpNrJqK9v1/E3BbrQhaDTj5YiX7P61TOvUhoDHnUmsNNxcDAgkQ32RrcWx9GVUvfUNPcUkn8R3vIX6A==",
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/csstools"
+ },
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/csstools"
+ }
+ ],
+ "license": "MIT-0",
+ "dependencies": {
+ "@csstools/css-calc": "^2.1.4",
+ "@csstools/css-parser-algorithms": "^3.0.5",
+ "@csstools/css-tokenizer": "^3.0.4"
+ },
+ "engines": {
+ "node": ">=18"
+ },
+ "peerDependencies": {
+ "postcss": "^8.4"
+ }
+ },
+ "node_modules/@csstools/postcss-unset-value": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/@csstools/postcss-unset-value/-/postcss-unset-value-4.0.0.tgz",
+ "integrity": "sha512-cBz3tOCI5Fw6NIFEwU3RiwK6mn3nKegjpJuzCndoGq3BZPkUjnsq7uQmIeMNeMbMk7YD2MfKcgCpZwX5jyXqCA==",
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/csstools"
+ },
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/csstools"
+ }
+ ],
+ "license": "MIT-0",
+ "engines": {
+ "node": ">=18"
+ },
+ "peerDependencies": {
+ "postcss": "^8.4"
+ }
+ },
+ "node_modules/@csstools/utilities": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/@csstools/utilities/-/utilities-2.0.0.tgz",
+ "integrity": "sha512-5VdOr0Z71u+Yp3ozOx8T11N703wIFGVRgOWbOZMKgglPJsWA54MRIoMNVMa7shUToIhx5J8vX4sOZgD2XiihiQ==",
"funding": [
{
"type": "github",
@@ -3267,25 +3312,43 @@
"node": ">=10.0.0"
}
},
+ "node_modules/@docsearch/core": {
+ "version": "4.5.4",
+ "resolved": "https://registry.npmjs.org/@docsearch/core/-/core-4.5.4.tgz",
+ "integrity": "sha512-DbkfZbJyYAPFJtF71eAFOTQSy5z5c/hdSN0UrErORKDwXKLTJBR0c+5WxE5l+IKZx4xIaEa8RkrL7T28DTCOYw==",
+ "license": "MIT",
+ "peerDependencies": {
+ "@types/react": ">= 16.8.0 < 20.0.0",
+ "react": ">= 16.8.0 < 20.0.0",
+ "react-dom": ">= 16.8.0 < 20.0.0"
+ },
+ "peerDependenciesMeta": {
+ "@types/react": {
+ "optional": true
+ },
+ "react": {
+ "optional": true
+ },
+ "react-dom": {
+ "optional": true
+ }
+ }
+ },
"node_modules/@docsearch/css": {
- "version": "4.1.0",
- "resolved": "https://registry.npmjs.org/@docsearch/css/-/css-4.1.0.tgz",
- "integrity": "sha512-nuNKGjHj/FQeWgE9t+i83QD/V67QiaAmGY7xS9TVCRUiCqSljOgIKlsLoQZKKVwEG8f+OWKdznzZkJxGZ7d06A==",
+ "version": "4.5.4",
+ "resolved": "https://registry.npmjs.org/@docsearch/css/-/css-4.5.4.tgz",
+ "integrity": "sha512-gzO4DJwyM9c4YEPHwaLV1nUCDC2N6yoh0QJj44dce2rcfN71mB+jpu3+F+Y/KMDF1EKV0C3m54leSWsraE94xg==",
"license": "MIT"
},
"node_modules/@docsearch/react": {
- "version": "4.1.0",
- "resolved": "https://registry.npmjs.org/@docsearch/react/-/react-4.1.0.tgz",
- "integrity": "sha512-4GHI7TT3sJZ2Vs4Kjadv7vAkMrTsJqHvzvxO3JA7UT8iPRKaDottG5o5uNshPWhVVaBYPC35Ukf8bfCotGpjSg==",
+ "version": "4.5.4",
+ "resolved": "https://registry.npmjs.org/@docsearch/react/-/react-4.5.4.tgz",
+ "integrity": "sha512-iBNFfvWoUFRUJmGQ/r+0AEp2OJgJMoYIKRiRcTDON0hObBRSLlrv2ktb7w3nc1MeNm1JIpbPA99i59TiIR49fA==",
"license": "MIT",
"dependencies": {
- "@ai-sdk/react": "^2.0.30",
"@algolia/autocomplete-core": "1.19.2",
- "@docsearch/css": "4.1.0",
- "ai": "^5.0.30",
- "algoliasearch": "^5.28.0",
- "marked": "^16.3.0",
- "zod": "^4.1.8"
+ "@docsearch/core": "4.5.4",
+ "@docsearch/css": "4.5.4"
},
"peerDependencies": {
"@types/react": ">= 16.8.0 < 20.0.0",
@@ -3309,9 +3372,9 @@
}
},
"node_modules/@docusaurus/babel": {
- "version": "3.9.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/babel/-/babel-3.9.1.tgz",
- "integrity": "sha512-/uoi3oG+wvbVWNBRfPrzrEslOSeLxrQEyWMywK51TLDFTANqIRivzkMusudh5bdDty8fXzCYUT+tg5t697jYqg==",
+ "version": "3.9.2",
+ "resolved": "https://registry.npmjs.org/@docusaurus/babel/-/babel-3.9.2.tgz",
+ "integrity": "sha512-GEANdi/SgER+L7Japs25YiGil/AUDnFFHaCGPBbundxoWtCkA2lmy7/tFmgED4y1htAy6Oi4wkJEQdGssnw9MA==",
"license": "MIT",
"dependencies": {
"@babel/core": "^7.25.9",
@@ -3324,8 +3387,8 @@
"@babel/runtime": "^7.25.9",
"@babel/runtime-corejs3": "^7.25.9",
"@babel/traverse": "^7.25.9",
- "@docusaurus/logger": "3.9.1",
- "@docusaurus/utils": "3.9.1",
+ "@docusaurus/logger": "3.9.2",
+ "@docusaurus/utils": "3.9.2",
"babel-plugin-dynamic-import-node": "^2.3.3",
"fs-extra": "^11.1.1",
"tslib": "^2.6.0"
@@ -3335,17 +3398,17 @@
}
},
"node_modules/@docusaurus/bundler": {
- "version": "3.9.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/bundler/-/bundler-3.9.1.tgz",
- "integrity": "sha512-E1c9DgNmAz4NqbNtiJVp4UgjLtr8O01IgtXD/NDQ4PZaK8895cMiTOgb3k7mN0qX8A3lb8vqyrPJ842+yMpuUg==",
+ "version": "3.9.2",
+ "resolved": "https://registry.npmjs.org/@docusaurus/bundler/-/bundler-3.9.2.tgz",
+ "integrity": "sha512-ZOVi6GYgTcsZcUzjblpzk3wH1Fya2VNpd5jtHoCCFcJlMQ1EYXZetfAnRHLcyiFeBABaI1ltTYbOBtH/gahGVA==",
"license": "MIT",
"dependencies": {
"@babel/core": "^7.25.9",
- "@docusaurus/babel": "3.9.1",
- "@docusaurus/cssnano-preset": "3.9.1",
- "@docusaurus/logger": "3.9.1",
- "@docusaurus/types": "3.9.1",
- "@docusaurus/utils": "3.9.1",
+ "@docusaurus/babel": "3.9.2",
+ "@docusaurus/cssnano-preset": "3.9.2",
+ "@docusaurus/logger": "3.9.2",
+ "@docusaurus/types": "3.9.2",
+ "@docusaurus/utils": "3.9.2",
"babel-loader": "^9.2.1",
"clean-css": "^5.3.3",
"copy-webpack-plugin": "^11.0.0",
@@ -3378,18 +3441,18 @@
}
},
"node_modules/@docusaurus/core": {
- "version": "3.9.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/core/-/core-3.9.1.tgz",
- "integrity": "sha512-FWDk1LIGD5UR5Zmm9rCrXRoxZUgbwuP6FBA7rc50DVfzqDOMkeMe3NyJhOsA2dF0zBE3VbHEIMmTjKwTZJwbaA==",
- "license": "MIT",
- "dependencies": {
- "@docusaurus/babel": "3.9.1",
- "@docusaurus/bundler": "3.9.1",
- "@docusaurus/logger": "3.9.1",
- "@docusaurus/mdx-loader": "3.9.1",
- "@docusaurus/utils": "3.9.1",
- "@docusaurus/utils-common": "3.9.1",
- "@docusaurus/utils-validation": "3.9.1",
+ "version": "3.9.2",
+ "resolved": "https://registry.npmjs.org/@docusaurus/core/-/core-3.9.2.tgz",
+ "integrity": "sha512-HbjwKeC+pHUFBfLMNzuSjqFE/58+rLVKmOU3lxQrpsxLBOGosYco/Q0GduBb0/jEMRiyEqjNT/01rRdOMWq5pw==",
+ "license": "MIT",
+ "dependencies": {
+ "@docusaurus/babel": "3.9.2",
+ "@docusaurus/bundler": "3.9.2",
+ "@docusaurus/logger": "3.9.2",
+ "@docusaurus/mdx-loader": "3.9.2",
+ "@docusaurus/utils": "3.9.2",
+ "@docusaurus/utils-common": "3.9.2",
+ "@docusaurus/utils-validation": "3.9.2",
"boxen": "^6.2.1",
"chalk": "^4.1.2",
"chokidar": "^3.5.3",
@@ -3439,9 +3502,9 @@
}
},
"node_modules/@docusaurus/cssnano-preset": {
- "version": "3.9.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/cssnano-preset/-/cssnano-preset-3.9.1.tgz",
- "integrity": "sha512-2y7+s7RWQMqBg+9ejeKwvZs7Bdw/hHIVJIodwMXbs2kr+S48AhcmAfdOh6Cwm0unJb0hJUshN0ROwRoQMwl3xg==",
+ "version": "3.9.2",
+ "resolved": "https://registry.npmjs.org/@docusaurus/cssnano-preset/-/cssnano-preset-3.9.2.tgz",
+ "integrity": "sha512-8gBKup94aGttRduABsj7bpPFTX7kbwu+xh3K9NMCF5K4bWBqTFYW+REKHF6iBVDHRJ4grZdIPbvkiHd/XNKRMQ==",
"license": "MIT",
"dependencies": {
"cssnano-preset-advanced": "^6.1.2",
@@ -3454,9 +3517,9 @@
}
},
"node_modules/@docusaurus/logger": {
- "version": "3.9.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/logger/-/logger-3.9.1.tgz",
- "integrity": "sha512-C9iFzXwHzwvGlisE4bZx+XQE0JIqlGAYAd5LzpR7fEDgjctu7yL8bE5U4nTNywXKHURDzMt4RJK8V6+stFHVkA==",
+ "version": "3.9.2",
+ "resolved": "https://registry.npmjs.org/@docusaurus/logger/-/logger-3.9.2.tgz",
+ "integrity": "sha512-/SVCc57ByARzGSU60c50rMyQlBuMIJCjcsJlkphxY6B0GV4UH3tcA1994N8fFfbJ9kX3jIBe/xg3XP5qBtGDbA==",
"license": "MIT",
"dependencies": {
"chalk": "^4.1.2",
@@ -3467,14 +3530,14 @@
}
},
"node_modules/@docusaurus/mdx-loader": {
- "version": "3.9.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/mdx-loader/-/mdx-loader-3.9.1.tgz",
- "integrity": "sha512-/1PY8lqry8jCt0qZddJSpc0U2sH6XC27kVJZfpA7o2TiQ3mdBQyH5AVbj/B2m682B1ounE+XjI0LdpOkAQLPoA==",
+ "version": "3.9.2",
+ "resolved": "https://registry.npmjs.org/@docusaurus/mdx-loader/-/mdx-loader-3.9.2.tgz",
+ "integrity": "sha512-wiYoGwF9gdd6rev62xDU8AAM8JuLI/hlwOtCzMmYcspEkzecKrP8J8X+KpYnTlACBUUtXNJpSoCwFWJhLRevzQ==",
"license": "MIT",
"dependencies": {
- "@docusaurus/logger": "3.9.1",
- "@docusaurus/utils": "3.9.1",
- "@docusaurus/utils-validation": "3.9.1",
+ "@docusaurus/logger": "3.9.2",
+ "@docusaurus/utils": "3.9.2",
+ "@docusaurus/utils-validation": "3.9.2",
"@mdx-js/mdx": "^3.0.0",
"@slorber/remark-comment": "^1.0.0",
"escape-html": "^1.0.3",
@@ -3506,12 +3569,12 @@
}
},
"node_modules/@docusaurus/module-type-aliases": {
- "version": "3.9.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/module-type-aliases/-/module-type-aliases-3.9.1.tgz",
- "integrity": "sha512-YBce3GbJGGcMbJTyHcnEOMvdXqg41pa5HsrMCGA5Rm4z0h0tHS6YtEldj0mlfQRhCG7Y0VD66t2tb87Aom+11g==",
+ "version": "3.9.2",
+ "resolved": "https://registry.npmjs.org/@docusaurus/module-type-aliases/-/module-type-aliases-3.9.2.tgz",
+ "integrity": "sha512-8qVe2QA9hVLzvnxP46ysuofJUIc/yYQ82tvA/rBTrnpXtCjNSFLxEZfd5U8cYZuJIVlkPxamsIgwd5tGZXfvew==",
"license": "MIT",
"dependencies": {
- "@docusaurus/types": "3.9.1",
+ "@docusaurus/types": "3.9.2",
"@types/history": "^4.7.11",
"@types/react": "*",
"@types/react-router-config": "*",
@@ -3525,19 +3588,19 @@
}
},
"node_modules/@docusaurus/plugin-content-blog": {
- "version": "3.9.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-blog/-/plugin-content-blog-3.9.1.tgz",
- "integrity": "sha512-vT6kIimpJLWvW9iuWzH4u7VpTdsGlmn4yfyhq0/Kb1h4kf9uVouGsTmrD7WgtYBUG1P+TSmQzUUQa+ALBSRTig==",
- "license": "MIT",
- "dependencies": {
- "@docusaurus/core": "3.9.1",
- "@docusaurus/logger": "3.9.1",
- "@docusaurus/mdx-loader": "3.9.1",
- "@docusaurus/theme-common": "3.9.1",
- "@docusaurus/types": "3.9.1",
- "@docusaurus/utils": "3.9.1",
- "@docusaurus/utils-common": "3.9.1",
- "@docusaurus/utils-validation": "3.9.1",
+ "version": "3.9.2",
+ "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-blog/-/plugin-content-blog-3.9.2.tgz",
+ "integrity": "sha512-3I2HXy3L1QcjLJLGAoTvoBnpOwa6DPUa3Q0dMK19UTY9mhPkKQg/DYhAGTiBUKcTR0f08iw7kLPqOhIgdV3eVQ==",
+ "license": "MIT",
+ "dependencies": {
+ "@docusaurus/core": "3.9.2",
+ "@docusaurus/logger": "3.9.2",
+ "@docusaurus/mdx-loader": "3.9.2",
+ "@docusaurus/theme-common": "3.9.2",
+ "@docusaurus/types": "3.9.2",
+ "@docusaurus/utils": "3.9.2",
+ "@docusaurus/utils-common": "3.9.2",
+ "@docusaurus/utils-validation": "3.9.2",
"cheerio": "1.0.0-rc.12",
"feed": "^4.2.2",
"fs-extra": "^11.1.1",
@@ -3559,20 +3622,20 @@
}
},
"node_modules/@docusaurus/plugin-content-docs": {
- "version": "3.9.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-docs/-/plugin-content-docs-3.9.1.tgz",
- "integrity": "sha512-DyLk9BIA6I9gPIuia8XIL+XIEbNnExam6AHzRsfrEq4zJr7k/DsWW7oi4aJMepDnL7jMRhpVcdsCxdjb0/A9xg==",
- "license": "MIT",
- "dependencies": {
- "@docusaurus/core": "3.9.1",
- "@docusaurus/logger": "3.9.1",
- "@docusaurus/mdx-loader": "3.9.1",
- "@docusaurus/module-type-aliases": "3.9.1",
- "@docusaurus/theme-common": "3.9.1",
- "@docusaurus/types": "3.9.1",
- "@docusaurus/utils": "3.9.1",
- "@docusaurus/utils-common": "3.9.1",
- "@docusaurus/utils-validation": "3.9.1",
+ "version": "3.9.2",
+ "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-docs/-/plugin-content-docs-3.9.2.tgz",
+ "integrity": "sha512-C5wZsGuKTY8jEYsqdxhhFOe1ZDjH0uIYJ9T/jebHwkyxqnr4wW0jTkB72OMqNjsoQRcb0JN3PcSeTwFlVgzCZg==",
+ "license": "MIT",
+ "dependencies": {
+ "@docusaurus/core": "3.9.2",
+ "@docusaurus/logger": "3.9.2",
+ "@docusaurus/mdx-loader": "3.9.2",
+ "@docusaurus/module-type-aliases": "3.9.2",
+ "@docusaurus/theme-common": "3.9.2",
+ "@docusaurus/types": "3.9.2",
+ "@docusaurus/utils": "3.9.2",
+ "@docusaurus/utils-common": "3.9.2",
+ "@docusaurus/utils-validation": "3.9.2",
"@types/react-router-config": "^5.0.7",
"combine-promises": "^1.1.0",
"fs-extra": "^11.1.1",
@@ -3592,16 +3655,16 @@
}
},
"node_modules/@docusaurus/plugin-content-pages": {
- "version": "3.9.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-pages/-/plugin-content-pages-3.9.1.tgz",
- "integrity": "sha512-/1wFzRnXYASI+Nv9ck9IVPIMw0O5BGQ8ZVhDzEwhkL+tl44ycvSnY6PIe6rW2HLxsw61Z3WFwAiU8+xMMtMZpg==",
+ "version": "3.9.2",
+ "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-pages/-/plugin-content-pages-3.9.2.tgz",
+ "integrity": "sha512-s4849w/p4noXUrGpPUF0BPqIAfdAe76BLaRGAGKZ1gTDNiGxGcpsLcwJ9OTi1/V8A+AzvsmI9pkjie2zjIQZKA==",
"license": "MIT",
"dependencies": {
- "@docusaurus/core": "3.9.1",
- "@docusaurus/mdx-loader": "3.9.1",
- "@docusaurus/types": "3.9.1",
- "@docusaurus/utils": "3.9.1",
- "@docusaurus/utils-validation": "3.9.1",
+ "@docusaurus/core": "3.9.2",
+ "@docusaurus/mdx-loader": "3.9.2",
+ "@docusaurus/types": "3.9.2",
+ "@docusaurus/utils": "3.9.2",
+ "@docusaurus/utils-validation": "3.9.2",
"fs-extra": "^11.1.1",
"tslib": "^2.6.0",
"webpack": "^5.88.1"
@@ -3615,15 +3678,15 @@
}
},
"node_modules/@docusaurus/plugin-css-cascade-layers": {
- "version": "3.9.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/plugin-css-cascade-layers/-/plugin-css-cascade-layers-3.9.1.tgz",
- "integrity": "sha512-/QyW2gRCk/XE3ttCK/ERIgle8KJ024dBNKMu6U5SmpJvuT2il1n5jR/48Pp/9wEwut8WVml4imNm6X8JsL5A0Q==",
+ "version": "3.9.2",
+ "resolved": "https://registry.npmjs.org/@docusaurus/plugin-css-cascade-layers/-/plugin-css-cascade-layers-3.9.2.tgz",
+ "integrity": "sha512-w1s3+Ss+eOQbscGM4cfIFBlVg/QKxyYgj26k5AnakuHkKxH6004ZtuLe5awMBotIYF2bbGDoDhpgQ4r/kcj4rQ==",
"license": "MIT",
"dependencies": {
- "@docusaurus/core": "3.9.1",
- "@docusaurus/types": "3.9.1",
- "@docusaurus/utils": "3.9.1",
- "@docusaurus/utils-validation": "3.9.1",
+ "@docusaurus/core": "3.9.2",
+ "@docusaurus/types": "3.9.2",
+ "@docusaurus/utils": "3.9.2",
+ "@docusaurus/utils-validation": "3.9.2",
"tslib": "^2.6.0"
},
"engines": {
@@ -3631,14 +3694,14 @@
}
},
"node_modules/@docusaurus/plugin-debug": {
- "version": "3.9.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/plugin-debug/-/plugin-debug-3.9.1.tgz",
- "integrity": "sha512-qPeAuk0LccC251d7jg2MRhNI+o7niyqa924oEM/AxnZJvIpMa596aAxkRImiAqNN6+gtLE1Hkrz/RHUH2HDGsA==",
+ "version": "3.9.2",
+ "resolved": "https://registry.npmjs.org/@docusaurus/plugin-debug/-/plugin-debug-3.9.2.tgz",
+ "integrity": "sha512-j7a5hWuAFxyQAkilZwhsQ/b3T7FfHZ+0dub6j/GxKNFJp2h9qk/P1Bp7vrGASnvA9KNQBBL1ZXTe7jlh4VdPdA==",
"license": "MIT",
"dependencies": {
- "@docusaurus/core": "3.9.1",
- "@docusaurus/types": "3.9.1",
- "@docusaurus/utils": "3.9.1",
+ "@docusaurus/core": "3.9.2",
+ "@docusaurus/types": "3.9.2",
+ "@docusaurus/utils": "3.9.2",
"fs-extra": "^11.1.1",
"react-json-view-lite": "^2.3.0",
"tslib": "^2.6.0"
@@ -3652,14 +3715,14 @@
}
},
"node_modules/@docusaurus/plugin-google-analytics": {
- "version": "3.9.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-analytics/-/plugin-google-analytics-3.9.1.tgz",
- "integrity": "sha512-k4Qq2HphqOrIU/CevGPdEO1yJnWUI8m0zOJsYt5NfMJwNsIn/gDD6gv/DKD+hxHndQT5pacsfBd4BWHZVNVroQ==",
+ "version": "3.9.2",
+ "resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-analytics/-/plugin-google-analytics-3.9.2.tgz",
+ "integrity": "sha512-mAwwQJ1Us9jL/lVjXtErXto4p4/iaLlweC54yDUK1a97WfkC6Z2k5/769JsFgwOwOP+n5mUQGACXOEQ0XDuVUw==",
"license": "MIT",
"dependencies": {
- "@docusaurus/core": "3.9.1",
- "@docusaurus/types": "3.9.1",
- "@docusaurus/utils-validation": "3.9.1",
+ "@docusaurus/core": "3.9.2",
+ "@docusaurus/types": "3.9.2",
+ "@docusaurus/utils-validation": "3.9.2",
"tslib": "^2.6.0"
},
"engines": {
@@ -3671,14 +3734,14 @@
}
},
"node_modules/@docusaurus/plugin-google-gtag": {
- "version": "3.9.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-gtag/-/plugin-google-gtag-3.9.1.tgz",
- "integrity": "sha512-n9BURBiQyJKI/Ecz35IUjXYwXcgNCSq7/eA07+ZYcDiSyH2p/EjPf8q/QcZG3CyEJPZ/SzGkDHePfcVPahY4Gg==",
+ "version": "3.9.2",
+ "resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-gtag/-/plugin-google-gtag-3.9.2.tgz",
+ "integrity": "sha512-YJ4lDCphabBtw19ooSlc1MnxtYGpjFV9rEdzjLsUnBCeis2djUyCozZaFhCg6NGEwOn7HDDyMh0yzcdRpnuIvA==",
"license": "MIT",
"dependencies": {
- "@docusaurus/core": "3.9.1",
- "@docusaurus/types": "3.9.1",
- "@docusaurus/utils-validation": "3.9.1",
+ "@docusaurus/core": "3.9.2",
+ "@docusaurus/types": "3.9.2",
+ "@docusaurus/utils-validation": "3.9.2",
"@types/gtag.js": "^0.0.12",
"tslib": "^2.6.0"
},
@@ -3691,14 +3754,14 @@
}
},
"node_modules/@docusaurus/plugin-google-tag-manager": {
- "version": "3.9.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-tag-manager/-/plugin-google-tag-manager-3.9.1.tgz",
- "integrity": "sha512-rZAQZ25ZuXaThBajxzLjXieTDUCMmBzfAA6ThElQ3o7Q+LEpOjCIrwGFau0KLY9HeG6x91+FwwsAM8zeApYDrg==",
+ "version": "3.9.2",
+ "resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-tag-manager/-/plugin-google-tag-manager-3.9.2.tgz",
+ "integrity": "sha512-LJtIrkZN/tuHD8NqDAW1Tnw0ekOwRTfobWPsdO15YxcicBo2ykKF0/D6n0vVBfd3srwr9Z6rzrIWYrMzBGrvNw==",
"license": "MIT",
"dependencies": {
- "@docusaurus/core": "3.9.1",
- "@docusaurus/types": "3.9.1",
- "@docusaurus/utils-validation": "3.9.1",
+ "@docusaurus/core": "3.9.2",
+ "@docusaurus/types": "3.9.2",
+ "@docusaurus/utils-validation": "3.9.2",
"tslib": "^2.6.0"
},
"engines": {
@@ -3710,17 +3773,17 @@
}
},
"node_modules/@docusaurus/plugin-sitemap": {
- "version": "3.9.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/plugin-sitemap/-/plugin-sitemap-3.9.1.tgz",
- "integrity": "sha512-k/bf5cXDxAJUYTzqatgFJwmZsLUbIgl6S8AdZMKGG2Mv2wcOHt+EQNN9qPyWZ5/9cFj+Q8f8DN+KQheBMYLong==",
+ "version": "3.9.2",
+ "resolved": "https://registry.npmjs.org/@docusaurus/plugin-sitemap/-/plugin-sitemap-3.9.2.tgz",
+ "integrity": "sha512-WLh7ymgDXjG8oPoM/T4/zUP7KcSuFYRZAUTl8vR6VzYkfc18GBM4xLhcT+AKOwun6kBivYKUJf+vlqYJkm+RHw==",
"license": "MIT",
"dependencies": {
- "@docusaurus/core": "3.9.1",
- "@docusaurus/logger": "3.9.1",
- "@docusaurus/types": "3.9.1",
- "@docusaurus/utils": "3.9.1",
- "@docusaurus/utils-common": "3.9.1",
- "@docusaurus/utils-validation": "3.9.1",
+ "@docusaurus/core": "3.9.2",
+ "@docusaurus/logger": "3.9.2",
+ "@docusaurus/types": "3.9.2",
+ "@docusaurus/utils": "3.9.2",
+ "@docusaurus/utils-common": "3.9.2",
+ "@docusaurus/utils-validation": "3.9.2",
"fs-extra": "^11.1.1",
"sitemap": "^7.1.1",
"tslib": "^2.6.0"
@@ -3734,15 +3797,15 @@
}
},
"node_modules/@docusaurus/plugin-svgr": {
- "version": "3.9.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/plugin-svgr/-/plugin-svgr-3.9.1.tgz",
- "integrity": "sha512-TeZOXT2PSdTNR1OpDJMkYqFyX7MMhbd4t16hQByXksgZQCXNyw3Dio+KaDJ2Nj+LA4WkOvsk45bWgYG5MAaXSQ==",
+ "version": "3.9.2",
+ "resolved": "https://registry.npmjs.org/@docusaurus/plugin-svgr/-/plugin-svgr-3.9.2.tgz",
+ "integrity": "sha512-n+1DE+5b3Lnf27TgVU5jM1d4x5tUh2oW5LTsBxJX4PsAPV0JGcmI6p3yLYtEY0LRVEIJh+8RsdQmRE66wSV8mw==",
"license": "MIT",
"dependencies": {
- "@docusaurus/core": "3.9.1",
- "@docusaurus/types": "3.9.1",
- "@docusaurus/utils": "3.9.1",
- "@docusaurus/utils-validation": "3.9.1",
+ "@docusaurus/core": "3.9.2",
+ "@docusaurus/types": "3.9.2",
+ "@docusaurus/utils": "3.9.2",
+ "@docusaurus/utils-validation": "3.9.2",
"@svgr/core": "8.1.0",
"@svgr/webpack": "^8.1.0",
"tslib": "^2.6.0",
@@ -3757,26 +3820,26 @@
}
},
"node_modules/@docusaurus/preset-classic": {
- "version": "3.9.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/preset-classic/-/preset-classic-3.9.1.tgz",
- "integrity": "sha512-ZHga2xsxxsyd0dN1BpLj8S889Eu9eMBuj2suqxdw/vaaXu/FjJ8KEGbcaeo6nHPo8VQcBBnPEdkBtSDm2TfMNw==",
- "license": "MIT",
- "dependencies": {
- "@docusaurus/core": "3.9.1",
- "@docusaurus/plugin-content-blog": "3.9.1",
- "@docusaurus/plugin-content-docs": "3.9.1",
- "@docusaurus/plugin-content-pages": "3.9.1",
- "@docusaurus/plugin-css-cascade-layers": "3.9.1",
- "@docusaurus/plugin-debug": "3.9.1",
- "@docusaurus/plugin-google-analytics": "3.9.1",
- "@docusaurus/plugin-google-gtag": "3.9.1",
- "@docusaurus/plugin-google-tag-manager": "3.9.1",
- "@docusaurus/plugin-sitemap": "3.9.1",
- "@docusaurus/plugin-svgr": "3.9.1",
- "@docusaurus/theme-classic": "3.9.1",
- "@docusaurus/theme-common": "3.9.1",
- "@docusaurus/theme-search-algolia": "3.9.1",
- "@docusaurus/types": "3.9.1"
+ "version": "3.9.2",
+ "resolved": "https://registry.npmjs.org/@docusaurus/preset-classic/-/preset-classic-3.9.2.tgz",
+ "integrity": "sha512-IgyYO2Gvaigi21LuDIe+nvmN/dfGXAiMcV/murFqcpjnZc7jxFAxW+9LEjdPt61uZLxG4ByW/oUmX/DDK9t/8w==",
+ "license": "MIT",
+ "dependencies": {
+ "@docusaurus/core": "3.9.2",
+ "@docusaurus/plugin-content-blog": "3.9.2",
+ "@docusaurus/plugin-content-docs": "3.9.2",
+ "@docusaurus/plugin-content-pages": "3.9.2",
+ "@docusaurus/plugin-css-cascade-layers": "3.9.2",
+ "@docusaurus/plugin-debug": "3.9.2",
+ "@docusaurus/plugin-google-analytics": "3.9.2",
+ "@docusaurus/plugin-google-gtag": "3.9.2",
+ "@docusaurus/plugin-google-tag-manager": "3.9.2",
+ "@docusaurus/plugin-sitemap": "3.9.2",
+ "@docusaurus/plugin-svgr": "3.9.2",
+ "@docusaurus/theme-classic": "3.9.2",
+ "@docusaurus/theme-common": "3.9.2",
+ "@docusaurus/theme-search-algolia": "3.9.2",
+ "@docusaurus/types": "3.9.2"
},
"engines": {
"node": ">=20.0"
@@ -3787,24 +3850,24 @@
}
},
"node_modules/@docusaurus/theme-classic": {
- "version": "3.9.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/theme-classic/-/theme-classic-3.9.1.tgz",
- "integrity": "sha512-LrAIu/mQ04nG6s1cssC0TMmICD8twFIIn/hJ5Pd9uIPQvtKnyAKEn12RefopAul5KfMo9kixPaqogV5jIJr26w==",
- "license": "MIT",
- "dependencies": {
- "@docusaurus/core": "3.9.1",
- "@docusaurus/logger": "3.9.1",
- "@docusaurus/mdx-loader": "3.9.1",
- "@docusaurus/module-type-aliases": "3.9.1",
- "@docusaurus/plugin-content-blog": "3.9.1",
- "@docusaurus/plugin-content-docs": "3.9.1",
- "@docusaurus/plugin-content-pages": "3.9.1",
- "@docusaurus/theme-common": "3.9.1",
- "@docusaurus/theme-translations": "3.9.1",
- "@docusaurus/types": "3.9.1",
- "@docusaurus/utils": "3.9.1",
- "@docusaurus/utils-common": "3.9.1",
- "@docusaurus/utils-validation": "3.9.1",
+ "version": "3.9.2",
+ "resolved": "https://registry.npmjs.org/@docusaurus/theme-classic/-/theme-classic-3.9.2.tgz",
+ "integrity": "sha512-IGUsArG5hhekXd7RDb11v94ycpJpFdJPkLnt10fFQWOVxAtq5/D7hT6lzc2fhyQKaaCE62qVajOMKL7OiAFAIA==",
+ "license": "MIT",
+ "dependencies": {
+ "@docusaurus/core": "3.9.2",
+ "@docusaurus/logger": "3.9.2",
+ "@docusaurus/mdx-loader": "3.9.2",
+ "@docusaurus/module-type-aliases": "3.9.2",
+ "@docusaurus/plugin-content-blog": "3.9.2",
+ "@docusaurus/plugin-content-docs": "3.9.2",
+ "@docusaurus/plugin-content-pages": "3.9.2",
+ "@docusaurus/theme-common": "3.9.2",
+ "@docusaurus/theme-translations": "3.9.2",
+ "@docusaurus/types": "3.9.2",
+ "@docusaurus/utils": "3.9.2",
+ "@docusaurus/utils-common": "3.9.2",
+ "@docusaurus/utils-validation": "3.9.2",
"@mdx-js/react": "^3.0.0",
"clsx": "^2.0.0",
"infima": "0.2.0-alpha.45",
@@ -3827,15 +3890,15 @@
}
},
"node_modules/@docusaurus/theme-common": {
- "version": "3.9.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/theme-common/-/theme-common-3.9.1.tgz",
- "integrity": "sha512-j9adi961F+6Ps9d0jcb5BokMcbjXAAJqKkV43eo8nh4YgmDj7KUNDX4EnOh/MjTQeO06oPY5cxp3yUXdW/8Ggw==",
+ "version": "3.9.2",
+ "resolved": "https://registry.npmjs.org/@docusaurus/theme-common/-/theme-common-3.9.2.tgz",
+ "integrity": "sha512-6c4DAbR6n6nPbnZhY2V3tzpnKnGL+6aOsLvFL26VRqhlczli9eWG0VDUNoCQEPnGwDMhPS42UhSAnz5pThm5Ag==",
"license": "MIT",
"dependencies": {
- "@docusaurus/mdx-loader": "3.9.1",
- "@docusaurus/module-type-aliases": "3.9.1",
- "@docusaurus/utils": "3.9.1",
- "@docusaurus/utils-common": "3.9.1",
+ "@docusaurus/mdx-loader": "3.9.2",
+ "@docusaurus/module-type-aliases": "3.9.2",
+ "@docusaurus/utils": "3.9.2",
+ "@docusaurus/utils-common": "3.9.2",
"@types/history": "^4.7.11",
"@types/react": "*",
"@types/react-router-config": "*",
@@ -3855,19 +3918,19 @@
}
},
"node_modules/@docusaurus/theme-search-algolia": {
- "version": "3.9.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/theme-search-algolia/-/theme-search-algolia-3.9.1.tgz",
- "integrity": "sha512-WjM28bzlgfT6nHlEJemkwyGVpvGsZWPireV/w+wZ1Uo64xCZ8lNOb4xwQRukDaLSed3oPBN0gSnu06l5VuCXHg==",
+ "version": "3.9.2",
+ "resolved": "https://registry.npmjs.org/@docusaurus/theme-search-algolia/-/theme-search-algolia-3.9.2.tgz",
+ "integrity": "sha512-GBDSFNwjnh5/LdkxCKQHkgO2pIMX1447BxYUBG2wBiajS21uj64a+gH/qlbQjDLxmGrbrllBrtJkUHxIsiwRnw==",
"license": "MIT",
"dependencies": {
"@docsearch/react": "^3.9.0 || ^4.1.0",
- "@docusaurus/core": "3.9.1",
- "@docusaurus/logger": "3.9.1",
- "@docusaurus/plugin-content-docs": "3.9.1",
- "@docusaurus/theme-common": "3.9.1",
- "@docusaurus/theme-translations": "3.9.1",
- "@docusaurus/utils": "3.9.1",
- "@docusaurus/utils-validation": "3.9.1",
+ "@docusaurus/core": "3.9.2",
+ "@docusaurus/logger": "3.9.2",
+ "@docusaurus/plugin-content-docs": "3.9.2",
+ "@docusaurus/theme-common": "3.9.2",
+ "@docusaurus/theme-translations": "3.9.2",
+ "@docusaurus/utils": "3.9.2",
+ "@docusaurus/utils-validation": "3.9.2",
"algoliasearch": "^5.37.0",
"algoliasearch-helper": "^3.26.0",
"clsx": "^2.0.0",
@@ -3886,9 +3949,9 @@
}
},
"node_modules/@docusaurus/theme-translations": {
- "version": "3.9.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/theme-translations/-/theme-translations-3.9.1.tgz",
- "integrity": "sha512-mUQd49BSGKTiM6vP9+JFgRJL28lMIN3PUvXjF3rzuOHMByUZUBNwCt26Z23GkKiSIOrRkjKoaBNTipR/MHdYSQ==",
+ "version": "3.9.2",
+ "resolved": "https://registry.npmjs.org/@docusaurus/theme-translations/-/theme-translations-3.9.2.tgz",
+ "integrity": "sha512-vIryvpP18ON9T9rjgMRFLr2xJVDpw1rtagEGf8Ccce4CkTrvM/fRB8N2nyWYOW5u3DdjkwKw5fBa+3tbn9P4PA==",
"license": "MIT",
"dependencies": {
"fs-extra": "^11.1.1",
@@ -3899,16 +3962,16 @@
}
},
"node_modules/@docusaurus/tsconfig": {
- "version": "3.9.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/tsconfig/-/tsconfig-3.9.1.tgz",
- "integrity": "sha512-stdzM1dNDgRO0OvxeznXlE3N1igUoeHPNJjiKqyffLizgpVgNXJBAWeG6fuoYiCH4udGUBqy2dyM+1+kG2/UPQ==",
+ "version": "3.9.2",
+ "resolved": "https://registry.npmjs.org/@docusaurus/tsconfig/-/tsconfig-3.9.2.tgz",
+ "integrity": "sha512-j6/Fp4Rlpxsc632cnRnl5HpOWeb6ZKssDj6/XzzAzVGXXfm9Eptx3rxCC+fDzySn9fHTS+CWJjPineCR1bB5WQ==",
"dev": true,
"license": "MIT"
},
"node_modules/@docusaurus/types": {
- "version": "3.9.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/types/-/types-3.9.1.tgz",
- "integrity": "sha512-ElekJ29sk39s5LTEZMByY1c2oH9FMtw7KbWFU3BtuQ1TytfIK39HhUivDEJvm5KCLyEnnfUZlvSNDXeyk0vzAA==",
+ "version": "3.9.2",
+ "resolved": "https://registry.npmjs.org/@docusaurus/types/-/types-3.9.2.tgz",
+ "integrity": "sha512-Ux1JUNswg+EfUEmajJjyhIohKceitY/yzjRUpu04WXgvVz+fbhVC0p+R0JhvEu4ytw8zIAys2hrdpQPBHRIa8Q==",
"license": "MIT",
"dependencies": {
"@mdx-js/mdx": "^3.0.0",
@@ -3942,14 +4005,14 @@
}
},
"node_modules/@docusaurus/utils": {
- "version": "3.9.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/utils/-/utils-3.9.1.tgz",
- "integrity": "sha512-YAL4yhhWLl9DXuf5MVig260a6INz4MehrBGFU/CZu8yXmRiYEuQvRFWh9ZsjfAOyaG7za1MNmBVZ4VVAi/CiJA==",
+ "version": "3.9.2",
+ "resolved": "https://registry.npmjs.org/@docusaurus/utils/-/utils-3.9.2.tgz",
+ "integrity": "sha512-lBSBiRruFurFKXr5Hbsl2thmGweAPmddhF3jb99U4EMDA5L+e5Y1rAkOS07Nvrup7HUMBDrCV45meaxZnt28nQ==",
"license": "MIT",
"dependencies": {
- "@docusaurus/logger": "3.9.1",
- "@docusaurus/types": "3.9.1",
- "@docusaurus/utils-common": "3.9.1",
+ "@docusaurus/logger": "3.9.2",
+ "@docusaurus/types": "3.9.2",
+ "@docusaurus/utils-common": "3.9.2",
"escape-string-regexp": "^4.0.0",
"execa": "5.1.1",
"file-loader": "^6.2.0",
@@ -3974,12 +4037,12 @@
}
},
"node_modules/@docusaurus/utils-common": {
- "version": "3.9.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/utils-common/-/utils-common-3.9.1.tgz",
- "integrity": "sha512-4M1u5Q8Zn2CYL2TJ864M51FV4YlxyGyfC3x+7CLuR6xsyTVNBNU4QMcPgsTHRS9J2+X6Lq7MyH6hiWXyi/sXUQ==",
+ "version": "3.9.2",
+ "resolved": "https://registry.npmjs.org/@docusaurus/utils-common/-/utils-common-3.9.2.tgz",
+ "integrity": "sha512-I53UC1QctruA6SWLvbjbhCpAw7+X7PePoe5pYcwTOEXD/PxeP8LnECAhTHHwWCblyUX5bMi4QLRkxvyZ+IT8Aw==",
"license": "MIT",
"dependencies": {
- "@docusaurus/types": "3.9.1",
+ "@docusaurus/types": "3.9.2",
"tslib": "^2.6.0"
},
"engines": {
@@ -3987,14 +4050,14 @@
}
},
"node_modules/@docusaurus/utils-validation": {
- "version": "3.9.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/utils-validation/-/utils-validation-3.9.1.tgz",
- "integrity": "sha512-5bzab5si3E1udrlZuVGR17857Lfwe8iFPoy5AvMP9PXqDfoyIKT7gDQgAmxdRDMurgHaJlyhXEHHdzDKkOxxZQ==",
+ "version": "3.9.2",
+ "resolved": "https://registry.npmjs.org/@docusaurus/utils-validation/-/utils-validation-3.9.2.tgz",
+ "integrity": "sha512-l7yk3X5VnNmATbwijJkexdhulNsQaNDwoagiwujXoxFbWLcxHQqNQ+c/IAlzrfMMOfa/8xSBZ7KEKDesE/2J7A==",
"license": "MIT",
"dependencies": {
- "@docusaurus/logger": "3.9.1",
- "@docusaurus/utils": "3.9.1",
- "@docusaurus/utils-common": "3.9.1",
+ "@docusaurus/logger": "3.9.2",
+ "@docusaurus/utils": "3.9.2",
+ "@docusaurus/utils-common": "3.9.2",
"fs-extra": "^11.2.0",
"joi": "^17.9.2",
"js-yaml": "^4.1.0",
@@ -4005,6 +4068,448 @@
"node": ">=20.0"
}
},
+ "node_modules/@esbuild/aix-ppc64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.3.tgz",
+ "integrity": "sha512-9fJMTNFTWZMh5qwrBItuziu834eOCUcEqymSH7pY+zoMVEZg3gcPuBNxH1EvfVYe9h0x/Ptw8KBzv7qxb7l8dg==",
+ "cpu": [
+ "ppc64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "aix"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/android-arm": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.3.tgz",
+ "integrity": "sha512-i5D1hPY7GIQmXlXhs2w8AWHhenb00+GxjxRncS2ZM7YNVGNfaMxgzSGuO8o8SJzRc/oZwU2bcScvVERk03QhzA==",
+ "cpu": [
+ "arm"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "android"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/android-arm64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.3.tgz",
+ "integrity": "sha512-YdghPYUmj/FX2SYKJ0OZxf+iaKgMsKHVPF1MAq/P8WirnSpCStzKJFjOjzsW0QQ7oIAiccHdcqjbHmJxRb/dmg==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "android"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/android-x64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.3.tgz",
+ "integrity": "sha512-IN/0BNTkHtk8lkOM8JWAYFg4ORxBkZQf9zXiEOfERX/CzxW3Vg1ewAhU7QSWQpVIzTW+b8Xy+lGzdYXV6UZObQ==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "android"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/darwin-arm64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.3.tgz",
+ "integrity": "sha512-Re491k7ByTVRy0t3EKWajdLIr0gz2kKKfzafkth4Q8A5n1xTHrkqZgLLjFEHVD+AXdUGgQMq+Godfq45mGpCKg==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "darwin"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/darwin-x64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.3.tgz",
+ "integrity": "sha512-vHk/hA7/1AckjGzRqi6wbo+jaShzRowYip6rt6q7VYEDX4LEy1pZfDpdxCBnGtl+A5zq8iXDcyuxwtv3hNtHFg==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "darwin"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/freebsd-arm64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.3.tgz",
+ "integrity": "sha512-ipTYM2fjt3kQAYOvo6vcxJx3nBYAzPjgTCk7QEgZG8AUO3ydUhvelmhrbOheMnGOlaSFUoHXB6un+A7q4ygY9w==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "freebsd"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/freebsd-x64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.3.tgz",
+ "integrity": "sha512-dDk0X87T7mI6U3K9VjWtHOXqwAMJBNN2r7bejDsc+j03SEjtD9HrOl8gVFByeM0aJksoUuUVU9TBaZa2rgj0oA==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "freebsd"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/linux-arm": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.3.tgz",
+ "integrity": "sha512-s6nPv2QkSupJwLYyfS+gwdirm0ukyTFNl3KTgZEAiJDd+iHZcbTPPcWCcRYH+WlNbwChgH2QkE9NSlNrMT8Gfw==",
+ "cpu": [
+ "arm"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/linux-arm64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.3.tgz",
+ "integrity": "sha512-sZOuFz/xWnZ4KH3YfFrKCf1WyPZHakVzTiqji3WDc0BCl2kBwiJLCXpzLzUBLgmp4veFZdvN5ChW4Eq/8Fc2Fg==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/linux-ia32": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.3.tgz",
+ "integrity": "sha512-yGlQYjdxtLdh0a3jHjuwOrxQjOZYD/C9PfdbgJJF3TIZWnm/tMd/RcNiLngiu4iwcBAOezdnSLAwQDPqTmtTYg==",
+ "cpu": [
+ "ia32"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/linux-loong64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.3.tgz",
+ "integrity": "sha512-WO60Sn8ly3gtzhyjATDgieJNet/KqsDlX5nRC5Y3oTFcS1l0KWba+SEa9Ja1GfDqSF1z6hif/SkpQJbL63cgOA==",
+ "cpu": [
+ "loong64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/linux-mips64el": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.3.tgz",
+ "integrity": "sha512-APsymYA6sGcZ4pD6k+UxbDjOFSvPWyZhjaiPyl/f79xKxwTnrn5QUnXR5prvetuaSMsb4jgeHewIDCIWljrSxw==",
+ "cpu": [
+ "mips64el"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/linux-ppc64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.3.tgz",
+ "integrity": "sha512-eizBnTeBefojtDb9nSh4vvVQ3V9Qf9Df01PfawPcRzJH4gFSgrObw+LveUyDoKU3kxi5+9RJTCWlj4FjYXVPEA==",
+ "cpu": [
+ "ppc64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/linux-riscv64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.3.tgz",
+ "integrity": "sha512-3Emwh0r5wmfm3ssTWRQSyVhbOHvqegUDRd0WhmXKX2mkHJe1SFCMJhagUleMq+Uci34wLSipf8Lagt4LlpRFWQ==",
+ "cpu": [
+ "riscv64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/linux-s390x": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.3.tgz",
+ "integrity": "sha512-pBHUx9LzXWBc7MFIEEL0yD/ZVtNgLytvx60gES28GcWMqil8ElCYR4kvbV2BDqsHOvVDRrOxGySBM9Fcv744hw==",
+ "cpu": [
+ "s390x"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/linux-x64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.3.tgz",
+ "integrity": "sha512-Czi8yzXUWIQYAtL/2y6vogER8pvcsOsk5cpwL4Gk5nJqH5UZiVByIY8Eorm5R13gq+DQKYg0+JyQoytLQas4dA==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/netbsd-arm64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.3.tgz",
+ "integrity": "sha512-sDpk0RgmTCR/5HguIZa9n9u+HVKf40fbEUt+iTzSnCaGvY9kFP0YKBWZtJaraonFnqef5SlJ8/TiPAxzyS+UoA==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "netbsd"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/netbsd-x64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.3.tgz",
+ "integrity": "sha512-P14lFKJl/DdaE00LItAukUdZO5iqNH7+PjoBm+fLQjtxfcfFE20Xf5CrLsmZdq5LFFZzb5JMZ9grUwvtVYzjiA==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "netbsd"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/openbsd-arm64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.3.tgz",
+ "integrity": "sha512-AIcMP77AvirGbRl/UZFTq5hjXK+2wC7qFRGoHSDrZ5v5b8DK/GYpXW3CPRL53NkvDqb9D+alBiC/dV0Fb7eJcw==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "openbsd"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/openbsd-x64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.3.tgz",
+ "integrity": "sha512-DnW2sRrBzA+YnE70LKqnM3P+z8vehfJWHXECbwBmH/CU51z6FiqTQTHFenPlHmo3a8UgpLyH3PT+87OViOh1AQ==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "openbsd"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/openharmony-arm64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.3.tgz",
+ "integrity": "sha512-NinAEgr/etERPTsZJ7aEZQvvg/A6IsZG/LgZy+81wON2huV7SrK3e63dU0XhyZP4RKGyTm7aOgmQk0bGp0fy2g==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "openharmony"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/sunos-x64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.3.tgz",
+ "integrity": "sha512-PanZ+nEz+eWoBJ8/f8HKxTTD172SKwdXebZ0ndd953gt1HRBbhMsaNqjTyYLGLPdoWHy4zLU7bDVJztF5f3BHA==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "sunos"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/win32-arm64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.3.tgz",
+ "integrity": "sha512-B2t59lWWYrbRDw/tjiWOuzSsFh1Y/E95ofKz7rIVYSQkUYBjfSgf6oeYPNWHToFRr2zx52JKApIcAS/D5TUBnA==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/win32-ia32": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.3.tgz",
+ "integrity": "sha512-QLKSFeXNS8+tHW7tZpMtjlNb7HKau0QDpwm49u0vUp9y1WOF+PEzkU84y9GqYaAVW8aH8f3GcBck26jh54cX4Q==",
+ "cpu": [
+ "ia32"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/win32-x64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.3.tgz",
+ "integrity": "sha512-4uJGhsxuptu3OcpVAzli+/gWusVGwZZHTlS63hh++ehExkVT8SgiEf7/uC/PclrPPkLhZqGgCTjd0VWLo6xMqA==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
"node_modules/@hapi/hoek": {
"version": "9.3.0",
"resolved": "https://registry.npmjs.org/@hapi/hoek/-/hoek-9.3.0.tgz",
@@ -4020,6 +4525,27 @@
"@hapi/hoek": "^9.0.0"
}
},
+ "node_modules/@iconify/react": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/@iconify/react/-/react-6.0.0.tgz",
+ "integrity": "sha512-eqNscABVZS8eCpZLU/L5F5UokMS9mnCf56iS1nM9YYHdH8ZxqZL9zyjSwW60IOQFsXZkilbBiv+1paMXBhSQnw==",
+ "license": "MIT",
+ "dependencies": {
+ "@iconify/types": "^2.0.0"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/cyberalien"
+ },
+ "peerDependencies": {
+ "react": ">=16"
+ }
+ },
+ "node_modules/@iconify/types": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/@iconify/types/-/types-2.0.0.tgz",
+ "integrity": "sha512-+wluvCrRhXrhyOmRDJ3q8mux9JkKy5SJ/v8ol2tu4FVjyYvtEzkc/3pK15ET6RKg4b4w4BmTk1+gsCUhf21Ykg==",
+ "license": "MIT"
+ },
"node_modules/@jest/schemas": {
"version": "29.6.3",
"resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz",
@@ -4380,15 +4906,6 @@
"node": ">= 8"
}
},
- "node_modules/@opentelemetry/api": {
- "version": "1.9.0",
- "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.0.tgz",
- "integrity": "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==",
- "license": "Apache-2.0",
- "engines": {
- "node": ">=8.0.0"
- }
- },
"node_modules/@pnpm/config.env-replace": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@pnpm/config.env-replace/-/config.env-replace-1.1.0.tgz",
@@ -4436,6 +4953,356 @@
"integrity": "sha512-wwQAWhWSuHaag8c4q/KN/vCoeOJYshAIvMQwD4GpSb3OiZklFfvAgmj0VCBBImRpuF/aFgIRzllXlVX93Jevww==",
"license": "MIT"
},
+ "node_modules/@rollup/rollup-android-arm-eabi": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.57.1.tgz",
+ "integrity": "sha512-A6ehUVSiSaaliTxai040ZpZ2zTevHYbvu/lDoeAteHI8QnaosIzm4qwtezfRg1jOYaUmnzLX1AOD6Z+UJjtifg==",
+ "cpu": [
+ "arm"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "android"
+ ]
+ },
+ "node_modules/@rollup/rollup-android-arm64": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.57.1.tgz",
+ "integrity": "sha512-dQaAddCY9YgkFHZcFNS/606Exo8vcLHwArFZ7vxXq4rigo2bb494/xKMMwRRQW6ug7Js6yXmBZhSBRuBvCCQ3w==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "android"
+ ]
+ },
+ "node_modules/@rollup/rollup-darwin-arm64": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.57.1.tgz",
+ "integrity": "sha512-crNPrwJOrRxagUYeMn/DZwqN88SDmwaJ8Cvi/TN1HnWBU7GwknckyosC2gd0IqYRsHDEnXf328o9/HC6OkPgOg==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "darwin"
+ ]
+ },
+ "node_modules/@rollup/rollup-darwin-x64": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.57.1.tgz",
+ "integrity": "sha512-Ji8g8ChVbKrhFtig5QBV7iMaJrGtpHelkB3lsaKzadFBe58gmjfGXAOfI5FV0lYMH8wiqsxKQ1C9B0YTRXVy4w==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "darwin"
+ ]
+ },
+ "node_modules/@rollup/rollup-freebsd-arm64": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.57.1.tgz",
+ "integrity": "sha512-R+/WwhsjmwodAcz65guCGFRkMb4gKWTcIeLy60JJQbXrJ97BOXHxnkPFrP+YwFlaS0m+uWJTstrUA9o+UchFug==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "freebsd"
+ ]
+ },
+ "node_modules/@rollup/rollup-freebsd-x64": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.57.1.tgz",
+ "integrity": "sha512-IEQTCHeiTOnAUC3IDQdzRAGj3jOAYNr9kBguI7MQAAZK3caezRrg0GxAb6Hchg4lxdZEI5Oq3iov/w/hnFWY9Q==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "freebsd"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-arm-gnueabihf": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.57.1.tgz",
+ "integrity": "sha512-F8sWbhZ7tyuEfsmOxwc2giKDQzN3+kuBLPwwZGyVkLlKGdV1nvnNwYD0fKQ8+XS6hp9nY7B+ZeK01EBUE7aHaw==",
+ "cpu": [
+ "arm"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-arm-musleabihf": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.57.1.tgz",
+ "integrity": "sha512-rGfNUfn0GIeXtBP1wL5MnzSj98+PZe/AXaGBCRmT0ts80lU5CATYGxXukeTX39XBKsxzFpEeK+Mrp9faXOlmrw==",
+ "cpu": [
+ "arm"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-arm64-gnu": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.57.1.tgz",
+ "integrity": "sha512-MMtej3YHWeg/0klK2Qodf3yrNzz6CGjo2UntLvk2RSPlhzgLvYEB3frRvbEF2wRKh1Z2fDIg9KRPe1fawv7C+g==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-arm64-musl": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.57.1.tgz",
+ "integrity": "sha512-1a/qhaaOXhqXGpMFMET9VqwZakkljWHLmZOX48R0I/YLbhdxr1m4gtG1Hq7++VhVUmf+L3sTAf9op4JlhQ5u1Q==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-loong64-gnu": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.57.1.tgz",
+ "integrity": "sha512-QWO6RQTZ/cqYtJMtxhkRkidoNGXc7ERPbZN7dVW5SdURuLeVU7lwKMpo18XdcmpWYd0qsP1bwKPf7DNSUinhvA==",
+ "cpu": [
+ "loong64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-loong64-musl": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.57.1.tgz",
+ "integrity": "sha512-xpObYIf+8gprgWaPP32xiN5RVTi/s5FCR+XMXSKmhfoJjrpRAjCuuqQXyxUa/eJTdAE6eJ+KDKaoEqjZQxh3Gw==",
+ "cpu": [
+ "loong64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-ppc64-gnu": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.57.1.tgz",
+ "integrity": "sha512-4BrCgrpZo4hvzMDKRqEaW1zeecScDCR+2nZ86ATLhAoJ5FQ+lbHVD3ttKe74/c7tNT9c6F2viwB3ufwp01Oh2w==",
+ "cpu": [
+ "ppc64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-ppc64-musl": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.57.1.tgz",
+ "integrity": "sha512-NOlUuzesGauESAyEYFSe3QTUguL+lvrN1HtwEEsU2rOwdUDeTMJdO5dUYl/2hKf9jWydJrO9OL/XSSf65R5+Xw==",
+ "cpu": [
+ "ppc64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-riscv64-gnu": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.57.1.tgz",
+ "integrity": "sha512-ptA88htVp0AwUUqhVghwDIKlvJMD/fmL/wrQj99PRHFRAG6Z5nbWoWG4o81Nt9FT+IuqUQi+L31ZKAFeJ5Is+A==",
+ "cpu": [
+ "riscv64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-riscv64-musl": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.57.1.tgz",
+ "integrity": "sha512-S51t7aMMTNdmAMPpBg7OOsTdn4tySRQvklmL3RpDRyknk87+Sp3xaumlatU+ppQ+5raY7sSTcC2beGgvhENfuw==",
+ "cpu": [
+ "riscv64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-s390x-gnu": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.57.1.tgz",
+ "integrity": "sha512-Bl00OFnVFkL82FHbEqy3k5CUCKH6OEJL54KCyx2oqsmZnFTR8IoNqBF+mjQVcRCT5sB6yOvK8A37LNm/kPJiZg==",
+ "cpu": [
+ "s390x"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-x64-gnu": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.57.1.tgz",
+ "integrity": "sha512-ABca4ceT4N+Tv/GtotnWAeXZUZuM/9AQyCyKYyKnpk4yoA7QIAuBt6Hkgpw8kActYlew2mvckXkvx0FfoInnLg==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-x64-musl": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.57.1.tgz",
+ "integrity": "sha512-HFps0JeGtuOR2convgRRkHCekD7j+gdAuXM+/i6kGzQtFhlCtQkpwtNzkNj6QhCDp7DRJ7+qC/1Vg2jt5iSOFw==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-openbsd-x64": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.57.1.tgz",
+ "integrity": "sha512-H+hXEv9gdVQuDTgnqD+SQffoWoc0Of59AStSzTEj/feWTBAnSfSD3+Dql1ZruJQxmykT/JVY0dE8Ka7z0DH1hw==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "openbsd"
+ ]
+ },
+ "node_modules/@rollup/rollup-openharmony-arm64": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.57.1.tgz",
+ "integrity": "sha512-4wYoDpNg6o/oPximyc/NG+mYUejZrCU2q+2w6YZqrAs2UcNUChIZXjtafAiiZSUc7On8v5NyNj34Kzj/Ltk6dQ==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "openharmony"
+ ]
+ },
+ "node_modules/@rollup/rollup-win32-arm64-msvc": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.57.1.tgz",
+ "integrity": "sha512-O54mtsV/6LW3P8qdTcamQmuC990HDfR71lo44oZMZlXU4tzLrbvTii87Ni9opq60ds0YzuAlEr/GNwuNluZyMQ==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ]
+ },
+ "node_modules/@rollup/rollup-win32-ia32-msvc": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.57.1.tgz",
+ "integrity": "sha512-P3dLS+IerxCT/7D2q2FYcRdWRl22dNbrbBEtxdWhXrfIMPP9lQhb5h4Du04mdl5Woq05jVCDPCMF7Ub0NAjIew==",
+ "cpu": [
+ "ia32"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ]
+ },
+ "node_modules/@rollup/rollup-win32-x64-gnu": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.57.1.tgz",
+ "integrity": "sha512-VMBH2eOOaKGtIJYleXsi2B8CPVADrh+TyNxJ4mWPnKfLB/DBUmzW+5m1xUrcwWoMfSLagIRpjUFeW5CO5hyciQ==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ]
+ },
+ "node_modules/@rollup/rollup-win32-x64-msvc": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.57.1.tgz",
+ "integrity": "sha512-mxRFDdHIWRxg3UfIIAwCm6NzvxG0jDX/wBN6KsQFTvKFqqg9vTrWUE68qEjHt19A5wwx5X5aUi2zuZT7YR0jrA==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ]
+ },
"node_modules/@sideway/address": {
"version": "4.1.5",
"resolved": "https://registry.npmjs.org/@sideway/address/-/address-4.1.5.tgz",
@@ -4458,9 +5325,9 @@
"license": "BSD-3-Clause"
},
"node_modules/@sinclair/typebox": {
- "version": "0.27.8",
- "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz",
- "integrity": "sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==",
+ "version": "0.27.10",
+ "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.10.tgz",
+ "integrity": "sha512-MTBk/3jGLNB2tVxv6uLlFh1iu64iYOQ2PbdOSK3NW8JZsmlaOh2q6sdtKowBhfw8QFLmYNzTW4/oK4uATIi6ZA==",
"license": "MIT"
},
"node_modules/@sindresorhus/is": {
@@ -4475,6 +5342,18 @@
"url": "https://github.com/sindresorhus/is?sponsor=1"
}
},
+ "node_modules/@sindresorhus/merge-streams": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/@sindresorhus/merge-streams/-/merge-streams-4.0.0.tgz",
+ "integrity": "sha512-tlqY9xq5ukxTUZBmoOp+m61cqwQD5pHJtFY3Mn8CA8ps6yghLH/Hw8UPdqg4OLmFW3IFlcXnQNmo/dh8HzXYIQ==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=18"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
"node_modules/@slorber/remark-comment": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/@slorber/remark-comment/-/remark-comment-1.0.0.tgz",
@@ -4490,6 +5369,7 @@
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.0.0.tgz",
"integrity": "sha512-m2bOd0f2RT9k8QJx1JN85cZYyH1RqFBdlwtkSlf4tBDYLCiiZnv1fIIwacK6cqwXavOydf0NPToMQgpKq+dVlA==",
+ "dev": true,
"license": "MIT"
},
"node_modules/@svgr/babel-plugin-add-jsx-attribute": {
@@ -4795,6 +5675,17 @@
"@types/node": "*"
}
},
+ "node_modules/@types/chai": {
+ "version": "5.2.3",
+ "resolved": "https://registry.npmjs.org/@types/chai/-/chai-5.2.3.tgz",
+ "integrity": "sha512-Mw558oeA9fFbv65/y4mHtXDs9bPnFMZAL/jxdPFUpOHHIXX91mcgEHbS5Lahr+pwZFR8A7GQleRWeI6cGFC2UA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/deep-eql": "*",
+ "assertion-error": "^2.0.1"
+ }
+ },
"node_modules/@types/connect": {
"version": "3.4.38",
"resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.38.tgz",
@@ -4823,6 +5714,13 @@
"@types/ms": "*"
}
},
+ "node_modules/@types/deep-eql": {
+ "version": "4.0.2",
+ "resolved": "https://registry.npmjs.org/@types/deep-eql/-/deep-eql-4.0.2.tgz",
+ "integrity": "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==",
+ "dev": true,
+ "license": "MIT"
+ },
"node_modules/@types/eslint": {
"version": "9.6.1",
"resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-9.6.1.tgz",
@@ -4960,6 +5858,12 @@
"integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==",
"license": "MIT"
},
+ "node_modules/@types/katex": {
+ "version": "0.16.8",
+ "resolved": "https://registry.npmjs.org/@types/katex/-/katex-0.16.8.tgz",
+ "integrity": "sha512-trgaNyfU+Xh2Tc+ABIb44a5AYUpicB3uwirOioeOkNPPbmgRNtcWyDeeFRzjPZENO9Vq8gvVqfhaaXWLlevVwg==",
+ "license": "MIT"
+ },
"node_modules/@types/mdast": {
"version": "4.0.4",
"resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
@@ -5143,9 +6047,9 @@
}
},
"node_modules/@types/yargs": {
- "version": "17.0.33",
- "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.33.tgz",
- "integrity": "sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA==",
+ "version": "17.0.35",
+ "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.35.tgz",
+ "integrity": "sha512-qUHkeCyQFxMXg79wQfTtfndEC+N9ZZg76HJftDJp+qH2tV7Gj4OJi7l+PiWwJ+pWtW8GwSmqsDj/oymhrTWXjg==",
"license": "MIT",
"dependencies": {
"@types/yargs-parser": "*"
@@ -5163,15 +6067,6 @@
"integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==",
"license": "ISC"
},
- "node_modules/@vercel/oidc": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/@vercel/oidc/-/oidc-3.0.1.tgz",
- "integrity": "sha512-V/YRVrJDqM6VaMBjRUrd6qRMrTKvZjHdVdEmdXsOZMulTa3iK98ijKTc3wldBmst6W5rHpqMoKllKcBAHgN7GQ==",
- "license": "Apache-2.0",
- "engines": {
- "node": ">= 20"
- }
- },
"node_modules/@vimeo/player": {
"version": "2.29.0",
"resolved": "https://registry.npmjs.org/@vimeo/player/-/player-2.29.0.tgz",
@@ -5182,6 +6077,117 @@
"weakmap-polyfill": "2.0.4"
}
},
+ "node_modules/@vitest/expect": {
+ "version": "4.0.18",
+ "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-4.0.18.tgz",
+ "integrity": "sha512-8sCWUyckXXYvx4opfzVY03EOiYVxyNrHS5QxX3DAIi5dpJAAkyJezHCP77VMX4HKA2LDT/Jpfo8i2r5BE3GnQQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@standard-schema/spec": "^1.0.0",
+ "@types/chai": "^5.2.2",
+ "@vitest/spy": "4.0.18",
+ "@vitest/utils": "4.0.18",
+ "chai": "^6.2.1",
+ "tinyrainbow": "^3.0.3"
+ },
+ "funding": {
+ "url": "https://opencollective.com/vitest"
+ }
+ },
+ "node_modules/@vitest/mocker": {
+ "version": "4.0.18",
+ "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-4.0.18.tgz",
+ "integrity": "sha512-HhVd0MDnzzsgevnOWCBj5Otnzobjy5wLBe4EdeeFGv8luMsGcYqDuFRMcttKWZA5vVO8RFjexVovXvAM4JoJDQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@vitest/spy": "4.0.18",
+ "estree-walker": "^3.0.3",
+ "magic-string": "^0.30.21"
+ },
+ "funding": {
+ "url": "https://opencollective.com/vitest"
+ },
+ "peerDependencies": {
+ "msw": "^2.4.9",
+ "vite": "^6.0.0 || ^7.0.0-0"
+ },
+ "peerDependenciesMeta": {
+ "msw": {
+ "optional": true
+ },
+ "vite": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/@vitest/pretty-format": {
+ "version": "4.0.18",
+ "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-4.0.18.tgz",
+ "integrity": "sha512-P24GK3GulZWC5tz87ux0m8OADrQIUVDPIjjj65vBXYG17ZeU3qD7r+MNZ1RNv4l8CGU2vtTRqixrOi9fYk/yKw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "tinyrainbow": "^3.0.3"
+ },
+ "funding": {
+ "url": "https://opencollective.com/vitest"
+ }
+ },
+ "node_modules/@vitest/runner": {
+ "version": "4.0.18",
+ "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-4.0.18.tgz",
+ "integrity": "sha512-rpk9y12PGa22Jg6g5M3UVVnTS7+zycIGk9ZNGN+m6tZHKQb7jrP7/77WfZy13Y/EUDd52NDsLRQhYKtv7XfPQw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@vitest/utils": "4.0.18",
+ "pathe": "^2.0.3"
+ },
+ "funding": {
+ "url": "https://opencollective.com/vitest"
+ }
+ },
+ "node_modules/@vitest/snapshot": {
+ "version": "4.0.18",
+ "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-4.0.18.tgz",
+ "integrity": "sha512-PCiV0rcl7jKQjbgYqjtakly6T1uwv/5BQ9SwBLekVg/EaYeQFPiXcgrC2Y7vDMA8dM1SUEAEV82kgSQIlXNMvA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@vitest/pretty-format": "4.0.18",
+ "magic-string": "^0.30.21",
+ "pathe": "^2.0.3"
+ },
+ "funding": {
+ "url": "https://opencollective.com/vitest"
+ }
+ },
+ "node_modules/@vitest/spy": {
+ "version": "4.0.18",
+ "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-4.0.18.tgz",
+ "integrity": "sha512-cbQt3PTSD7P2OARdVW3qWER5EGq7PHlvE+QfzSC0lbwO+xnt7+XH06ZzFjFRgzUX//JmpxrCu92VdwvEPlWSNw==",
+ "dev": true,
+ "license": "MIT",
+ "funding": {
+ "url": "https://opencollective.com/vitest"
+ }
+ },
+ "node_modules/@vitest/utils": {
+ "version": "4.0.18",
+ "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-4.0.18.tgz",
+ "integrity": "sha512-msMRKLMVLWygpK3u2Hybgi4MNjcYJvwTb0Ru09+fOyCXIgT5raYP041DRRdiJiI3k/2U6SEbAETB3YtBrUkCFA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@vitest/pretty-format": "4.0.18",
+ "tinyrainbow": "^3.0.3"
+ },
+ "funding": {
+ "url": "https://opencollective.com/vitest"
+ }
+ },
"node_modules/@webassemblyjs/ast": {
"version": "1.14.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.14.1.tgz",
@@ -5450,24 +6456,6 @@
"node": ">=8"
}
},
- "node_modules/ai": {
- "version": "5.0.60",
- "resolved": "https://registry.npmjs.org/ai/-/ai-5.0.60.tgz",
- "integrity": "sha512-80U/3kmdBW6g+JkLXpz/P2EwkyEaWlPlYtuLUpx/JYK9F7WZh9NnkYoh1KvUi1Sbpo0NyurBTvX0a2AG9mmbDA==",
- "license": "Apache-2.0",
- "dependencies": {
- "@ai-sdk/gateway": "1.0.33",
- "@ai-sdk/provider": "2.0.0",
- "@ai-sdk/provider-utils": "3.0.10",
- "@opentelemetry/api": "1.9.0"
- },
- "engines": {
- "node": ">=18"
- },
- "peerDependencies": {
- "zod": "^3.25.76 || ^4.1.8"
- }
- },
"node_modules/ajv": {
"version": "8.17.1",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz",
@@ -5514,34 +6502,34 @@
}
},
"node_modules/algoliasearch": {
- "version": "5.39.0",
- "resolved": "https://registry.npmjs.org/algoliasearch/-/algoliasearch-5.39.0.tgz",
- "integrity": "sha512-DzTfhUxzg9QBNGzU/0kZkxEV72TeA4MmPJ7RVfLnQwHNhhliPo7ynglEWJS791rNlLFoTyrKvkapwr/P3EXV9A==",
- "license": "MIT",
- "dependencies": {
- "@algolia/abtesting": "1.5.0",
- "@algolia/client-abtesting": "5.39.0",
- "@algolia/client-analytics": "5.39.0",
- "@algolia/client-common": "5.39.0",
- "@algolia/client-insights": "5.39.0",
- "@algolia/client-personalization": "5.39.0",
- "@algolia/client-query-suggestions": "5.39.0",
- "@algolia/client-search": "5.39.0",
- "@algolia/ingestion": "1.39.0",
- "@algolia/monitoring": "1.39.0",
- "@algolia/recommend": "5.39.0",
- "@algolia/requester-browser-xhr": "5.39.0",
- "@algolia/requester-fetch": "5.39.0",
- "@algolia/requester-node-http": "5.39.0"
+ "version": "5.48.0",
+ "resolved": "https://registry.npmjs.org/algoliasearch/-/algoliasearch-5.48.0.tgz",
+ "integrity": "sha512-aD8EQC6KEman6/S79FtPdQmB7D4af/etcRL/KwiKFKgAE62iU8c5PeEQvpvIcBPurC3O/4Lj78nOl7ZcoazqSw==",
+ "license": "MIT",
+ "dependencies": {
+ "@algolia/abtesting": "1.14.0",
+ "@algolia/client-abtesting": "5.48.0",
+ "@algolia/client-analytics": "5.48.0",
+ "@algolia/client-common": "5.48.0",
+ "@algolia/client-insights": "5.48.0",
+ "@algolia/client-personalization": "5.48.0",
+ "@algolia/client-query-suggestions": "5.48.0",
+ "@algolia/client-search": "5.48.0",
+ "@algolia/ingestion": "1.48.0",
+ "@algolia/monitoring": "1.48.0",
+ "@algolia/recommend": "5.48.0",
+ "@algolia/requester-browser-xhr": "5.48.0",
+ "@algolia/requester-fetch": "5.48.0",
+ "@algolia/requester-node-http": "5.48.0"
},
"engines": {
"node": ">= 14.0.0"
}
},
"node_modules/algoliasearch-helper": {
- "version": "3.26.0",
- "resolved": "https://registry.npmjs.org/algoliasearch-helper/-/algoliasearch-helper-3.26.0.tgz",
- "integrity": "sha512-Rv2x3GXleQ3ygwhkhJubhhYGsICmShLAiqtUuJTUkr9uOCOXyF2E71LVT4XDnVffbknv8XgScP4U0Oxtgm+hIw==",
+ "version": "3.27.1",
+ "resolved": "https://registry.npmjs.org/algoliasearch-helper/-/algoliasearch-helper-3.27.1.tgz",
+ "integrity": "sha512-XXGr02Cz285vLbqM6vPfb39xqV1ptpFr1xn9mqaW+nUvYTvFTdKgYTC/Cg1VzgRTQqNkq9+LlUVv8cfCeOoKig==",
"license": "MIT",
"dependencies": {
"@algolia/events": "^4.0.1"
@@ -5682,6 +6670,16 @@
"node": ">=8"
}
},
+ "node_modules/assertion-error": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-2.0.1.tgz",
+ "integrity": "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=12"
+ }
+ },
"node_modules/astring": {
"version": "1.9.0",
"resolved": "https://registry.npmjs.org/astring/-/astring-1.9.0.tgz",
@@ -5692,9 +6690,9 @@
}
},
"node_modules/autoprefixer": {
- "version": "10.4.21",
- "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.21.tgz",
- "integrity": "sha512-O+A6LWV5LDHSJD3LjHYoNi4VLsj/Whi7k6zG12xTYaU4cQ8oxQGckXNX8cRHK5yOZ/ppVHe0ZBXGzSV9jXdVbQ==",
+ "version": "10.4.24",
+ "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.24.tgz",
+ "integrity": "sha512-uHZg7N9ULTVbutaIsDRoUkoS8/h3bdsmVJYZ5l3wv8Cp/6UIIoRDm90hZ+BwxUj/hGBEzLxdHNSKuFpn8WOyZw==",
"funding": [
{
"type": "opencollective",
@@ -5711,10 +6709,9 @@
],
"license": "MIT",
"dependencies": {
- "browserslist": "^4.24.4",
- "caniuse-lite": "^1.0.30001702",
- "fraction.js": "^4.3.7",
- "normalize-range": "^0.1.2",
+ "browserslist": "^4.28.1",
+ "caniuse-lite": "^1.0.30001766",
+ "fraction.js": "^5.3.4",
"picocolors": "^1.1.1",
"postcss-value-parser": "^4.2.0"
},
@@ -5755,13 +6752,13 @@
}
},
"node_modules/babel-plugin-polyfill-corejs2": {
- "version": "0.4.14",
- "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.4.14.tgz",
- "integrity": "sha512-Co2Y9wX854ts6U8gAAPXfn0GmAyctHuK8n0Yhfjd6t30g7yvKjspvvOo9yG+z52PZRgFErt7Ka2pYnXCjLKEpg==",
+ "version": "0.4.15",
+ "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.4.15.tgz",
+ "integrity": "sha512-hR3GwrRwHUfYwGfrisXPIDP3JcYfBrW7wKE7+Au6wDYl7fm/ka1NEII6kORzxNU556JjfidZeBsO10kYvtV1aw==",
"license": "MIT",
"dependencies": {
- "@babel/compat-data": "^7.27.7",
- "@babel/helper-define-polyfill-provider": "^0.6.5",
+ "@babel/compat-data": "^7.28.6",
+ "@babel/helper-define-polyfill-provider": "^0.6.6",
"semver": "^6.3.1"
},
"peerDependencies": {
@@ -5791,12 +6788,12 @@
}
},
"node_modules/babel-plugin-polyfill-regenerator": {
- "version": "0.6.5",
- "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.6.5.tgz",
- "integrity": "sha512-ISqQ2frbiNU9vIJkzg7dlPpznPZ4jOiUQ1uSmB0fEHeowtN3COYRsXr/xexn64NpU13P06jc/L5TgiJXOgrbEg==",
+ "version": "0.6.6",
+ "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.6.6.tgz",
+ "integrity": "sha512-hYm+XLYRMvupxiQzrvXUj7YyvFFVfv5gI0R71AJzudg1g2AI2vyCPPIFEBjk162/wFzti3inBHo7isWFuEVS/A==",
"license": "MIT",
"dependencies": {
- "@babel/helper-define-polyfill-provider": "^0.6.5"
+ "@babel/helper-define-polyfill-provider": "^0.6.6"
},
"peerDependencies": {
"@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0"
@@ -5819,9 +6816,9 @@
"license": "MIT"
},
"node_modules/baseline-browser-mapping": {
- "version": "2.8.12",
- "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.8.12.tgz",
- "integrity": "sha512-vAPMQdnyKCBtkmQA6FMCBvU9qFIppS3nzyXnEM+Lo2IAhG4Mpjv9cCxMudhgV3YdNNJv6TNqXy97dfRVL2LmaQ==",
+ "version": "2.9.19",
+ "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.9.19.tgz",
+ "integrity": "sha512-ipDqC8FrAl/76p2SSWKSI+H9tFwm7vYqXQrItCuiVPt26Km0jS+NzSsBWAaBusvSbQcfJG+JitdMm+wZAgTYqg==",
"license": "Apache-2.0",
"bin": {
"baseline-browser-mapping": "dist/cli.js"
@@ -6002,9 +6999,9 @@
}
},
"node_modules/browserslist": {
- "version": "4.26.3",
- "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.26.3.tgz",
- "integrity": "sha512-lAUU+02RFBuCKQPj/P6NgjlbCnLBMp4UtgTx7vNHd3XSIJF87s9a5rA3aH2yw3GS9DqZAUbOtZdCCiZeVRqt0w==",
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.28.1.tgz",
+ "integrity": "sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==",
"funding": [
{
"type": "opencollective",
@@ -6021,11 +7018,11 @@
],
"license": "MIT",
"dependencies": {
- "baseline-browser-mapping": "^2.8.9",
- "caniuse-lite": "^1.0.30001746",
- "electron-to-chromium": "^1.5.227",
- "node-releases": "^2.0.21",
- "update-browserslist-db": "^1.1.3"
+ "baseline-browser-mapping": "^2.9.0",
+ "caniuse-lite": "^1.0.30001759",
+ "electron-to-chromium": "^1.5.263",
+ "node-releases": "^2.0.27",
+ "update-browserslist-db": "^1.2.0"
},
"bin": {
"browserslist": "cli.js"
@@ -6182,9 +7179,9 @@
}
},
"node_modules/caniuse-lite": {
- "version": "1.0.30001748",
- "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001748.tgz",
- "integrity": "sha512-5P5UgAr0+aBmNiplks08JLw+AW/XG/SurlgZLgB1dDLfAw7EfRGxIwzPHxdSCGY/BTKDqIVyJL87cCN6s0ZR0w==",
+ "version": "1.0.30001769",
+ "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001769.tgz",
+ "integrity": "sha512-BCfFL1sHijQlBGWBMuJyhZUhzo7wer5sVj9hqekB/7xn0Ypy+pER/edCYQm4exbXj4WiySGp40P8UuTh6w1srg==",
"funding": [
{
"type": "opencollective",
@@ -6229,6 +7226,16 @@
"react": ">=17.0.0"
}
},
+ "node_modules/chai": {
+ "version": "6.2.2",
+ "resolved": "https://registry.npmjs.org/chai/-/chai-6.2.2.tgz",
+ "integrity": "sha512-NUPRluOfOiTKBKvWPtSD4PhFvWCqOi0BGStNWs57X9js7XGTprSmFoz5F0tWhR4WPjNeR9jXqdC7/UpSJTnlRg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=18"
+ }
+ },
"node_modules/chalk": {
"version": "4.1.2",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
@@ -6806,12 +7813,12 @@
}
},
"node_modules/core-js-compat": {
- "version": "3.45.1",
- "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.45.1.tgz",
- "integrity": "sha512-tqTt5T4PzsMIZ430XGviK4vzYSoeNJ6CXODi6c/voxOT6IZqBht5/EKaSNnYiEjjRYxjVz7DQIsOsY0XNi8PIA==",
+ "version": "3.48.0",
+ "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.48.0.tgz",
+ "integrity": "sha512-OM4cAF3D6VtH/WkLtWvyNC56EZVXsZdU3iqaMG2B4WvYrlqU831pc4UtG5yp0sE9z8Y02wVN7PjW5Zf9Gt0f1Q==",
"license": "MIT",
"dependencies": {
- "browserslist": "^4.25.3"
+ "browserslist": "^4.28.1"
},
"funding": {
"type": "opencollective",
@@ -6819,9 +7826,9 @@
}
},
"node_modules/core-js-pure": {
- "version": "3.45.1",
- "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.45.1.tgz",
- "integrity": "sha512-OHnWFKgTUshEU8MK+lOs1H8kC8GkTi9Z1tvNkxrCcw9wl3MJIO7q2ld77wjWn4/xuGrVu2X+nME1iIIPBSdyEQ==",
+ "version": "3.48.0",
+ "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.48.0.tgz",
+ "integrity": "sha512-1slJgk89tWC51HQ1AEqG+s2VuwpTRr8ocu4n20QUcH1v9lAN0RXen0Q0AABa/DK1I7RrNWLucplOHMx8hfTGTw==",
"hasInstallScript": true,
"license": "MIT",
"funding": {
@@ -6928,9 +7935,9 @@
}
},
"node_modules/css-blank-pseudo/node_modules/postcss-selector-parser": {
- "version": "7.1.0",
- "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.0.tgz",
- "integrity": "sha512-8sLjZwK0R+JlxlYcTuVnyT2v+htpdrjDOKuMcOVdYjt52Lh8hWRYpxBPoKx/Zg+bcjc3wx6fmQevMmUztS/ccA==",
+ "version": "7.1.1",
+ "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.1.tgz",
+ "integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==",
"license": "MIT",
"dependencies": {
"cssesc": "^3.0.0",
@@ -6941,9 +7948,9 @@
}
},
"node_modules/css-declaration-sorter": {
- "version": "7.3.0",
- "resolved": "https://registry.npmjs.org/css-declaration-sorter/-/css-declaration-sorter-7.3.0.tgz",
- "integrity": "sha512-LQF6N/3vkAMYF4xoHLJfG718HRJh34Z8BnNhd6bosOMIVjMlhuZK5++oZa3uYAgrI5+7x2o27gUqTR2U/KjUOQ==",
+ "version": "7.3.1",
+ "resolved": "https://registry.npmjs.org/css-declaration-sorter/-/css-declaration-sorter-7.3.1.tgz",
+ "integrity": "sha512-gz6x+KkgNCjxq3Var03pRYLhyNfwhkKF1g/yoLgDNtFvVu0/fOLV9C8fFEZRjACp/XQLumjAYo7JVjzH3wLbxA==",
"license": "ISC",
"engines": {
"node": "^14 || ^16 || >=18"
@@ -7002,9 +8009,9 @@
}
},
"node_modules/css-has-pseudo/node_modules/postcss-selector-parser": {
- "version": "7.1.0",
- "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.0.tgz",
- "integrity": "sha512-8sLjZwK0R+JlxlYcTuVnyT2v+htpdrjDOKuMcOVdYjt52Lh8hWRYpxBPoKx/Zg+bcjc3wx6fmQevMmUztS/ccA==",
+ "version": "7.1.1",
+ "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.1.tgz",
+ "integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==",
"license": "MIT",
"dependencies": {
"cssesc": "^3.0.0",
@@ -7157,9 +8164,9 @@
}
},
"node_modules/cssdb": {
- "version": "8.4.2",
- "resolved": "https://registry.npmjs.org/cssdb/-/cssdb-8.4.2.tgz",
- "integrity": "sha512-PzjkRkRUS+IHDJohtxkIczlxPPZqRo0nXplsYXOMBRPjcVRjj1W4DfvRgshUYTVuUigU7ptVYkFJQ7abUB0nyg==",
+ "version": "8.7.1",
+ "resolved": "https://registry.npmjs.org/cssdb/-/cssdb-8.7.1.tgz",
+ "integrity": "sha512-+F6LKx48RrdGOtE4DT5jz7Uo+VeyKXpK797FAevIkzjV8bMHz6xTO5F7gNDcRCHmPgD5jj2g6QCsY9zmVrh38A==",
"funding": [
{
"type": "opencollective",
@@ -7735,9 +8742,9 @@
"license": "MIT"
},
"node_modules/electron-to-chromium": {
- "version": "1.5.232",
- "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.232.tgz",
- "integrity": "sha512-ENirSe7wf8WzyPCibqKUG1Cg43cPaxH4wRR7AJsX7MCABCHBIOFqvaYODSLKUuZdraxUTHRE/0A2Aq8BYKEHOg==",
+ "version": "1.5.286",
+ "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.286.tgz",
+ "integrity": "sha512-9tfDXhJ4RKFNerfjdCcZfufu49vg620741MNs26a9+bhLThdB+plgMeou98CAaHu/WATj2iHOOHTp1hWtABj2A==",
"license": "ISC"
},
"node_modules/emoji-regex": {
@@ -7882,6 +8889,48 @@
"url": "https://opencollective.com/unified"
}
},
+ "node_modules/esbuild": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.3.tgz",
+ "integrity": "sha512-8VwMnyGCONIs6cWue2IdpHxHnAjzxnw2Zr7MkVxB2vjmQ2ivqGFb4LEG3SMnv0Gb2F/G/2yA8zUaiL1gywDCCg==",
+ "dev": true,
+ "hasInstallScript": true,
+ "license": "MIT",
+ "bin": {
+ "esbuild": "bin/esbuild"
+ },
+ "engines": {
+ "node": ">=18"
+ },
+ "optionalDependencies": {
+ "@esbuild/aix-ppc64": "0.27.3",
+ "@esbuild/android-arm": "0.27.3",
+ "@esbuild/android-arm64": "0.27.3",
+ "@esbuild/android-x64": "0.27.3",
+ "@esbuild/darwin-arm64": "0.27.3",
+ "@esbuild/darwin-x64": "0.27.3",
+ "@esbuild/freebsd-arm64": "0.27.3",
+ "@esbuild/freebsd-x64": "0.27.3",
+ "@esbuild/linux-arm": "0.27.3",
+ "@esbuild/linux-arm64": "0.27.3",
+ "@esbuild/linux-ia32": "0.27.3",
+ "@esbuild/linux-loong64": "0.27.3",
+ "@esbuild/linux-mips64el": "0.27.3",
+ "@esbuild/linux-ppc64": "0.27.3",
+ "@esbuild/linux-riscv64": "0.27.3",
+ "@esbuild/linux-s390x": "0.27.3",
+ "@esbuild/linux-x64": "0.27.3",
+ "@esbuild/netbsd-arm64": "0.27.3",
+ "@esbuild/netbsd-x64": "0.27.3",
+ "@esbuild/openbsd-arm64": "0.27.3",
+ "@esbuild/openbsd-x64": "0.27.3",
+ "@esbuild/openharmony-arm64": "0.27.3",
+ "@esbuild/sunos-x64": "0.27.3",
+ "@esbuild/win32-arm64": "0.27.3",
+ "@esbuild/win32-ia32": "0.27.3",
+ "@esbuild/win32-x64": "0.27.3"
+ }
+ },
"node_modules/escalade": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz",
@@ -8046,9 +9095,9 @@
}
},
"node_modules/estree-util-value-to-estree": {
- "version": "3.4.0",
- "resolved": "https://registry.npmjs.org/estree-util-value-to-estree/-/estree-util-value-to-estree-3.4.0.tgz",
- "integrity": "sha512-Zlp+gxis+gCfK12d3Srl2PdX2ybsEA8ZYy6vQGVQTNNYLEGRQQ56XB64bjemN8kxIKXP1nC9ip4Z+ILy9LGzvQ==",
+ "version": "3.5.0",
+ "resolved": "https://registry.npmjs.org/estree-util-value-to-estree/-/estree-util-value-to-estree-3.5.0.tgz",
+ "integrity": "sha512-aMV56R27Gv3QmfmF1MY12GWkGzzeAezAX+UplqHVASfjc9wNzI/X6hC0S9oxq61WT4aQesLGslWP9tKk6ghRZQ==",
"license": "MIT",
"dependencies": {
"@types/estree": "^1.0.0"
@@ -8137,15 +9186,6 @@
"node": ">=0.8.x"
}
},
- "node_modules/eventsource-parser": {
- "version": "3.0.6",
- "resolved": "https://registry.npmjs.org/eventsource-parser/-/eventsource-parser-3.0.6.tgz",
- "integrity": "sha512-Vo1ab+QXPzZ4tCa8SwIHJFaSzy4R6SHf7BY79rFBDf0idraZWAkYrDjDj8uWaSm3S2TK+hJ7/t1CEmZ7jXw+pg==",
- "license": "MIT",
- "engines": {
- "node": ">=18.0.0"
- }
- },
"node_modules/execa": {
"version": "5.1.1",
"resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz",
@@ -8169,6 +9209,16 @@
"url": "https://github.com/sindresorhus/execa?sponsor=1"
}
},
+ "node_modules/expect-type": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.3.0.tgz",
+ "integrity": "sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA==",
+ "dev": true,
+ "license": "Apache-2.0",
+ "engines": {
+ "node": ">=12.0.0"
+ }
+ },
"node_modules/express": {
"version": "4.21.2",
"resolved": "https://registry.npmjs.org/express/-/express-4.21.2.tgz",
@@ -8591,15 +9641,15 @@
}
},
"node_modules/fraction.js": {
- "version": "4.3.7",
- "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.3.7.tgz",
- "integrity": "sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==",
+ "version": "5.3.4",
+ "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-5.3.4.tgz",
+ "integrity": "sha512-1X1NTtiJphryn/uLQz3whtY6jK3fTqoE3ohKs0tT+Ujr1W59oopxmoEh7Lu5p6vBaPbgoM0bzveAW4Qi5RyWDQ==",
"license": "MIT",
"engines": {
"node": "*"
},
"funding": {
- "type": "patreon",
+ "type": "github",
"url": "https://github.com/sponsors/rawify"
}
},
@@ -8658,6 +9708,18 @@
"node": ">=6.9.0"
}
},
+ "node_modules/get-east-asian-width": {
+ "version": "1.4.0",
+ "resolved": "https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.4.0.tgz",
+ "integrity": "sha512-QZjmEOC+IT1uk6Rx0sX22V6uHWVwbdbxf1faPqJ1QhLdGgsRGCZoyaQBm/piRdJy/D2um6hM1UP7ZEeQ4EkP+Q==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=18"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
"node_modules/get-intrinsic": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz",
@@ -8868,9 +9930,9 @@
}
},
"node_modules/gray-matter/node_modules/js-yaml": {
- "version": "3.14.1",
- "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz",
- "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==",
+ "version": "3.14.2",
+ "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.2.tgz",
+ "integrity": "sha512-PMSmkqxr106Xa156c2M265Z+FTrPl+oxd/rgOQy2tijQeK5TxQ43psO1ZCwhVOSdnn+RzkzlRz/eY4BgJBYVpg==",
"license": "MIT",
"dependencies": {
"argparse": "^1.0.7",
@@ -9072,15 +10134,15 @@
}
},
"node_modules/hast-util-to-parse5": {
- "version": "8.0.0",
- "resolved": "https://registry.npmjs.org/hast-util-to-parse5/-/hast-util-to-parse5-8.0.0.tgz",
- "integrity": "sha512-3KKrV5ZVI8if87DVSi1vDeByYrkGzg4mEfeu4alwgmmIeARiBLKCZS2uw5Gb6nU9x9Yufyj3iudm6i7nl52PFw==",
+ "version": "8.0.1",
+ "resolved": "https://registry.npmjs.org/hast-util-to-parse5/-/hast-util-to-parse5-8.0.1.tgz",
+ "integrity": "sha512-MlWT6Pjt4CG9lFCjiz4BH7l9wmrMkfkJYCxFwKQic8+RTZgWPuWxwAfjJElsXkex7DJjfSJsQIt931ilUgmwdA==",
"license": "MIT",
"dependencies": {
"@types/hast": "^3.0.0",
"comma-separated-tokens": "^2.0.0",
"devlop": "^1.0.0",
- "property-information": "^6.0.0",
+ "property-information": "^7.0.0",
"space-separated-tokens": "^2.0.0",
"web-namespaces": "^2.0.0",
"zwitch": "^2.0.0"
@@ -9090,16 +10152,6 @@
"url": "https://opencollective.com/unified"
}
},
- "node_modules/hast-util-to-parse5/node_modules/property-information": {
- "version": "6.5.0",
- "resolved": "https://registry.npmjs.org/property-information/-/property-information-6.5.0.tgz",
- "integrity": "sha512-PgTgs/BlvHxOu8QuEN7wi5A0OmXaBcHpmCSTehcs6Uuu9IkDIEo13Hy7n898RHfrQ49vKCoGeWZSaAK01nwVig==",
- "license": "MIT",
- "funding": {
- "type": "github",
- "url": "https://github.com/sponsors/wooorm"
- }
- },
"node_modules/hast-util-whitespace": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-3.0.0.tgz",
@@ -9625,9 +10677,9 @@
}
},
"node_modules/inline-style-parser": {
- "version": "0.2.4",
- "resolved": "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.2.4.tgz",
- "integrity": "sha512-0aO8FkhNZlj/ZIbNi7Lxxr12obT7cL1moPfE4tg1LkX7LlLfC6DeX4l2ZEud1ukP9jNQyNnfzQVqwbwmAATY4Q==",
+ "version": "0.2.7",
+ "resolved": "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.2.7.tgz",
+ "integrity": "sha512-Nb2ctOyNR8DqQoR0OwRG95uNWIC0C1lCgf5Naz5H6Ji72KZ8OcFZLz2P5sNgwlyoJ8Yif11oMuYs5pBQa86csA==",
"license": "MIT"
},
"node_modules/invariant": {
@@ -10060,9 +11112,9 @@
"license": "MIT"
},
"node_modules/js-yaml": {
- "version": "4.1.0",
- "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz",
- "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==",
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz",
+ "integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==",
"license": "MIT",
"dependencies": {
"argparse": "^2.0.1"
@@ -10095,12 +11147,6 @@
"integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==",
"license": "MIT"
},
- "node_modules/json-schema": {
- "version": "0.4.0",
- "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz",
- "integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==",
- "license": "(AFL-2.1 OR BSD-3-Clause)"
- },
"node_modules/json-schema-traverse": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz",
@@ -10119,6 +11165,12 @@
"node": ">=6"
}
},
+ "node_modules/jsonc-parser": {
+ "version": "3.3.1",
+ "resolved": "https://registry.npmjs.org/jsonc-parser/-/jsonc-parser-3.3.1.tgz",
+ "integrity": "sha512-HUgH65KyejrUFPvHFPbqOY0rsFip3Bo5wb4ngvdi1EpCYWUQDC5V+Y7mZws+DLkr4M//zQJoanu1SP+87Dv1oQ==",
+ "license": "MIT"
+ },
"node_modules/jsonfile": {
"version": "6.2.0",
"resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.0.tgz",
@@ -10131,6 +11183,31 @@
"graceful-fs": "^4.1.6"
}
},
+ "node_modules/katex": {
+ "version": "0.16.28",
+ "resolved": "https://registry.npmjs.org/katex/-/katex-0.16.28.tgz",
+ "integrity": "sha512-YHzO7721WbmAL6Ov1uzN/l5mY5WWWhJBSW+jq4tkfZfsxmo1hu6frS0EOswvjBUnWE6NtjEs48SFn5CQESRLZg==",
+ "funding": [
+ "https://opencollective.com/katex",
+ "https://github.com/sponsors/katex"
+ ],
+ "license": "MIT",
+ "dependencies": {
+ "commander": "^8.3.0"
+ },
+ "bin": {
+ "katex": "cli.js"
+ }
+ },
+ "node_modules/katex/node_modules/commander": {
+ "version": "8.3.0",
+ "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz",
+ "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 12"
+ }
+ },
"node_modules/keyv": {
"version": "4.5.4",
"resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz",
@@ -10219,6 +11296,15 @@
"integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==",
"license": "MIT"
},
+ "node_modules/linkify-it": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-5.0.0.tgz",
+ "integrity": "sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ==",
+ "license": "MIT",
+ "dependencies": {
+ "uc.micro": "^2.0.0"
+ }
+ },
"node_modules/loader-runner": {
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/loader-runner/-/loader-runner-4.3.0.tgz",
@@ -10296,84 +11382,329 @@
"integrity": "sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==",
"license": "MIT",
"funding": {
- "type": "github",
- "url": "https://github.com/sponsors/wooorm"
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
+ "node_modules/loose-envify": {
+ "version": "1.4.0",
+ "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz",
+ "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==",
+ "license": "MIT",
+ "dependencies": {
+ "js-tokens": "^3.0.0 || ^4.0.0"
+ },
+ "bin": {
+ "loose-envify": "cli.js"
+ }
+ },
+ "node_modules/lower-case": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz",
+ "integrity": "sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==",
+ "license": "MIT",
+ "dependencies": {
+ "tslib": "^2.0.3"
+ }
+ },
+ "node_modules/lowercase-keys": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-3.0.0.tgz",
+ "integrity": "sha512-ozCC6gdQ+glXOQsveKD0YsDy8DSQFjDTz4zyzEHNV5+JP5D62LmfDZ6o1cycFx9ouG940M5dE8C8CTewdj2YWQ==",
+ "license": "MIT",
+ "engines": {
+ "node": "^12.20.0 || ^14.13.1 || >=16.0.0"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/lru-cache": {
+ "version": "5.1.1",
+ "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz",
+ "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==",
+ "license": "ISC",
+ "dependencies": {
+ "yallist": "^3.0.2"
+ }
+ },
+ "node_modules/magic-string": {
+ "version": "0.30.21",
+ "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.21.tgz",
+ "integrity": "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@jridgewell/sourcemap-codec": "^1.5.5"
+ }
+ },
+ "node_modules/markdown-extensions": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/markdown-extensions/-/markdown-extensions-2.0.0.tgz",
+ "integrity": "sha512-o5vL7aDWatOTX8LzaS1WMoaoxIiLRQJuIKKe2wAw6IeULDHaqbiqiggmx+pKvZDb1Sj+pE46Sn1T7lCqfFtg1Q==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=16"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/markdown-it": {
+ "version": "14.1.0",
+ "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-14.1.0.tgz",
+ "integrity": "sha512-a54IwgWPaeBCAAsv13YgmALOF1elABB08FxO9i+r4VFk5Vl4pKokRPeX8u5TCgSsPi6ec1otfLjdOpVcgbpshg==",
+ "license": "MIT",
+ "dependencies": {
+ "argparse": "^2.0.1",
+ "entities": "^4.4.0",
+ "linkify-it": "^5.0.0",
+ "mdurl": "^2.0.0",
+ "punycode.js": "^2.3.1",
+ "uc.micro": "^2.1.0"
+ },
+ "bin": {
+ "markdown-it": "bin/markdown-it.mjs"
+ }
+ },
+ "node_modules/markdown-table": {
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-3.0.4.tgz",
+ "integrity": "sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw==",
+ "license": "MIT",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
+ "node_modules/markdownlint": {
+ "version": "0.40.0",
+ "resolved": "https://registry.npmjs.org/markdownlint/-/markdownlint-0.40.0.tgz",
+ "integrity": "sha512-UKybllYNheWac61Ia7T6fzuQNDZimFIpCg2w6hHjgV1Qu0w1TV0LlSgryUGzM0bkKQCBhy2FDhEELB73Kb0kAg==",
+ "license": "MIT",
+ "dependencies": {
+ "micromark": "4.0.2",
+ "micromark-core-commonmark": "2.0.3",
+ "micromark-extension-directive": "4.0.0",
+ "micromark-extension-gfm-autolink-literal": "2.1.0",
+ "micromark-extension-gfm-footnote": "2.1.0",
+ "micromark-extension-gfm-table": "2.1.1",
+ "micromark-extension-math": "3.1.0",
+ "micromark-util-types": "2.0.2",
+ "string-width": "8.1.0"
+ },
+ "engines": {
+ "node": ">=20"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/DavidAnson"
}
},
- "node_modules/loose-envify": {
- "version": "1.4.0",
- "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz",
- "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==",
+ "node_modules/markdownlint-cli2": {
+ "version": "0.20.0",
+ "resolved": "https://registry.npmjs.org/markdownlint-cli2/-/markdownlint-cli2-0.20.0.tgz",
+ "integrity": "sha512-esPk+8Qvx/f0bzI7YelUeZp+jCtFOk3KjZ7s9iBQZ6HlymSXoTtWGiIRZP05/9Oy2ehIoIjenVwndxGtxOIJYQ==",
"license": "MIT",
"dependencies": {
- "js-tokens": "^3.0.0 || ^4.0.0"
+ "globby": "15.0.0",
+ "js-yaml": "4.1.1",
+ "jsonc-parser": "3.3.1",
+ "markdown-it": "14.1.0",
+ "markdownlint": "0.40.0",
+ "markdownlint-cli2-formatter-default": "0.0.6",
+ "micromatch": "4.0.8"
},
"bin": {
- "loose-envify": "cli.js"
+ "markdownlint-cli2": "markdownlint-cli2-bin.mjs"
+ },
+ "engines": {
+ "node": ">=20"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/DavidAnson"
}
},
- "node_modules/lower-case": {
- "version": "2.0.2",
- "resolved": "https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz",
- "integrity": "sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==",
+ "node_modules/markdownlint-cli2-formatter-default": {
+ "version": "0.0.6",
+ "resolved": "https://registry.npmjs.org/markdownlint-cli2-formatter-default/-/markdownlint-cli2-formatter-default-0.0.6.tgz",
+ "integrity": "sha512-VVDGKsq9sgzu378swJ0fcHfSicUnMxnL8gnLm/Q4J/xsNJ4e5bA6lvAz7PCzIl0/No0lHyaWdqVD2jotxOSFMQ==",
"license": "MIT",
- "dependencies": {
- "tslib": "^2.0.3"
+ "funding": {
+ "url": "https://github.com/sponsors/DavidAnson"
+ },
+ "peerDependencies": {
+ "markdownlint-cli2": ">=0.0.4"
}
},
- "node_modules/lowercase-keys": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-3.0.0.tgz",
- "integrity": "sha512-ozCC6gdQ+glXOQsveKD0YsDy8DSQFjDTz4zyzEHNV5+JP5D62LmfDZ6o1cycFx9ouG940M5dE8C8CTewdj2YWQ==",
+ "node_modules/markdownlint-cli2/node_modules/globby": {
+ "version": "15.0.0",
+ "resolved": "https://registry.npmjs.org/globby/-/globby-15.0.0.tgz",
+ "integrity": "sha512-oB4vkQGqlMl682wL1IlWd02tXCbquGWM4voPEI85QmNKCaw8zGTm1f1rubFgkg3Eli2PtKlFgrnmUqasbQWlkw==",
"license": "MIT",
+ "dependencies": {
+ "@sindresorhus/merge-streams": "^4.0.0",
+ "fast-glob": "^3.3.3",
+ "ignore": "^7.0.5",
+ "path-type": "^6.0.0",
+ "slash": "^5.1.0",
+ "unicorn-magic": "^0.3.0"
+ },
"engines": {
- "node": "^12.20.0 || ^14.13.1 || >=16.0.0"
+ "node": ">=20"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
- "node_modules/lru-cache": {
- "version": "5.1.1",
- "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz",
- "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==",
- "license": "ISC",
- "dependencies": {
- "yallist": "^3.0.2"
+ "node_modules/markdownlint-cli2/node_modules/ignore": {
+ "version": "7.0.5",
+ "resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.5.tgz",
+ "integrity": "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 4"
}
},
- "node_modules/markdown-extensions": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/markdown-extensions/-/markdown-extensions-2.0.0.tgz",
- "integrity": "sha512-o5vL7aDWatOTX8LzaS1WMoaoxIiLRQJuIKKe2wAw6IeULDHaqbiqiggmx+pKvZDb1Sj+pE46Sn1T7lCqfFtg1Q==",
+ "node_modules/markdownlint-cli2/node_modules/path-type": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/path-type/-/path-type-6.0.0.tgz",
+ "integrity": "sha512-Vj7sf++t5pBD637NSfkxpHSMfWaeig5+DKWLhcqIYx6mWQz5hdJTGDVMQiJcw1ZYkhs7AazKDGpRVji1LJCZUQ==",
"license": "MIT",
"engines": {
- "node": ">=16"
+ "node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
- "node_modules/markdown-table": {
- "version": "3.0.4",
- "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-3.0.4.tgz",
- "integrity": "sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw==",
+ "node_modules/markdownlint-cli2/node_modules/slash": {
+ "version": "5.1.0",
+ "resolved": "https://registry.npmjs.org/slash/-/slash-5.1.0.tgz",
+ "integrity": "sha512-ZA6oR3T/pEyuqwMgAKT0/hAv8oAXckzbkmR0UkUosQ+Mc4RxGoJkRmwHgHufaenlyAgE1Mxgpdcrf75y6XcnDg==",
"license": "MIT",
+ "engines": {
+ "node": ">=14.16"
+ },
"funding": {
- "type": "github",
- "url": "https://github.com/sponsors/wooorm"
+ "url": "https://github.com/sponsors/sindresorhus"
}
},
- "node_modules/marked": {
- "version": "16.4.0",
- "resolved": "https://registry.npmjs.org/marked/-/marked-16.4.0.tgz",
- "integrity": "sha512-CTPAcRBq57cn3R8n3hwc2REddc28hjR7RzDXQ+lXLmMJYqn20BaI2cGw6QjgZGIgVfp2Wdfw4aMzgNteQ6qJgQ==",
+ "node_modules/markdownlint/node_modules/ansi-regex": {
+ "version": "6.2.2",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz",
+ "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==",
"license": "MIT",
- "bin": {
- "marked": "bin/marked.js"
+ "engines": {
+ "node": ">=12"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/ansi-regex?sponsor=1"
+ }
+ },
+ "node_modules/markdownlint/node_modules/micromark-extension-directive": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/micromark-extension-directive/-/micromark-extension-directive-4.0.0.tgz",
+ "integrity": "sha512-/C2nqVmXXmiseSSuCdItCMho7ybwwop6RrrRPk0KbOHW21JKoCldC+8rFOaundDoRBUWBnJJcxeA/Kvi34WQXg==",
+ "license": "MIT",
+ "dependencies": {
+ "devlop": "^1.0.0",
+ "micromark-factory-space": "^2.0.0",
+ "micromark-factory-whitespace": "^2.0.0",
+ "micromark-util-character": "^2.0.0",
+ "micromark-util-symbol": "^2.0.0",
+ "micromark-util-types": "^2.0.0",
+ "parse-entities": "^4.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/markdownlint/node_modules/micromark-factory-space": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-2.0.1.tgz",
+ "integrity": "sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg==",
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "license": "MIT",
+ "dependencies": {
+ "micromark-util-character": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ }
+ },
+ "node_modules/markdownlint/node_modules/micromark-util-character": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
+ "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "license": "MIT",
+ "dependencies": {
+ "micromark-util-symbol": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ }
+ },
+ "node_modules/markdownlint/node_modules/micromark-util-symbol": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
+ "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "license": "MIT"
+ },
+ "node_modules/markdownlint/node_modules/string-width": {
+ "version": "8.1.0",
+ "resolved": "https://registry.npmjs.org/string-width/-/string-width-8.1.0.tgz",
+ "integrity": "sha512-Kxl3KJGb/gxkaUMOjRsQ8IrXiGW75O4E3RPjFIINOVH8AMl2SQ/yWdTzWwF3FevIX9LcMAjJW+GRwAlAbTSXdg==",
+ "license": "MIT",
+ "dependencies": {
+ "get-east-asian-width": "^1.3.0",
+ "strip-ansi": "^7.1.0"
+ },
+ "engines": {
+ "node": ">=20"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/markdownlint/node_modules/strip-ansi": {
+ "version": "7.1.2",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz",
+ "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==",
+ "license": "MIT",
+ "dependencies": {
+ "ansi-regex": "^6.0.1"
},
"engines": {
- "node": ">= 20"
+ "node": ">=12"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/strip-ansi?sponsor=1"
}
},
"node_modules/math-intrinsics": {
@@ -10733,9 +12064,9 @@
}
},
"node_modules/mdast-util-to-hast": {
- "version": "13.2.0",
- "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-13.2.0.tgz",
- "integrity": "sha512-QGYKEuUsYT9ykKBCMOEDLsU5JRObWQusAolFMeko/tYPufNkRffBAQjIE+99jbA87xv6FgmjLtwjh9wBWajwAA==",
+ "version": "13.2.1",
+ "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-13.2.1.tgz",
+ "integrity": "sha512-cctsq2wp5vTsLIcaymblUriiTcZd0CwWtCbLvrOzYCDZoWyMNV8sZ7krj09FSnsiJi3WVsHLM4k6Dq/yaPyCXA==",
"license": "MIT",
"dependencies": {
"@types/hast": "^3.0.0",
@@ -10793,6 +12124,12 @@
"integrity": "sha512-GaqWWShW4kv/G9IEucWScBx9G1/vsFZZJUO+tD26M8J8z3Kw5RDQjaoZe03YAClgeS/SWPOcb4nkFBTEi5DUEA==",
"license": "CC0-1.0"
},
+ "node_modules/mdurl": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-2.0.0.tgz",
+ "integrity": "sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==",
+ "license": "MIT"
+ },
"node_modules/media-chrome": {
"version": "4.14.0",
"resolved": "https://registry.npmjs.org/media-chrome/-/media-chrome-4.14.0.tgz",
@@ -11461,6 +12798,81 @@
],
"license": "MIT"
},
+ "node_modules/micromark-extension-math": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/micromark-extension-math/-/micromark-extension-math-3.1.0.tgz",
+ "integrity": "sha512-lvEqd+fHjATVs+2v/8kg9i5Q0AP2k85H0WUOwpIVvUML8BapsMvh1XAogmQjOCsLpoKRCVQqEkQBB3NhVBcsOg==",
+ "license": "MIT",
+ "dependencies": {
+ "@types/katex": "^0.16.0",
+ "devlop": "^1.0.0",
+ "katex": "^0.16.0",
+ "micromark-factory-space": "^2.0.0",
+ "micromark-util-character": "^2.0.0",
+ "micromark-util-symbol": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/micromark-extension-math/node_modules/micromark-factory-space": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-2.0.1.tgz",
+ "integrity": "sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg==",
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "license": "MIT",
+ "dependencies": {
+ "micromark-util-character": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ }
+ },
+ "node_modules/micromark-extension-math/node_modules/micromark-util-character": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
+ "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "license": "MIT",
+ "dependencies": {
+ "micromark-util-symbol": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ }
+ },
+ "node_modules/micromark-extension-math/node_modules/micromark-util-symbol": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
+ "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "license": "MIT"
+ },
"node_modules/micromark-extension-mdx-expression": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/micromark-extension-mdx-expression/-/micromark-extension-mdx-expression-3.0.1.tgz",
@@ -12720,9 +14132,9 @@
}
},
"node_modules/mini-css-extract-plugin": {
- "version": "2.9.4",
- "resolved": "https://registry.npmjs.org/mini-css-extract-plugin/-/mini-css-extract-plugin-2.9.4.tgz",
- "integrity": "sha512-ZWYT7ln73Hptxqxk2DxPU9MmapXRhxkJD6tkSR04dnQxm8BGu2hzgKLugK5yySD97u/8yy7Ma7E76k9ZdvtjkQ==",
+ "version": "2.10.0",
+ "resolved": "https://registry.npmjs.org/mini-css-extract-plugin/-/mini-css-extract-plugin-2.10.0.tgz",
+ "integrity": "sha512-540P2c5dYnJlyJxTaSloliZexv8rji6rY8FhQN+WF/82iHQfA23j/xtJx97L+mXOML27EqksSek/g4eK7jaL3g==",
"license": "MIT",
"dependencies": {
"schema-utils": "^4.0.0",
@@ -12874,9 +14286,9 @@
}
},
"node_modules/node-releases": {
- "version": "2.0.23",
- "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.23.tgz",
- "integrity": "sha512-cCmFDMSm26S6tQSDpBCg/NR8NENrVPhAJSf+XbxBG4rPFaaonlEoE9wHQmun+cls499TQGSb7ZyPBRlzgKfpeg==",
+ "version": "2.0.27",
+ "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.27.tgz",
+ "integrity": "sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA==",
"license": "MIT"
},
"node_modules/normalize-path": {
@@ -12888,15 +14300,6 @@
"node": ">=0.10.0"
}
},
- "node_modules/normalize-range": {
- "version": "0.1.2",
- "resolved": "https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz",
- "integrity": "sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA==",
- "license": "MIT",
- "engines": {
- "node": ">=0.10.0"
- }
- },
"node_modules/normalize-url": {
"version": "8.1.0",
"resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-8.1.0.tgz",
@@ -13064,6 +14467,17 @@
"integrity": "sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==",
"license": "MIT"
},
+ "node_modules/obug": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/obug/-/obug-2.1.1.tgz",
+ "integrity": "sha512-uTqF9MuPraAQ+IsnPf366RG4cP9RtUi7MLO1N3KEc+wb0a6yKpeL0lmk2IB1jY5KHPAlTc6T/JRdC/YqxHNwkQ==",
+ "dev": true,
+ "funding": [
+ "https://github.com/sponsors/sxzz",
+ "https://opencollective.com/debug"
+ ],
+ "license": "MIT"
+ },
"node_modules/on-finished": {
"version": "2.4.1",
"resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz",
@@ -13433,6 +14847,13 @@
"node": ">=8"
}
},
+ "node_modules/pathe": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz",
+ "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==",
+ "dev": true,
+ "license": "MIT"
+ },
"node_modules/picocolors": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
@@ -13545,9 +14966,9 @@
}
},
"node_modules/postcss-attribute-case-insensitive/node_modules/postcss-selector-parser": {
- "version": "7.1.0",
- "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.0.tgz",
- "integrity": "sha512-8sLjZwK0R+JlxlYcTuVnyT2v+htpdrjDOKuMcOVdYjt52Lh8hWRYpxBPoKx/Zg+bcjc3wx6fmQevMmUztS/ccA==",
+ "version": "7.1.1",
+ "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.1.tgz",
+ "integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==",
"license": "MIT",
"dependencies": {
"cssesc": "^3.0.0",
@@ -13789,9 +15210,9 @@
}
},
"node_modules/postcss-custom-selectors/node_modules/postcss-selector-parser": {
- "version": "7.1.0",
- "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.0.tgz",
- "integrity": "sha512-8sLjZwK0R+JlxlYcTuVnyT2v+htpdrjDOKuMcOVdYjt52Lh8hWRYpxBPoKx/Zg+bcjc3wx6fmQevMmUztS/ccA==",
+ "version": "7.1.1",
+ "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.1.tgz",
+ "integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==",
"license": "MIT",
"dependencies": {
"cssesc": "^3.0.0",
@@ -13827,9 +15248,9 @@
}
},
"node_modules/postcss-dir-pseudo-class/node_modules/postcss-selector-parser": {
- "version": "7.1.0",
- "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.0.tgz",
- "integrity": "sha512-8sLjZwK0R+JlxlYcTuVnyT2v+htpdrjDOKuMcOVdYjt52Lh8hWRYpxBPoKx/Zg+bcjc3wx6fmQevMmUztS/ccA==",
+ "version": "7.1.1",
+ "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.1.tgz",
+ "integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==",
"license": "MIT",
"dependencies": {
"cssesc": "^3.0.0",
@@ -13955,9 +15376,9 @@
}
},
"node_modules/postcss-focus-visible/node_modules/postcss-selector-parser": {
- "version": "7.1.0",
- "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.0.tgz",
- "integrity": "sha512-8sLjZwK0R+JlxlYcTuVnyT2v+htpdrjDOKuMcOVdYjt52Lh8hWRYpxBPoKx/Zg+bcjc3wx6fmQevMmUztS/ccA==",
+ "version": "7.1.1",
+ "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.1.tgz",
+ "integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==",
"license": "MIT",
"dependencies": {
"cssesc": "^3.0.0",
@@ -13993,9 +15414,9 @@
}
},
"node_modules/postcss-focus-within/node_modules/postcss-selector-parser": {
- "version": "7.1.0",
- "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.0.tgz",
- "integrity": "sha512-8sLjZwK0R+JlxlYcTuVnyT2v+htpdrjDOKuMcOVdYjt52Lh8hWRYpxBPoKx/Zg+bcjc3wx6fmQevMmUztS/ccA==",
+ "version": "7.1.1",
+ "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.1.tgz",
+ "integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==",
"license": "MIT",
"dependencies": {
"cssesc": "^3.0.0",
@@ -14282,9 +15703,9 @@
}
},
"node_modules/postcss-modules-local-by-default/node_modules/postcss-selector-parser": {
- "version": "7.1.0",
- "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.0.tgz",
- "integrity": "sha512-8sLjZwK0R+JlxlYcTuVnyT2v+htpdrjDOKuMcOVdYjt52Lh8hWRYpxBPoKx/Zg+bcjc3wx6fmQevMmUztS/ccA==",
+ "version": "7.1.1",
+ "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.1.tgz",
+ "integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==",
"license": "MIT",
"dependencies": {
"cssesc": "^3.0.0",
@@ -14310,9 +15731,9 @@
}
},
"node_modules/postcss-modules-scope/node_modules/postcss-selector-parser": {
- "version": "7.1.0",
- "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.0.tgz",
- "integrity": "sha512-8sLjZwK0R+JlxlYcTuVnyT2v+htpdrjDOKuMcOVdYjt52Lh8hWRYpxBPoKx/Zg+bcjc3wx6fmQevMmUztS/ccA==",
+ "version": "7.1.1",
+ "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.1.tgz",
+ "integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==",
"license": "MIT",
"dependencies": {
"cssesc": "^3.0.0",
@@ -14409,9 +15830,9 @@
}
},
"node_modules/postcss-nesting/node_modules/postcss-selector-parser": {
- "version": "7.1.0",
- "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.0.tgz",
- "integrity": "sha512-8sLjZwK0R+JlxlYcTuVnyT2v+htpdrjDOKuMcOVdYjt52Lh8hWRYpxBPoKx/Zg+bcjc3wx6fmQevMmUztS/ccA==",
+ "version": "7.1.1",
+ "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.1.tgz",
+ "integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==",
"license": "MIT",
"dependencies": {
"cssesc": "^3.0.0",
@@ -14652,9 +16073,9 @@
}
},
"node_modules/postcss-preset-env": {
- "version": "10.4.0",
- "resolved": "https://registry.npmjs.org/postcss-preset-env/-/postcss-preset-env-10.4.0.tgz",
- "integrity": "sha512-2kqpOthQ6JhxqQq1FSAAZGe9COQv75Aw8WbsOvQVNJ2nSevc9Yx/IKZGuZ7XJ+iOTtVon7LfO7ELRzg8AZ+sdw==",
+ "version": "10.6.1",
+ "resolved": "https://registry.npmjs.org/postcss-preset-env/-/postcss-preset-env-10.6.1.tgz",
+ "integrity": "sha512-yrk74d9EvY+W7+lO9Aj1QmjWY9q5NsKjK2V9drkOPZB/X6KZ0B3igKsHUYakb7oYVhnioWypQX3xGuePf89f3g==",
"funding": [
{
"type": "github",
@@ -14692,23 +16113,27 @@
"@csstools/postcss-media-minmax": "^2.0.9",
"@csstools/postcss-media-queries-aspect-ratio-number-values": "^3.0.5",
"@csstools/postcss-nested-calc": "^4.0.0",
- "@csstools/postcss-normalize-display-values": "^4.0.0",
+ "@csstools/postcss-normalize-display-values": "^4.0.1",
"@csstools/postcss-oklab-function": "^4.0.12",
+ "@csstools/postcss-position-area-property": "^1.0.0",
"@csstools/postcss-progressive-custom-properties": "^4.2.1",
+ "@csstools/postcss-property-rule-prelude-list": "^1.0.0",
"@csstools/postcss-random-function": "^2.0.1",
"@csstools/postcss-relative-color-syntax": "^3.0.12",
"@csstools/postcss-scope-pseudo-class": "^4.0.1",
"@csstools/postcss-sign-functions": "^1.1.4",
"@csstools/postcss-stepped-value-functions": "^4.0.9",
+ "@csstools/postcss-syntax-descriptor-syntax-production": "^1.0.1",
+ "@csstools/postcss-system-ui-font-family": "^1.0.0",
"@csstools/postcss-text-decoration-shorthand": "^4.0.3",
"@csstools/postcss-trigonometric-functions": "^4.0.9",
"@csstools/postcss-unset-value": "^4.0.0",
- "autoprefixer": "^10.4.21",
- "browserslist": "^4.26.0",
+ "autoprefixer": "^10.4.23",
+ "browserslist": "^4.28.1",
"css-blank-pseudo": "^7.0.1",
"css-has-pseudo": "^7.0.3",
"css-prefers-color-scheme": "^10.0.0",
- "cssdb": "^8.4.2",
+ "cssdb": "^8.6.0",
"postcss-attribute-case-insensitive": "^7.0.1",
"postcss-clamp": "^4.1.0",
"postcss-color-functional-notation": "^7.0.12",
@@ -14768,9 +16193,9 @@
}
},
"node_modules/postcss-pseudo-class-any-link/node_modules/postcss-selector-parser": {
- "version": "7.1.0",
- "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.0.tgz",
- "integrity": "sha512-8sLjZwK0R+JlxlYcTuVnyT2v+htpdrjDOKuMcOVdYjt52Lh8hWRYpxBPoKx/Zg+bcjc3wx6fmQevMmUztS/ccA==",
+ "version": "7.1.1",
+ "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.1.tgz",
+ "integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==",
"license": "MIT",
"dependencies": {
"cssesc": "^3.0.0",
@@ -14861,9 +16286,9 @@
}
},
"node_modules/postcss-selector-not/node_modules/postcss-selector-parser": {
- "version": "7.1.0",
- "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.0.tgz",
- "integrity": "sha512-8sLjZwK0R+JlxlYcTuVnyT2v+htpdrjDOKuMcOVdYjt52Lh8hWRYpxBPoKx/Zg+bcjc3wx6fmQevMmUztS/ccA==",
+ "version": "7.1.1",
+ "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.1.tgz",
+ "integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==",
"license": "MIT",
"dependencies": {
"cssesc": "^3.0.0",
@@ -15068,6 +16493,15 @@
"node": ">=6"
}
},
+ "node_modules/punycode.js": {
+ "version": "2.3.1",
+ "resolved": "https://registry.npmjs.org/punycode.js/-/punycode.js-2.3.1.tgz",
+ "integrity": "sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=6"
+ }
+ },
"node_modules/pupa": {
"version": "3.3.0",
"resolved": "https://registry.npmjs.org/pupa/-/pupa-3.3.0.tgz",
@@ -15914,12 +17348,12 @@
"license": "MIT"
},
"node_modules/resolve": {
- "version": "1.22.10",
- "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz",
- "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==",
+ "version": "1.22.11",
+ "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.11.tgz",
+ "integrity": "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==",
"license": "MIT",
"dependencies": {
- "is-core-module": "^2.16.0",
+ "is-core-module": "^2.16.1",
"path-parse": "^1.0.7",
"supports-preserve-symlinks-flag": "^1.0.0"
},
@@ -15988,6 +17422,51 @@
"node": ">=0.10.0"
}
},
+ "node_modules/rollup": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.57.1.tgz",
+ "integrity": "sha512-oQL6lgK3e2QZeQ7gcgIkS2YZPg5slw37hYufJ3edKlfQSGGm8ICoxswK15ntSzF/a8+h7ekRy7k7oWc3BQ7y8A==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/estree": "1.0.8"
+ },
+ "bin": {
+ "rollup": "dist/bin/rollup"
+ },
+ "engines": {
+ "node": ">=18.0.0",
+ "npm": ">=8.0.0"
+ },
+ "optionalDependencies": {
+ "@rollup/rollup-android-arm-eabi": "4.57.1",
+ "@rollup/rollup-android-arm64": "4.57.1",
+ "@rollup/rollup-darwin-arm64": "4.57.1",
+ "@rollup/rollup-darwin-x64": "4.57.1",
+ "@rollup/rollup-freebsd-arm64": "4.57.1",
+ "@rollup/rollup-freebsd-x64": "4.57.1",
+ "@rollup/rollup-linux-arm-gnueabihf": "4.57.1",
+ "@rollup/rollup-linux-arm-musleabihf": "4.57.1",
+ "@rollup/rollup-linux-arm64-gnu": "4.57.1",
+ "@rollup/rollup-linux-arm64-musl": "4.57.1",
+ "@rollup/rollup-linux-loong64-gnu": "4.57.1",
+ "@rollup/rollup-linux-loong64-musl": "4.57.1",
+ "@rollup/rollup-linux-ppc64-gnu": "4.57.1",
+ "@rollup/rollup-linux-ppc64-musl": "4.57.1",
+ "@rollup/rollup-linux-riscv64-gnu": "4.57.1",
+ "@rollup/rollup-linux-riscv64-musl": "4.57.1",
+ "@rollup/rollup-linux-s390x-gnu": "4.57.1",
+ "@rollup/rollup-linux-x64-gnu": "4.57.1",
+ "@rollup/rollup-linux-x64-musl": "4.57.1",
+ "@rollup/rollup-openbsd-x64": "4.57.1",
+ "@rollup/rollup-openharmony-arm64": "4.57.1",
+ "@rollup/rollup-win32-arm64-msvc": "4.57.1",
+ "@rollup/rollup-win32-ia32-msvc": "4.57.1",
+ "@rollup/rollup-win32-x64-gnu": "4.57.1",
+ "@rollup/rollup-win32-x64-msvc": "4.57.1",
+ "fsevents": "~2.3.2"
+ }
+ },
"node_modules/rtlcss": {
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/rtlcss/-/rtlcss-4.3.0.tgz",
@@ -16499,6 +17978,13 @@
"url": "https://github.com/sponsors/ljharb"
}
},
+ "node_modules/siginfo": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/siginfo/-/siginfo-2.0.0.tgz",
+ "integrity": "sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==",
+ "dev": true,
+ "license": "ISC"
+ },
"node_modules/signal-exit": {
"version": "3.0.7",
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz",
@@ -16551,10 +18037,13 @@
"license": "MIT"
},
"node_modules/sitemap/node_modules/sax": {
- "version": "1.4.1",
- "resolved": "https://registry.npmjs.org/sax/-/sax-1.4.1.tgz",
- "integrity": "sha512-+aWOz7yVScEGoKNd4PA10LZ8sk0A/z5+nXQG5giUO5rprX9jgYsTdov9qCchZiPIZezbZH+jRut8nPodFAX4Jg==",
- "license": "ISC"
+ "version": "1.4.4",
+ "resolved": "https://registry.npmjs.org/sax/-/sax-1.4.4.tgz",
+ "integrity": "sha512-1n3r/tGXO6b6VXMdFT54SHzT9ytu9yr7TaELowdYpMqY/Ao7EnlQGmAQ1+RatX7Tkkdm6hONI2owqNx2aZj5Sw==",
+ "license": "BlueOak-1.0.0",
+ "engines": {
+ "node": ">=11.0.0"
+ }
},
"node_modules/skin-tone": {
"version": "2.0.0",
@@ -16708,6 +18197,13 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
+ "node_modules/stackback": {
+ "version": "0.0.2",
+ "resolved": "https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz",
+ "integrity": "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==",
+ "dev": true,
+ "license": "MIT"
+ },
"node_modules/statuses": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz",
@@ -16718,9 +18214,9 @@
}
},
"node_modules/std-env": {
- "version": "3.9.0",
- "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.9.0.tgz",
- "integrity": "sha512-UGvjygr6F6tpH7o2qyqR6QYpwraIjKSdtzyBdyytFOHmPZY917kwdwLG0RbOjWOnKmnm3PeHjaoLLMie7kPLQw==",
+ "version": "3.10.0",
+ "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.10.0.tgz",
+ "integrity": "sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg==",
"license": "MIT"
},
"node_modules/string_decoder": {
@@ -16847,21 +18343,21 @@
}
},
"node_modules/style-to-js": {
- "version": "1.1.17",
- "resolved": "https://registry.npmjs.org/style-to-js/-/style-to-js-1.1.17.tgz",
- "integrity": "sha512-xQcBGDxJb6jjFCTzvQtfiPn6YvvP2O8U1MDIPNfJQlWMYfktPy+iGsHE7cssjs7y84d9fQaK4UF3RIJaAHSoYA==",
+ "version": "1.1.21",
+ "resolved": "https://registry.npmjs.org/style-to-js/-/style-to-js-1.1.21.tgz",
+ "integrity": "sha512-RjQetxJrrUJLQPHbLku6U/ocGtzyjbJMP9lCNK7Ag0CNh690nSH8woqWH9u16nMjYBAok+i7JO1NP2pOy8IsPQ==",
"license": "MIT",
"dependencies": {
- "style-to-object": "1.0.9"
+ "style-to-object": "1.0.14"
}
},
"node_modules/style-to-object": {
- "version": "1.0.9",
- "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-1.0.9.tgz",
- "integrity": "sha512-G4qppLgKu/k6FwRpHiGiKPaPTFcG3g4wNVX/Qsfu+RqQM30E7Tyu/TEgxcL9PNLF5pdRLwQdE3YKKf+KF2Dzlw==",
+ "version": "1.0.14",
+ "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-1.0.14.tgz",
+ "integrity": "sha512-LIN7rULI0jBscWQYaSswptyderlarFkjQ+t79nzty8tcIAceVomEVlLzH5VP4Cmsv6MtKhs7qaAiwlcp+Mgaxw==",
"license": "MIT",
"dependencies": {
- "inline-style-parser": "0.2.4"
+ "inline-style-parser": "0.2.7"
}
},
"node_modules/stylehacks": {
@@ -16950,19 +18446,6 @@
"node": ">= 10"
}
},
- "node_modules/swr": {
- "version": "2.3.6",
- "resolved": "https://registry.npmjs.org/swr/-/swr-2.3.6.tgz",
- "integrity": "sha512-wfHRmHWk/isGNMwlLGlZX5Gzz/uTgo0o2IRuTMcf4CPuPFJZlq0rDaKUx+ozB5nBOReNV1kiOyzMfj+MBMikLw==",
- "license": "MIT",
- "dependencies": {
- "dequal": "^2.0.3",
- "use-sync-external-store": "^1.4.0"
- },
- "peerDependencies": {
- "react": "^16.11.0 || ^17.0.0 || ^18.0.0 || ^19.0.0"
- }
- },
"node_modules/tapable": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/tapable/-/tapable-2.3.0.tgz",
@@ -17079,18 +18562,6 @@
"tslib": "^2"
}
},
- "node_modules/throttleit": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/throttleit/-/throttleit-2.1.0.tgz",
- "integrity": "sha512-nt6AMGKW1p/70DF/hGBdJB57B8Tspmbp5gfJ8ilhLnt7kkr2ye7hzD6NVG8GGErk2HWF34igrL2CXmNIkzKqKw==",
- "license": "MIT",
- "engines": {
- "node": ">=18"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
"node_modules/thunky": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/thunky/-/thunky-1.1.0.tgz",
@@ -17115,6 +18586,71 @@
"integrity": "sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==",
"license": "MIT"
},
+ "node_modules/tinybench": {
+ "version": "2.9.0",
+ "resolved": "https://registry.npmjs.org/tinybench/-/tinybench-2.9.0.tgz",
+ "integrity": "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/tinyexec": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-1.0.2.tgz",
+ "integrity": "sha512-W/KYk+NFhkmsYpuHq5JykngiOCnxeVL8v8dFnqxSD8qEEdRfXk1SDM6JzNqcERbcGYj9tMrDQBYV9cjgnunFIg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/tinyglobby": {
+ "version": "0.2.15",
+ "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz",
+ "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "fdir": "^6.5.0",
+ "picomatch": "^4.0.3"
+ },
+ "engines": {
+ "node": ">=12.0.0"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/SuperchupuDev"
+ }
+ },
+ "node_modules/tinyglobby/node_modules/fdir": {
+ "version": "6.5.0",
+ "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz",
+ "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=12.0.0"
+ },
+ "peerDependencies": {
+ "picomatch": "^3 || ^4"
+ },
+ "peerDependenciesMeta": {
+ "picomatch": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/tinyglobby/node_modules/picomatch": {
+ "version": "4.0.3",
+ "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz",
+ "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=12"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/jonschlinkert"
+ }
+ },
"node_modules/tinypool": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/tinypool/-/tinypool-1.1.1.tgz",
@@ -17124,6 +18660,16 @@
"node": "^18.0.0 || >=20.0.0"
}
},
+ "node_modules/tinyrainbow": {
+ "version": "3.0.3",
+ "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-3.0.3.tgz",
+ "integrity": "sha512-PSkbLUoxOFRzJYjjxHJt9xro7D+iilgMX/C9lawzVuYiIdcihh9DXmVibBe8lmcFrRi/VzlPjBxbN7rH24q8/Q==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=14.0.0"
+ }
+ },
"node_modules/to-regex-range": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
@@ -17297,6 +18843,12 @@
"node": "*"
}
},
+ "node_modules/uc.micro": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-2.1.0.tgz",
+ "integrity": "sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==",
+ "license": "MIT"
+ },
"node_modules/undici-types": {
"version": "7.14.0",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.14.0.tgz",
@@ -17352,6 +18904,18 @@
"node": ">=4"
}
},
+ "node_modules/unicorn-magic": {
+ "version": "0.3.0",
+ "resolved": "https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.3.0.tgz",
+ "integrity": "sha512-+QBBXBCvifc56fsbuxZQ6Sic3wqqc3WWaqxs58gvJrcOuN83HGTCwz3oS5phzU9LthRNE9VrJCFCLUgHeeFnfA==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=18"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
"node_modules/unified": {
"version": "11.0.5",
"resolved": "https://registry.npmjs.org/unified/-/unified-11.0.5.tgz",
@@ -17387,9 +18951,9 @@
}
},
"node_modules/unist-util-is": {
- "version": "6.0.0",
- "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.0.tgz",
- "integrity": "sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==",
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.1.tgz",
+ "integrity": "sha512-LsiILbtBETkDz8I9p1dQ0uyRUWuaQzd/cuEeS1hoRSyW5E5XGmTzlwY1OrNzzakGowI9Dr/I8HVaw4hTtnxy8g==",
"license": "MIT",
"dependencies": {
"@types/unist": "^3.0.0"
@@ -17439,9 +19003,9 @@
}
},
"node_modules/unist-util-visit": {
- "version": "5.0.0",
- "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.0.0.tgz",
- "integrity": "sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==",
+ "version": "5.1.0",
+ "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.1.0.tgz",
+ "integrity": "sha512-m+vIdyeCOpdr/QeQCu2EzxX/ohgS8KbnPDgFni4dQsfSCtpz8UqDyY5GjRru8PDKuYn7Fq19j1CQ+nJSsGKOzg==",
"license": "MIT",
"dependencies": {
"@types/unist": "^3.0.0",
@@ -17454,9 +19018,9 @@
}
},
"node_modules/unist-util-visit-parents": {
- "version": "6.0.1",
- "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.1.tgz",
- "integrity": "sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==",
+ "version": "6.0.2",
+ "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.2.tgz",
+ "integrity": "sha512-goh1s1TBrqSqukSc8wrjwWhL0hiJxgA8m4kFxGlQ+8FYQ3C/m11FcTs4YYem7V664AhHVvgoQLk890Ssdsr2IQ==",
"license": "MIT",
"dependencies": {
"@types/unist": "^3.0.0",
@@ -17486,9 +19050,9 @@
}
},
"node_modules/update-browserslist-db": {
- "version": "1.1.3",
- "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.3.tgz",
- "integrity": "sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==",
+ "version": "1.2.3",
+ "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.2.3.tgz",
+ "integrity": "sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w==",
"funding": [
{
"type": "opencollective",
@@ -17695,15 +19259,6 @@
"url": "https://opencollective.com/webpack"
}
},
- "node_modules/use-sync-external-store": {
- "version": "1.6.0",
- "resolved": "https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.6.0.tgz",
- "integrity": "sha512-Pp6GSwGP/NrPIrxVFAIkOQeyw8lFenOHijQWkUTrDvrF4ALqylP2C/KCkeS9dpUM3KvYRQhna5vt7IL95+ZQ9w==",
- "license": "MIT",
- "peerDependencies": {
- "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0"
- }
- },
"node_modules/util-deprecate": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
@@ -17809,6 +19364,203 @@
"@vimeo/player": "2.29.0"
}
},
+ "node_modules/vite": {
+ "version": "7.3.1",
+ "resolved": "https://registry.npmjs.org/vite/-/vite-7.3.1.tgz",
+ "integrity": "sha512-w+N7Hifpc3gRjZ63vYBXA56dvvRlNWRczTdmCBBa+CotUzAPf5b7YMdMR/8CQoeYE5LX3W4wj6RYTgonm1b9DA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "esbuild": "^0.27.0",
+ "fdir": "^6.5.0",
+ "picomatch": "^4.0.3",
+ "postcss": "^8.5.6",
+ "rollup": "^4.43.0",
+ "tinyglobby": "^0.2.15"
+ },
+ "bin": {
+ "vite": "bin/vite.js"
+ },
+ "engines": {
+ "node": "^20.19.0 || >=22.12.0"
+ },
+ "funding": {
+ "url": "https://github.com/vitejs/vite?sponsor=1"
+ },
+ "optionalDependencies": {
+ "fsevents": "~2.3.3"
+ },
+ "peerDependencies": {
+ "@types/node": "^20.19.0 || >=22.12.0",
+ "jiti": ">=1.21.0",
+ "less": "^4.0.0",
+ "lightningcss": "^1.21.0",
+ "sass": "^1.70.0",
+ "sass-embedded": "^1.70.0",
+ "stylus": ">=0.54.8",
+ "sugarss": "^5.0.0",
+ "terser": "^5.16.0",
+ "tsx": "^4.8.1",
+ "yaml": "^2.4.2"
+ },
+ "peerDependenciesMeta": {
+ "@types/node": {
+ "optional": true
+ },
+ "jiti": {
+ "optional": true
+ },
+ "less": {
+ "optional": true
+ },
+ "lightningcss": {
+ "optional": true
+ },
+ "sass": {
+ "optional": true
+ },
+ "sass-embedded": {
+ "optional": true
+ },
+ "stylus": {
+ "optional": true
+ },
+ "sugarss": {
+ "optional": true
+ },
+ "terser": {
+ "optional": true
+ },
+ "tsx": {
+ "optional": true
+ },
+ "yaml": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/vite/node_modules/fdir": {
+ "version": "6.5.0",
+ "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz",
+ "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=12.0.0"
+ },
+ "peerDependencies": {
+ "picomatch": "^3 || ^4"
+ },
+ "peerDependenciesMeta": {
+ "picomatch": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/vite/node_modules/picomatch": {
+ "version": "4.0.3",
+ "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz",
+ "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=12"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/jonschlinkert"
+ }
+ },
+ "node_modules/vitest": {
+ "version": "4.0.18",
+ "resolved": "https://registry.npmjs.org/vitest/-/vitest-4.0.18.tgz",
+ "integrity": "sha512-hOQuK7h0FGKgBAas7v0mSAsnvrIgAvWmRFjmzpJ7SwFHH3g1k2u37JtYwOwmEKhK6ZO3v9ggDBBm0La1LCK4uQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@vitest/expect": "4.0.18",
+ "@vitest/mocker": "4.0.18",
+ "@vitest/pretty-format": "4.0.18",
+ "@vitest/runner": "4.0.18",
+ "@vitest/snapshot": "4.0.18",
+ "@vitest/spy": "4.0.18",
+ "@vitest/utils": "4.0.18",
+ "es-module-lexer": "^1.7.0",
+ "expect-type": "^1.2.2",
+ "magic-string": "^0.30.21",
+ "obug": "^2.1.1",
+ "pathe": "^2.0.3",
+ "picomatch": "^4.0.3",
+ "std-env": "^3.10.0",
+ "tinybench": "^2.9.0",
+ "tinyexec": "^1.0.2",
+ "tinyglobby": "^0.2.15",
+ "tinyrainbow": "^3.0.3",
+ "vite": "^6.0.0 || ^7.0.0",
+ "why-is-node-running": "^2.3.0"
+ },
+ "bin": {
+ "vitest": "vitest.mjs"
+ },
+ "engines": {
+ "node": "^20.0.0 || ^22.0.0 || >=24.0.0"
+ },
+ "funding": {
+ "url": "https://opencollective.com/vitest"
+ },
+ "peerDependencies": {
+ "@edge-runtime/vm": "*",
+ "@opentelemetry/api": "^1.9.0",
+ "@types/node": "^20.0.0 || ^22.0.0 || >=24.0.0",
+ "@vitest/browser-playwright": "4.0.18",
+ "@vitest/browser-preview": "4.0.18",
+ "@vitest/browser-webdriverio": "4.0.18",
+ "@vitest/ui": "4.0.18",
+ "happy-dom": "*",
+ "jsdom": "*"
+ },
+ "peerDependenciesMeta": {
+ "@edge-runtime/vm": {
+ "optional": true
+ },
+ "@opentelemetry/api": {
+ "optional": true
+ },
+ "@types/node": {
+ "optional": true
+ },
+ "@vitest/browser-playwright": {
+ "optional": true
+ },
+ "@vitest/browser-preview": {
+ "optional": true
+ },
+ "@vitest/browser-webdriverio": {
+ "optional": true
+ },
+ "@vitest/ui": {
+ "optional": true
+ },
+ "happy-dom": {
+ "optional": true
+ },
+ "jsdom": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/vitest/node_modules/picomatch": {
+ "version": "4.0.3",
+ "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz",
+ "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=12"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/jonschlinkert"
+ }
+ },
"node_modules/watchpack": {
"version": "2.4.4",
"resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.4.4.tgz",
@@ -18254,6 +20006,23 @@
"node": ">= 8"
}
},
+ "node_modules/why-is-node-running": {
+ "version": "2.3.0",
+ "resolved": "https://registry.npmjs.org/why-is-node-running/-/why-is-node-running-2.3.0.tgz",
+ "integrity": "sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "siginfo": "^2.0.0",
+ "stackback": "0.0.2"
+ },
+ "bin": {
+ "why-is-node-running": "cli.js"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
"node_modules/widest-line": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/widest-line/-/widest-line-4.0.1.tgz",
@@ -18428,10 +20197,13 @@
}
},
"node_modules/xml-js/node_modules/sax": {
- "version": "1.4.1",
- "resolved": "https://registry.npmjs.org/sax/-/sax-1.4.1.tgz",
- "integrity": "sha512-+aWOz7yVScEGoKNd4PA10LZ8sk0A/z5+nXQG5giUO5rprX9jgYsTdov9qCchZiPIZezbZH+jRut8nPodFAX4Jg==",
- "license": "ISC"
+ "version": "1.4.4",
+ "resolved": "https://registry.npmjs.org/sax/-/sax-1.4.4.tgz",
+ "integrity": "sha512-1n3r/tGXO6b6VXMdFT54SHzT9ytu9yr7TaELowdYpMqY/Ao7EnlQGmAQ1+RatX7Tkkdm6hONI2owqNx2aZj5Sw==",
+ "license": "BlueOak-1.0.0",
+ "engines": {
+ "node": ">=11.0.0"
+ }
},
"node_modules/yallist": {
"version": "3.1.1",
@@ -18440,9 +20212,9 @@
"license": "ISC"
},
"node_modules/yocto-queue": {
- "version": "1.2.1",
- "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.2.1.tgz",
- "integrity": "sha512-AyeEbWOu/TAXdxlV9wmGcR0+yh2j3vYPGOECcIj2S7MkrLyC7ne+oye2BKTItt0ii2PHk4cDy+95+LshzbXnGg==",
+ "version": "1.2.2",
+ "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.2.2.tgz",
+ "integrity": "sha512-4LCcse/U2MHZ63HAJVE+v71o7yOdIe4cZ70Wpf8D/IyjDKYQLV5GD46B+hSTjJsvV5PztjvHoU580EftxjDZFQ==",
"license": "MIT",
"engines": {
"node": ">=12.20"
@@ -18457,15 +20229,6 @@
"integrity": "sha512-YHDIOAqgRpfl1Ois9HcB8UFtWOxK8KJrV5TXpImj4BKYP1rWT04f/fMM9tQ9SYZlBKukT7NR+9wcI3UpB5BMDQ==",
"license": "MIT"
},
- "node_modules/zod": {
- "version": "4.1.12",
- "resolved": "https://registry.npmjs.org/zod/-/zod-4.1.12.tgz",
- "integrity": "sha512-JInaHOamG8pt5+Ey8kGmdcAcg3OL9reK8ltczgHTAwNhMys/6ThXHityHxVV2p3fkw/c+MAvBHFVYHFZDmjMCQ==",
- "license": "MIT",
- "funding": {
- "url": "https://github.com/sponsors/colinhacks"
- }
- },
"node_modules/zwitch": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz",
diff --git a/docs/package.json b/docs/package.json
index 0dc1e714..c69d436e 100644
--- a/docs/package.json
+++ b/docs/package.json
@@ -12,13 +12,17 @@
"serve": "docusaurus serve",
"write-translations": "docusaurus write-translations",
"write-heading-ids": "docusaurus write-heading-ids",
- "typecheck": "tsc"
+ "typecheck": "tsc",
+ "test": "vitest run",
+ "test:watch": "vitest watch"
},
"dependencies": {
- "@docusaurus/core": "^3.8.1",
- "@docusaurus/preset-classic": "^3.8.1",
+ "@docusaurus/core": "^3.9.2",
+ "@docusaurus/preset-classic": "^3.9.2",
+ "@iconify/react": "^6.0.0",
"@mdx-js/react": "^3.0.0",
"clsx": "^2.0.0",
+ "markdownlint-cli2": "^0.20.0",
"prism-react-renderer": "^2.3.0",
"raw-loader": "^4.0.2",
"react": "^18.0.0",
@@ -26,10 +30,11 @@
"react-player": "^3.3.2"
},
"devDependencies": {
- "@docusaurus/module-type-aliases": "^3.8.1",
- "@docusaurus/tsconfig": "^3.8.1",
- "@docusaurus/types": "^3.8.1",
- "typescript": "~5.5.2"
+ "@docusaurus/module-type-aliases": "^3.9.2",
+ "@docusaurus/tsconfig": "^3.9.2",
+ "@docusaurus/types": "^3.9.2",
+ "typescript": "~5.5.2",
+ "vitest": "^4.0.17"
},
"browserslist": {
"production": [
diff --git a/docs/sidebars-infrahubctl.ts b/docs/sidebars-infrahubctl.ts
deleted file mode 100644
index c50587f8..00000000
--- a/docs/sidebars-infrahubctl.ts
+++ /dev/null
@@ -1,36 +0,0 @@
-import type {SidebarsConfig} from '@docusaurus/plugin-content-docs';
-
-const sidebars: SidebarsConfig = {
- infrahubctlSidebar: [
- {
- type: 'doc',
- id: 'infrahubctl',
- label: 'Infrahubctl CLI Tool',
- },
- {
- type: 'category',
- label: 'Commands',
- items: [
- 'infrahubctl-branch',
- 'infrahubctl-check',
- 'infrahubctl-dump',
- 'infrahubctl-generator',
- 'infrahubctl-info',
- 'infrahubctl-load',
- 'infrahubctl-menu',
- 'infrahubctl-object',
- 'infrahubctl-protocols',
- 'infrahubctl-render',
- 'infrahubctl-repository',
- 'infrahubctl-run',
- 'infrahubctl-schema',
- 'infrahubctl-task',
- 'infrahubctl-transform',
- 'infrahubctl-validate',
- 'infrahubctl-version'
- ],
- },
- ],
-};
-
-export default sidebars;
\ No newline at end of file
diff --git a/docs/sidebars-python-sdk.ts b/docs/sidebars-python-sdk.ts
deleted file mode 100644
index e2fc932c..00000000
--- a/docs/sidebars-python-sdk.ts
+++ /dev/null
@@ -1,51 +0,0 @@
-import type {SidebarsConfig} from '@docusaurus/plugin-content-docs';
-
-const sidebars: SidebarsConfig = {
- pythonSdkSidebar: [
- {
- type: 'category',
- label: 'Python SDK docs',
- link: {
- type: 'doc',
- id: 'introduction',
- },
- items: [
- {
- type: 'category',
- label: 'Guides',
- items: [
- 'guides/installation',
- 'guides/client',
- 'guides/query_data',
- 'guides/create_update_delete',
- 'guides/branches',
- 'guides/store',
- 'guides/tracking',
- 'guides/python-typing',
- 'guides/batch',
- 'guides/object-storage',
- 'guides/resource-manager',
- ],
- },
- {
- type: 'category',
- label: 'Topics',
- items: [
- 'topics/tracking',
- 'topics/object_file',
- ],
- },
- {
- type: 'category',
- label: 'Reference',
- items: [
- 'reference/config',
- 'reference/templating',
- ],
- },
- ],
- },
- ],
-};
-
-export default sidebars;
\ No newline at end of file
diff --git a/docs/sidebars/sidebar-utils.test.ts b/docs/sidebars/sidebar-utils.test.ts
new file mode 100644
index 00000000..010a9552
--- /dev/null
+++ b/docs/sidebars/sidebar-utils.test.ts
@@ -0,0 +1,119 @@
+import { describe, expect, it } from "vitest";
+
+import { getCommandItems, getItemsWithOrder } from "./sidebar-utils";
+
+describe("getCommandItems", () => {
+ it("should filter and sort mdx command files", () => {
+ const files = [
+ "infrahubctl.mdx",
+ "infrahubctl-branch.mdx",
+ "infrahubctl-validate.mdx",
+ "infrahubctl-check.mdx",
+ ];
+
+ const result = getCommandItems(files);
+
+ expect(result).toStrictEqual([
+ "infrahubctl-branch",
+ "infrahubctl-check",
+ "infrahubctl-validate",
+ ]);
+ });
+
+ it("should exclude the index file", () => {
+ const files = ["infrahubctl.mdx", "infrahubctl-branch.mdx"];
+
+ const result = getCommandItems(files);
+
+ expect(result).toStrictEqual(["infrahubctl-branch"]);
+ });
+
+ it("should ignore non-mdx files", () => {
+ const files = ["infrahubctl-branch.mdx", "README.md", ".DS_Store", "image.png"];
+
+ const result = getCommandItems(files);
+
+ expect(result).toStrictEqual(["infrahubctl-branch"]);
+ });
+
+ it("should return an empty array when only the index file exists", () => {
+ const result = getCommandItems(["infrahubctl.mdx"]);
+
+ expect(result).toStrictEqual([]);
+ });
+
+ it("should return an empty array for an empty directory", () => {
+ const result = getCommandItems([]);
+
+ expect(result).toStrictEqual([]);
+ });
+
+ it("should support a custom index file name", () => {
+ const files = ["index.mdx", "command-a.mdx", "command-b.mdx"];
+
+ const result = getCommandItems(files, "index.mdx");
+
+ expect(result).toStrictEqual(["command-a", "command-b"]);
+ });
+});
+
+describe("getItemsWithOrder", () => {
+ it("should preserve the defined order for known items", () => {
+ const files = ["client.mdx", "installation.mdx", "batch.mdx"];
+ const orderedIds = ["guides/installation", "guides/client", "guides/batch"];
+
+ const result = getItemsWithOrder(files, orderedIds, "guides");
+
+ expect(result).toStrictEqual(["guides/installation", "guides/client", "guides/batch"]);
+ });
+
+ it("should append new files sorted alphabetically after ordered items", () => {
+ const files = ["client.mdx", "installation.mdx", "batch.mdx", "new-guide.mdx", "advanced.mdx"];
+ const orderedIds = ["guides/installation", "guides/client", "guides/batch"];
+
+ const result = getItemsWithOrder(files, orderedIds, "guides");
+
+ expect(result).toStrictEqual([
+ "guides/installation",
+ "guides/client",
+ "guides/batch",
+ "guides/advanced",
+ "guides/new-guide",
+ ]);
+ });
+
+ it("should skip ordered items that no longer exist on disk", () => {
+ const files = ["installation.mdx", "batch.mdx"];
+ const orderedIds = ["guides/installation", "guides/client", "guides/batch"];
+
+ const result = getItemsWithOrder(files, orderedIds, "guides");
+
+ expect(result).toStrictEqual(["guides/installation", "guides/batch"]);
+ });
+
+ it("should ignore non-mdx files", () => {
+ const files = ["installation.mdx", "README.md", ".DS_Store"];
+ const orderedIds = ["guides/installation"];
+
+ const result = getItemsWithOrder(files, orderedIds, "guides");
+
+ expect(result).toStrictEqual(["guides/installation"]);
+ });
+
+ it("should work without a prefix", () => {
+ const files = ["tracking.mdx", "object_file.mdx", "new-topic.mdx"];
+ const orderedIds = ["tracking", "object_file"];
+
+ const result = getItemsWithOrder(files, orderedIds);
+
+ expect(result).toStrictEqual(["tracking", "object_file", "new-topic"]);
+ });
+
+ it("should return all files sorted when no ordered ids are provided", () => {
+ const files = ["batch.mdx", "installation.mdx", "client.mdx"];
+
+ const result = getItemsWithOrder(files, [], "guides");
+
+ expect(result).toStrictEqual(["guides/batch", "guides/client", "guides/installation"]);
+ });
+});
diff --git a/docs/sidebars/sidebar-utils.ts b/docs/sidebars/sidebar-utils.ts
new file mode 100644
index 00000000..0860904c
--- /dev/null
+++ b/docs/sidebars/sidebar-utils.ts
@@ -0,0 +1,23 @@
+export function getCommandItems(files: string[], indexFile: string = 'infrahubctl.mdx'): string[] {
+ return files
+ .filter(file => file.endsWith('.mdx') && file !== indexFile)
+ .map(file => file.replace('.mdx', ''))
+ .sort();
+}
+
+export function getItemsWithOrder(files: string[], orderedIds: string[], prefix: string = ''): string[] {
+ const allIds = files
+ .filter(file => file.endsWith('.mdx'))
+ .map(file => {
+ const base = file.replace('.mdx', '');
+ return prefix ? `${prefix}/${base}` : base;
+ });
+
+ const existingIds = new Set(allIds);
+ const ordered = orderedIds.filter(id => existingIds.has(id));
+
+ const orderedSet = new Set(orderedIds);
+ const remaining = allIds.filter(id => !orderedSet.has(id)).sort();
+
+ return [...ordered, ...remaining];
+}
diff --git a/docs/sidebars/sidebars-infrahubctl.ts b/docs/sidebars/sidebars-infrahubctl.ts
new file mode 100644
index 00000000..bd92511d
--- /dev/null
+++ b/docs/sidebars/sidebars-infrahubctl.ts
@@ -0,0 +1,24 @@
+import type {SidebarsConfig} from '@docusaurus/plugin-content-docs';
+import {readdirSync} from 'fs';
+import {join} from 'path';
+import {getCommandItems} from './sidebar-utils';
+
+const docsDir = join(__dirname, '..', 'docs', 'infrahubctl');
+const commandItems = getCommandItems(readdirSync(docsDir));
+
+const sidebars: SidebarsConfig = {
+ infrahubctlSidebar: [
+ {
+ type: 'doc',
+ id: 'infrahubctl',
+ label: 'Infrahubctl CLI Tool',
+ },
+ {
+ type: 'category',
+ label: 'Commands',
+ items: commandItems,
+ },
+ ],
+};
+
+export default sidebars;
\ No newline at end of file
diff --git a/docs/sidebars/sidebars-python-sdk.ts b/docs/sidebars/sidebars-python-sdk.ts
new file mode 100644
index 00000000..3adbac3e
--- /dev/null
+++ b/docs/sidebars/sidebars-python-sdk.ts
@@ -0,0 +1,86 @@
+import type { SidebarsConfig } from '@docusaurus/plugin-content-docs';
+import { readdirSync } from 'fs';
+import { join } from 'path';
+import { getItemsWithOrder } from './sidebar-utils';
+
+const pythonSdkDocsDir = join(__dirname, '..', 'docs', 'python-sdk');
+
+const guidesItems = getItemsWithOrder(
+ readdirSync(join(pythonSdkDocsDir, 'guides')),
+ [
+ 'guides/installation',
+ 'guides/client',
+ 'guides/query_data',
+ 'guides/create_update_delete',
+ 'guides/branches',
+ 'guides/store',
+ 'guides/tracking',
+ 'guides/python-typing',
+ 'guides/batch',
+ 'guides/object-storage',
+ 'guides/resource-manager',
+ ],
+ 'guides',
+);
+
+const topicsItems = getItemsWithOrder(
+ readdirSync(join(pythonSdkDocsDir, 'topics')),
+ [
+ 'topics/tracking',
+ 'topics/object_file',
+ ],
+ 'topics',
+);
+
+const referenceItems = getItemsWithOrder(
+ readdirSync(join(pythonSdkDocsDir, 'reference')),
+ [
+ 'reference/config',
+ 'reference/templating',
+ ],
+ 'reference',
+);
+
+const sidebars: SidebarsConfig = {
+ pythonSdkSidebar: [
+ {
+ type: 'category',
+ label: 'Python SDK docs',
+ link: {
+ type: 'doc',
+ id: 'introduction',
+ },
+ items: [
+ {
+ type: 'category',
+ label: 'Guides',
+ items: guidesItems,
+ },
+ {
+ type: 'category',
+ label: 'Topics',
+ items: topicsItems,
+ },
+ {
+ type: 'category',
+ label: 'Reference',
+ items: [
+ {
+ type: 'category',
+ label: 'Python SDK API',
+ items: [
+ {
+ type: 'autogenerated',
+ dirName: 'sdk_ref',
+ },
+ ],
+ },
+ ...referenceItems,
+ ],
+ },
+ ],
+ },
+ ],
+};
+
+export default sidebars;
\ No newline at end of file
diff --git a/docs/src/theme/MDXComponents.js b/docs/src/theme/MDXComponents.js
new file mode 100644
index 00000000..682ed38f
--- /dev/null
+++ b/docs/src/theme/MDXComponents.js
@@ -0,0 +1,10 @@
+import React from 'react';
+// Import the original mapper
+import MDXComponents from '@theme-original/MDXComponents';
+import { Icon } from '@iconify/react'; // Import the entire Iconify library.
+
+export default {
+ // Re-use the default mapping
+ ...MDXComponents,
+ Icon: Icon, // Make the iconify Icon component available in MDX as .
+};
\ No newline at end of file
diff --git a/docs/vitest.config.ts b/docs/vitest.config.ts
new file mode 100644
index 00000000..7d2a0b3b
--- /dev/null
+++ b/docs/vitest.config.ts
@@ -0,0 +1,8 @@
+import { defineConfig } from "vitest/config";
+
+export default defineConfig({
+ test: {
+ include: ["**/*.test.ts"],
+ exclude: ["**/node_modules/**", "**/build/**"],
+ },
+});
diff --git a/infrahub_sdk/branch.py b/infrahub_sdk/branch.py
index 2c32a481..4c89bd41 100644
--- a/infrahub_sdk/branch.py
+++ b/infrahub_sdk/branch.py
@@ -19,6 +19,7 @@ class BranchStatus(str, Enum):
NEED_REBASE = "NEED_REBASE"
NEED_UPGRADE_REBASE = "NEED_UPGRADE_REBASE"
DELETING = "DELETING"
+ MERGED = "MERGED"
class BranchData(BaseModel):
diff --git a/infrahub_sdk/client.py b/infrahub_sdk/client.py
index 92cebdea..988d30bd 100644
--- a/infrahub_sdk/client.py
+++ b/infrahub_sdk/client.py
@@ -5,18 +5,12 @@
import logging
import time
import warnings
-from collections.abc import Callable, Coroutine, Mapping, MutableMapping
+from collections.abc import AsyncIterator, Callable, Coroutine, Iterator, Mapping, MutableMapping
+from contextlib import asynccontextmanager, contextmanager
from datetime import datetime
from functools import wraps
from time import sleep
-from typing import (
- TYPE_CHECKING,
- Any,
- Literal,
- TypedDict,
- TypeVar,
- overload,
-)
+from typing import TYPE_CHECKING, Any, BinaryIO, Literal, TypedDict, TypeVar, overload
from urllib.parse import urlencode
import httpx
@@ -24,12 +18,7 @@
from typing_extensions import Self
from .batch import InfrahubBatch, InfrahubBatchSync
-from .branch import (
- MUTATION_QUERY_TASK,
- BranchData,
- InfrahubBranchManager,
- InfrahubBranchManagerSync,
-)
+from .branch import MUTATION_QUERY_TASK, BranchData, InfrahubBranchManager, InfrahubBranchManagerSync
from .config import Config
from .constants import InfrahubClientMode
from .convert_object_type import CONVERT_OBJECT_MUTATION, ConversionFieldInput
@@ -44,11 +33,8 @@
ServerNotResponsiveError,
URLNotFoundError,
)
-from .graphql import Mutation, Query
-from .node import (
- InfrahubNode,
- InfrahubNodeSync,
-)
+from .graphql import MultipartBuilder, Mutation, Query
+from .node import InfrahubNode, InfrahubNodeSync
from .object_store import ObjectStore, ObjectStoreSync
from .protocols_base import CoreNode, CoreNodeSync
from .queries import QUERY_USER, get_commit_update_mutation
@@ -582,7 +568,7 @@ async def _process_nodes_and_relationships(
response (dict[str, Any]): The response from the GraphQL query.
schema_kind (str): The kind of schema being queried.
branch (str): The branch name.
- prefetch_relationships (bool): Flag to indicate whether to prefetch relationship data.
+ prefetch_relationships (bool): Flag to indicate whether to pre-fetch relationship data.
timeout (int, optional): Overrides default timeout used when querying the GraphQL API. Specified in seconds.
Returns:
@@ -712,7 +698,7 @@ async def all(
include (list[str], optional): List of attributes or relationships to include in the query.
exclude (list[str], optional): List of attributes or relationships to exclude from the query.
fragment (bool, optional): Flag to use GraphQL fragments for generic schemas.
- prefetch_relationships (bool, optional): Flag to indicate whether to prefetch related node data.
+ prefetch_relationships (bool, optional): Flag to indicate whether to pre-fetch related node data.
parallel (bool, optional): Whether to use parallel processing for the query.
order (Order, optional): Ordering related options. Setting `disable=True` enhances performances.
include_metadata (bool, optional): If True, includes node_metadata and relationship_metadata in the query.
@@ -815,7 +801,7 @@ async def filters(
include (list[str], optional): List of attributes or relationships to include in the query.
exclude (list[str], optional): List of attributes or relationships to exclude from the query.
fragment (bool, optional): Flag to use GraphQL fragments for generic schemas.
- prefetch_relationships (bool, optional): Flag to indicate whether to prefetch related node data.
+ prefetch_relationships (bool, optional): Flag to indicate whether to pre-fetch related node data.
partial_match (bool, optional): Allow partial match of filter criteria for the query.
parallel (bool, optional): Whether to use parallel processing for the query.
order (Order, optional): Ordering related options. Setting `disable=True` enhances performances.
@@ -933,7 +919,7 @@ async def execute_graphql(
tracker: str | None = None,
) -> dict:
"""Execute a GraphQL query (or mutation).
- If retry_on_failure is True, the query will retry until the server becomes reacheable.
+ If retry_on_failure is True, the query will retry until the server becomes reachable.
Args:
query (_type_): GraphQL Query to execute, can be a query or a mutation
@@ -994,7 +980,7 @@ async def execute_graphql(
messages = [error.get("message") for error in errors]
raise AuthenticationError(" | ".join(messages)) from exc
if exc.response.status_code == 404:
- raise URLNotFoundError(url=url)
+ raise URLNotFoundError(url=url) from exc
if not resp:
raise Error("Unexpected situation, resp hasn't been initialized.")
@@ -1008,6 +994,128 @@ async def execute_graphql(
# TODO add a special method to execute mutation that will check if the method returned OK
+ async def _execute_graphql_with_file(
+ self,
+ query: str,
+ variables: dict | None = None,
+ file_content: BinaryIO | None = None,
+ file_name: str | None = None,
+ branch_name: str | None = None,
+ timeout: int | None = None,
+ tracker: str | None = None,
+ ) -> dict:
+ """Execute a GraphQL mutation with a file upload using multipart/form-data.
+
+ This method follows the GraphQL Multipart Request Spec for file uploads.
+ The file is attached to the 'file' variable in the mutation.
+
+ Args:
+ query: GraphQL mutation query that includes a $file variable of type Upload!
+ variables: Variables to pass along with the GraphQL query.
+ file_content: The file content as a file-like object (BinaryIO).
+ file_name: The name of the file being uploaded.
+ branch_name: Name of the branch on which the mutation will be executed.
+ timeout: Timeout in seconds for the query.
+ tracker: Optional tracker for request tracing.
+
+ Raises:
+ GraphQLError: When the GraphQL response contains errors.
+
+ Returns:
+ dict: The GraphQL data payload (response["data"]).
+ """
+ branch_name = branch_name or self.default_branch
+ url = self._graphql_url(branch_name=branch_name)
+
+ # Prepare variables with file placeholder
+ variables = variables or {}
+ variables["file"] = None
+
+ headers = copy.copy(self.headers or {})
+ # Remove content-type header - httpx will set it for multipart
+ headers.pop("content-type", None)
+ if self.insert_tracker and tracker:
+ headers["X-Infrahub-Tracker"] = tracker
+
+ self._echo(url=url, query=query, variables=variables)
+
+ resp = await self._post_multipart(
+ url=url,
+ query=query,
+ variables=variables,
+ file_content=file_content,
+ file_name=file_name or "upload",
+ headers=headers,
+ timeout=timeout,
+ )
+
+ resp.raise_for_status()
+ response = decode_json(response=resp)
+
+ if "errors" in response:
+ raise GraphQLError(errors=response["errors"], query=query, variables=variables)
+
+ return response["data"]
+
+ @handle_relogin
+ async def _post_multipart(
+ self,
+ url: str,
+ query: str,
+ variables: dict,
+ file_content: BinaryIO | None,
+ file_name: str,
+ headers: dict | None = None,
+ timeout: int | None = None,
+ ) -> httpx.Response:
+ """Execute a HTTP POST with multipart/form-data for GraphQL file uploads.
+
+ The file_content is streamed directly from the file-like object, avoiding loading the entire file into memory for large files.
+ """
+ await self.login()
+
+ headers = headers or {}
+ base_headers = copy.copy(self.headers or {})
+ # Remove content-type from base headers - httpx will set it for multipart
+ base_headers.pop("content-type", None)
+ headers.update(base_headers)
+
+ # Build the multipart form data according to GraphQL Multipart Request Spec
+ files = MultipartBuilder.build_payload(
+ query=query, variables=variables, file_content=file_content, file_name=file_name
+ )
+
+ return await self._request_multipart(
+ url=url, headers=headers, timeout=timeout or self.default_timeout, files=files
+ )
+
+ def _build_proxy_config(self) -> ProxyConfig:
+ """Build proxy configuration for httpx AsyncClient."""
+ proxy_config: ProxyConfig = {"proxy": None, "mounts": None}
+ if self.config.proxy:
+ proxy_config["proxy"] = self.config.proxy
+ elif self.config.proxy_mounts.is_set:
+ proxy_config["mounts"] = {
+ key: httpx.AsyncHTTPTransport(proxy=value)
+ for key, value in self.config.proxy_mounts.model_dump(by_alias=True).items()
+ }
+ return proxy_config
+
+ async def _request_multipart(
+ self, url: str, headers: dict[str, Any], timeout: int, files: dict[str, Any]
+ ) -> httpx.Response:
+ """Execute a multipart HTTP POST request."""
+ async with httpx.AsyncClient(**self._build_proxy_config(), verify=self.config.tls_context) as client:
+ try:
+ response = await client.post(url=url, headers=headers, timeout=timeout, files=files)
+ except httpx.NetworkError as exc:
+ raise ServerNotReachableError(address=self.address) from exc
+ except httpx.ReadTimeout as exc:
+ raise ServerNotResponsiveError(url=url, timeout=timeout) from exc
+
+ self._record(response)
+ return response
+
@handle_relogin
async def _post(
self,
@@ -1057,6 +1165,36 @@ async def _get(self, url: str, headers: dict | None = None, timeout: int | None
timeout=timeout or self.default_timeout,
)
+ @asynccontextmanager
+ async def _get_streaming(
+ self, url: str, headers: dict | None = None, timeout: int | None = None
+ ) -> AsyncIterator[httpx.Response]:
+ """Execute a streaming HTTP GET with HTTPX.
+
+ Returns an async context manager that yields the streaming response.
+ Use this for downloading large files without loading into memory.
+
+ Raises:
+ ServerNotReachableError if we are not able to connect to the server
+ ServerNotResponsiveError if the server didn't respond before the timeout expired
+ """
+ await self.login()
+
+ headers = headers or {}
+ base_headers = copy.copy(self.headers or {})
+ headers.update(base_headers)
+
+ async with httpx.AsyncClient(**self._build_proxy_config(), verify=self.config.tls_context) as client:
+ try:
+ async with client.stream(
+ method="GET", url=url, headers=headers, timeout=timeout or self.default_timeout
+ ) as response:
+ yield response
+ except httpx.NetworkError as exc:
+ raise ServerNotReachableError(address=self.address) from exc
+ except httpx.ReadTimeout as exc:
+ raise ServerNotResponsiveError(url=url, timeout=timeout or self.default_timeout) from exc
+
async def _request(
self,
url: str,
@@ -1081,19 +1219,7 @@ async def _default_request_method(
if payload:
params["json"] = payload
- proxy_config: ProxyConfig = {"proxy": None, "mounts": None}
- if self.config.proxy:
- proxy_config["proxy"] = self.config.proxy
- elif self.config.proxy_mounts.is_set:
- proxy_config["mounts"] = {
- key: httpx.AsyncHTTPTransport(proxy=value)
- for key, value in self.config.proxy_mounts.model_dump(by_alias=True).items()
- }
-
- async with httpx.AsyncClient(
- **proxy_config,
- verify=self.config.tls_context,
- ) as client:
+ async with httpx.AsyncClient(**self._build_proxy_config(), verify=self.config.tls_context) as client:
try:
response = await client.request(
method=method.value,
@@ -1742,10 +1868,11 @@ async def convert_object_type(
for more information.
"""
- if fields_mapping is None:
- mapping_dict = {}
- else:
- mapping_dict = {field_name: model.model_dump(mode="json") for field_name, model in fields_mapping.items()}
+ mapping_dict = (
+ {}
+ if fields_mapping is None
+ else {field_name: model.model_dump(mode="json") for field_name, model in fields_mapping.items()}
+ )
branch_name = branch or self.default_branch
response = await self.execute_graphql(
@@ -1848,7 +1975,7 @@ def execute_graphql(
tracker: str | None = None,
) -> dict:
"""Execute a GraphQL query (or mutation).
- If retry_on_failure is True, the query will retry until the server becomes reacheable.
+ If retry_on_failure is True, the query will retry until the server becomes reachable.
Args:
query (str): GraphQL Query to execute, can be a query or a mutation
@@ -1910,7 +2037,7 @@ def execute_graphql(
messages = [error.get("message") for error in errors]
raise AuthenticationError(" | ".join(messages)) from exc
if exc.response.status_code == 404:
- raise URLNotFoundError(url=url)
+ raise URLNotFoundError(url=url) from exc
if not resp:
raise Error("Unexpected situation, resp hasn't been initialized.")
@@ -1924,6 +2051,126 @@ def execute_graphql(
# TODO add a special method to execute mutation that will check if the method returned OK
+ def _execute_graphql_with_file(
+ self,
+ query: str,
+ variables: dict | None = None,
+ file_content: BinaryIO | None = None,
+ file_name: str | None = None,
+ branch_name: str | None = None,
+ timeout: int | None = None,
+ tracker: str | None = None,
+ ) -> dict:
+ """Execute a GraphQL mutation with a file upload using multipart/form-data.
+
+ This method follows the GraphQL Multipart Request Spec for file uploads.
+ The file is attached to the 'file' variable in the mutation.
+
+ Args:
+ query: GraphQL mutation query that includes a $file variable of type Upload!
+ variables: Variables to pass along with the GraphQL query.
+ file_content: The file content as a file-like object (BinaryIO).
+ file_name: The name of the file being uploaded.
+ branch_name: Name of the branch on which the mutation will be executed.
+ timeout: Timeout in seconds for the query.
+ tracker: Optional tracker for request tracing.
+
+ Raises:
+ GraphQLError: When the GraphQL response contains errors.
+
+ Returns:
+ dict: The GraphQL data payload (response["data"]).
+ """
+ branch_name = branch_name or self.default_branch
+ url = self._graphql_url(branch_name=branch_name)
+
+ # Prepare variables with file placeholder
+ variables = variables or {}
+ variables["file"] = None
+
+ headers = copy.copy(self.headers or {})
+ # Remove content-type header - httpx will set it for multipart
+ headers.pop("content-type", None)
+ if self.insert_tracker and tracker:
+ headers["X-Infrahub-Tracker"] = tracker
+
+ self._echo(url=url, query=query, variables=variables)
+
+ resp = self._post_multipart(
+ url=url,
+ query=query,
+ variables=variables,
+ file_content=file_content,
+ file_name=file_name or "upload",
+ headers=headers,
+ timeout=timeout,
+ )
+
+ resp.raise_for_status()
+ response = decode_json(response=resp)
+
+ if "errors" in response:
+ raise GraphQLError(errors=response["errors"], query=query, variables=variables)
+
+ return response["data"]
+
+ @handle_relogin_sync
+ def _post_multipart(
+ self,
+ url: str,
+ query: str,
+ variables: dict,
+ file_content: BinaryIO | None,
+ file_name: str,
+ headers: dict | None = None,
+ timeout: int | None = None,
+ ) -> httpx.Response:
+ """Execute a HTTP POST with multipart/form-data for GraphQL file uploads.
+
+ The file_content is streamed directly from the file-like object, avoiding loading the entire file into memory for large files.
+ """
+ self.login()
+
+ headers = headers or {}
+ base_headers = copy.copy(self.headers or {})
+ # Remove content-type from base headers - httpx will set it for multipart
+ base_headers.pop("content-type", None)
+ headers.update(base_headers)
+
+ # Build the multipart form data according to GraphQL Multipart Request Spec
+ files = MultipartBuilder.build_payload(
+ query=query, variables=variables, file_content=file_content, file_name=file_name
+ )
+
+ return self._request_multipart(url=url, headers=headers, timeout=timeout or self.default_timeout, files=files)
+
+ def _build_proxy_config(self) -> ProxyConfigSync:
+ """Build proxy configuration for httpx Client."""
+ proxy_config: ProxyConfigSync = {"proxy": None, "mounts": None}
+ if self.config.proxy:
+ proxy_config["proxy"] = self.config.proxy
+ elif self.config.proxy_mounts.is_set:
+ proxy_config["mounts"] = {
+ key: httpx.HTTPTransport(proxy=value)
+ for key, value in self.config.proxy_mounts.model_dump(by_alias=True).items()
+ }
+ return proxy_config
+
+ def _request_multipart(
+ self, url: str, headers: dict[str, Any], timeout: int, files: dict[str, Any]
+ ) -> httpx.Response:
+ """Execute a multipart HTTP POST request."""
+ with httpx.Client(**self._build_proxy_config(), verify=self.config.tls_context) as client:
+ try:
+ response = client.post(url=url, headers=headers, timeout=timeout, files=files)
+ except httpx.NetworkError as exc:
+ raise ServerNotReachableError(address=self.address) from exc
+ except httpx.ReadTimeout as exc:
+ raise ServerNotResponsiveError(url=url, timeout=timeout) from exc
+
+ self._record(response)
+ return response
+
def count(
self,
kind: str | type[SchemaType],
@@ -2028,7 +2275,7 @@ def all(
include (list[str], optional): List of attributes or relationships to include in the query.
exclude (list[str], optional): List of attributes or relationships to exclude from the query.
fragment (bool, optional): Flag to use GraphQL fragments for generic schemas.
- prefetch_relationships (bool, optional): Flag to indicate whether to prefetch related node data.
+ prefetch_relationships (bool, optional): Flag to indicate whether to pre-fetch related node data.
parallel (bool, optional): Whether to use parallel processing for the query.
order (Order, optional): Ordering related options. Setting `disable=True` enhances performances.
include_metadata (bool, optional): If True, includes node_metadata and relationship_metadata in the query.
@@ -2069,7 +2316,7 @@ def _process_nodes_and_relationships(
response (dict[str, Any]): The response from the GraphQL query.
schema_kind (str): The kind of schema being queried.
branch (str): The branch name.
- prefetch_relationships (bool): Flag to indicate whether to prefetch relationship data.
+ prefetch_relationships (bool): Flag to indicate whether to pre-fetch relationship data.
timeout (int, optional): Overrides default timeout used when querying the GraphQL API. Specified in seconds.
Returns:
@@ -2172,7 +2419,7 @@ def filters(
include (list[str], optional): List of attributes or relationships to include in the query.
exclude (list[str], optional): List of attributes or relationships to exclude from the query.
fragment (bool, optional): Flag to use GraphQL fragments for generic schemas.
- prefetch_relationships (bool, optional): Flag to indicate whether to prefetch related node data.
+ prefetch_relationships (bool, optional): Flag to indicate whether to pre-fetch related node data.
partial_match (bool, optional): Allow partial match of filter criteria for the query.
parallel (bool, optional): Whether to use parallel processing for the query.
order (Order, optional): Ordering related options. Setting `disable=True` enhances performances.
@@ -2995,6 +3242,36 @@ def _get(self, url: str, headers: dict | None = None, timeout: int | None = None
timeout=timeout or self.default_timeout,
)
+ @contextmanager
+ def _get_streaming(
+ self, url: str, headers: dict | None = None, timeout: int | None = None
+ ) -> Iterator[httpx.Response]:
+ """Execute a streaming HTTP GET with HTTPX.
+
+ Returns a context manager that yields the streaming response.
+ Use this for downloading large files without loading into memory.
+
+ Raises:
+ ServerNotReachableError if we are not able to connect to the server
+ ServerNotResponsiveError if the server didn't respond before the timeout expired
+ """
+ self.login()
+
+ headers = headers or {}
+ base_headers = copy.copy(self.headers or {})
+ headers.update(base_headers)
+
+ with httpx.Client(**self._build_proxy_config(), verify=self.config.tls_context) as client:
+ try:
+ with client.stream(
+ method="GET", url=url, headers=headers, timeout=timeout or self.default_timeout
+ ) as response:
+ yield response
+ except httpx.NetworkError as exc:
+ raise ServerNotReachableError(address=self.address) from exc
+ except httpx.ReadTimeout as exc:
+ raise ServerNotResponsiveError(url=url, timeout=timeout or self.default_timeout) from exc
+
@handle_relogin_sync
def _post(
self,
@@ -3047,20 +3324,7 @@ def _default_request_method(
if payload:
params["json"] = payload
- proxy_config: ProxyConfigSync = {"proxy": None, "mounts": None}
-
- if self.config.proxy:
- proxy_config["proxy"] = self.config.proxy
- elif self.config.proxy_mounts.is_set:
- proxy_config["mounts"] = {
- key: httpx.HTTPTransport(proxy=value)
- for key, value in self.config.proxy_mounts.model_dump(by_alias=True).items()
- }
-
- with httpx.Client(
- **proxy_config,
- verify=self.config.tls_context,
- ) as client:
+ with httpx.Client(**self._build_proxy_config(), verify=self.config.tls_context) as client:
try:
response = client.request(
method=method.value,
@@ -3162,10 +3426,11 @@ def convert_object_type(
for more information.
"""
- if fields_mapping is None:
- mapping_dict = {}
- else:
- mapping_dict = {field_name: model.model_dump(mode="json") for field_name, model in fields_mapping.items()}
+ mapping_dict = (
+ {}
+ if fields_mapping is None
+ else {field_name: model.model_dump(mode="json") for field_name, model in fields_mapping.items()}
+ )
branch_name = branch or self.default_branch
response = self.execute_graphql(
diff --git a/infrahub_sdk/ctl/branch.py b/infrahub_sdk/ctl/branch.py
index 60d67e86..d309c1d5 100644
--- a/infrahub_sdk/ctl/branch.py
+++ b/infrahub_sdk/ctl/branch.py
@@ -110,6 +110,10 @@ def generate_proposed_change_tables(proposed_changes: list[CoreProposedChange])
proposed_change_tables: list[Table] = []
for pc in proposed_changes:
+ metadata = pc.get_node_metadata()
+ created_by = metadata.created_by.display_label if metadata and metadata.created_by else "-"
+ created_at = format_timestamp(metadata.created_at) if metadata and metadata.created_at else "-"
+
# Create proposal table
proposed_change_table = Table(show_header=False, box=None)
proposed_change_table.add_column(justify="left")
@@ -119,8 +123,8 @@ def generate_proposed_change_tables(proposed_changes: list[CoreProposedChange])
proposed_change_table.add_row("Name", pc.name.value)
proposed_change_table.add_row("State", str(pc.state.value))
proposed_change_table.add_row("Is draft", "Yes" if pc.is_draft.value else "No")
- proposed_change_table.add_row("Created by", pc.created_by.peer.name.value) # type: ignore[union-attr]
- proposed_change_table.add_row("Created at", format_timestamp(str(pc.created_by.updated_at)))
+ proposed_change_table.add_row("Created by", created_by)
+ proposed_change_table.add_row("Created at", created_at)
proposed_change_table.add_row("Approvals", str(len(pc.approved_by.peers)))
proposed_change_table.add_row("Rejections", str(len(pc.rejected_by.peers)))
@@ -295,9 +299,9 @@ async def report(
proposed_changes = await client.filters(
kind=CoreProposedChange, # type: ignore[type-abstract]
source_branch__value=branch_name,
- include=["created_by"],
prefetch_relationships=True,
property=True,
+ include_metadata=True,
)
branch_table = generate_branch_report_table(branch=branch, diff_tree=diff_tree, git_files_changed=git_files_changed)
diff --git a/infrahub_sdk/ctl/cli_commands.py b/infrahub_sdk/ctl/cli_commands.py
index 2b571723..d7a636ed 100644
--- a/infrahub_sdk/ctl/cli_commands.py
+++ b/infrahub_sdk/ctl/cli_commands.py
@@ -239,7 +239,7 @@ async def _run_transform(
elif isinstance(error, str) and "Branch:" in error:
console.print(f"[yellow] - {error}")
console.print("[yellow] you can specify a different branch with --branch")
- raise typer.Abort
+ raise typer.Abort from None
if inspect.iscoroutinefunction(transform_func):
output = await transform_func(response)
@@ -350,10 +350,7 @@ def transform(
# Run Transform
result = asyncio.run(transform.run(data=data))
- if isinstance(result, str):
- json_string = result
- else:
- json_string = ujson.dumps(result, indent=2, sort_keys=True)
+ json_string = result if isinstance(result, str) else ujson.dumps(result, indent=2, sort_keys=True)
if out:
write_to_file(Path(out), json_string)
diff --git a/infrahub_sdk/ctl/config.py b/infrahub_sdk/ctl/config.py
index b3d2a404..a5b522b2 100644
--- a/infrahub_sdk/ctl/config.py
+++ b/infrahub_sdk/ctl/config.py
@@ -90,7 +90,7 @@ def load_and_exit(self, config_file: str | Path = "infrahubctl.toml", config_dat
for error in exc.errors():
loc_str = [str(item) for item in error["loc"]]
print(f" {'/'.join(loc_str)} | {error['msg']} ({error['type']})")
- raise typer.Abort
+ raise typer.Abort from None
SETTINGS = ConfiguredSettings()
diff --git a/infrahub_sdk/ctl/exporter.py b/infrahub_sdk/ctl/exporter.py
index ae5e5d18..402b47ec 100644
--- a/infrahub_sdk/ctl/exporter.py
+++ b/infrahub_sdk/ctl/exporter.py
@@ -46,4 +46,4 @@ def dump(
aiorun(exporter.export(export_directory=directory, namespaces=namespace, branch=branch, exclude=exclude))
except TransferError as exc:
console.print(f"[red]{exc}")
- raise typer.Exit(1)
+ raise typer.Exit(1) from None
diff --git a/infrahub_sdk/ctl/importer.py b/infrahub_sdk/ctl/importer.py
index 420c6d75..d3318eb5 100644
--- a/infrahub_sdk/ctl/importer.py
+++ b/infrahub_sdk/ctl/importer.py
@@ -50,4 +50,4 @@ def load(
aiorun(importer.import_data(import_directory=directory, branch=branch))
except TransferError as exc:
console.print(f"[red]{exc}")
- raise typer.Exit(1)
+ raise typer.Exit(1) from None
diff --git a/infrahub_sdk/ctl/utils.py b/infrahub_sdk/ctl/utils.py
index 968f6093..7130ea80 100644
--- a/infrahub_sdk/ctl/utils.py
+++ b/infrahub_sdk/ctl/utils.py
@@ -51,7 +51,7 @@ def init_logging(debug: bool = False) -> None:
def handle_exception(exc: Exception, console: Console, exit_code: int) -> NoReturn:
- """Handle exeception in a different fashion based on its type."""
+ """Handle exception in a different fashion based on its type."""
if isinstance(exc, Exit):
raise typer.Exit(code=exc.exit_code)
if isinstance(exc, AuthenticationError):
diff --git a/infrahub_sdk/ctl/validate.py b/infrahub_sdk/ctl/validate.py
index 3ffbd85a..07256faf 100644
--- a/infrahub_sdk/ctl/validate.py
+++ b/infrahub_sdk/ctl/validate.py
@@ -48,7 +48,7 @@ async def validate_schema(schema: Path, _: str = CONFIG_PARAM) -> None:
for error in exc.errors():
loc_str = [str(item) for item in error["loc"]]
console.print(f" '{'/'.join(loc_str)}' | {error['msg']} ({error['type']})")
- raise typer.Exit(1)
+ raise typer.Exit(1) from None
console.print("[green]Schema is valid !!")
diff --git a/infrahub_sdk/exceptions.py b/infrahub_sdk/exceptions.py
index e1dba1d5..727239bf 100644
--- a/infrahub_sdk/exceptions.py
+++ b/infrahub_sdk/exceptions.py
@@ -136,7 +136,7 @@ def __init__(self, position: list[int | str], message: str) -> None:
super().__init__(self.message)
def __str__(self) -> str:
- return f"{'.'.join(map(str, self.position))}: {self.message}"
+ return f"{'.'.join(str(p) for p in self.position)}: {self.message}"
class AuthenticationError(Error):
diff --git a/infrahub_sdk/file_handler.py b/infrahub_sdk/file_handler.py
new file mode 100644
index 00000000..5d32441a
--- /dev/null
+++ b/infrahub_sdk/file_handler.py
@@ -0,0 +1,348 @@
+from __future__ import annotations
+
+from dataclasses import dataclass
+from io import BytesIO
+from pathlib import Path
+from typing import TYPE_CHECKING, BinaryIO, cast, overload
+
+import anyio
+import httpx
+
+from .exceptions import AuthenticationError, NodeNotFoundError, ServerNotReachableError
+
+if TYPE_CHECKING:
+ from .client import InfrahubClient, InfrahubClientSync
+
+
+@dataclass
+class PreparedFile:
+ file_object: BinaryIO | None
+ filename: str | None
+ should_close: bool
+
+
+class FileHandlerBase:
+ """Base class for file handling operations.
+
+ Provides common functionality for both async and sync file handlers, including upload preparation and error handling.
+ """
+
+ @staticmethod
+ async def prepare_upload(content: bytes | Path | BinaryIO | None, name: str | None = None) -> PreparedFile:
+ """Prepare file content for upload (async version).
+
+ Converts various content types to a consistent BinaryIO interface for streaming uploads.
+ For Path inputs, opens the file handle in a thread pool to avoid blocking the event loop.
+ The actual file reading is streamed by httpx during the HTTP request.
+
+ Args:
+ content: The file content as bytes, a Path to a file, or a file-like object.
+ Can be None if no file is set.
+ name: Optional filename. If not provided and content is a Path,
+ the filename will be derived from the path.
+
+ Returns:
+ A PreparedFile containing the file object, filename, and whether it should be closed.
+ """
+ if content is None:
+ return PreparedFile(file_object=None, filename=None, should_close=False)
+
+ if name is None and isinstance(content, Path):
+ name = content.name
+
+ filename = name or "uploaded_file"
+
+ if isinstance(content, bytes):
+ return PreparedFile(file_object=BytesIO(content), filename=filename, should_close=False)
+ if isinstance(content, Path):
+ # Open file in thread pool to avoid blocking the event loop
+ # Returns a sync file handle that httpx can stream from in chunks
+ file_obj = await anyio.to_thread.run_sync(content.open, "rb")
+ return PreparedFile(file_object=cast("BinaryIO", file_obj), filename=filename, should_close=True)
+
+ # At this point, content must be a BinaryIO (file-like object)
+ return PreparedFile(file_object=cast("BinaryIO", content), filename=filename, should_close=False)
+
+ @staticmethod
+ def prepare_upload_sync(content: bytes | Path | BinaryIO | None, name: str | None = None) -> PreparedFile:
+ """Prepare file content for upload (sync version).
+
+ Converts various content types to a consistent BinaryIO interface for streaming uploads.
+
+ Args:
+ content: The file content as bytes, a Path to a file, or a file-like object.
+ Can be None if no file is set.
+ name: Optional filename. If not provided and content is a Path,
+ the filename will be derived from the path.
+
+ Returns:
+ A PreparedFile containing the file object, filename, and whether it should be closed.
+ """
+ if content is None:
+ return PreparedFile(file_object=None, filename=None, should_close=False)
+
+ if name is None and isinstance(content, Path):
+ name = content.name
+
+ filename = name or "uploaded_file"
+
+ if isinstance(content, bytes):
+ return PreparedFile(file_object=BytesIO(content), filename=filename, should_close=False)
+ if isinstance(content, Path):
+ return PreparedFile(file_object=content.open("rb"), filename=filename, should_close=True)
+
+ # At this point, content must be a BinaryIO (file-like object)
+ return PreparedFile(file_object=cast("BinaryIO", content), filename=filename, should_close=False)
+
+ @staticmethod
+ def handle_error_response(exc: httpx.HTTPStatusError) -> None:
+ """Handle HTTP error responses for file operations.
+
+ Args:
+ exc: The HTTP status error from httpx.
+
+ Raises:
+ AuthenticationError: If authentication fails (401/403).
+ NodeNotFoundError: If the file/node is not found (404).
+ httpx.HTTPStatusError: For other HTTP errors.
+ """
+ if exc.response.status_code in {401, 403}:
+ response = exc.response.json()
+ errors = response.get("errors", [])
+ messages = [error.get("message") for error in errors]
+ raise AuthenticationError(" | ".join(messages)) from exc
+ if exc.response.status_code == 404:
+ response = exc.response.json()
+ detail = response.get("detail", "File not found")
+ raise NodeNotFoundError(node_type="FileObject", identifier=detail) from exc
+ raise exc
+
+ @staticmethod
+ def handle_response(resp: httpx.Response) -> bytes:
+ """Handle the HTTP response and return file content as bytes.
+
+ Args:
+ resp: The HTTP response from httpx.
+
+ Returns:
+ The file content as bytes.
+
+ Raises:
+ AuthenticationError: If authentication fails.
+ NodeNotFoundError: If the file is not found.
+ """
+ try:
+ resp.raise_for_status()
+ except httpx.HTTPStatusError as exc:
+ FileHandlerBase.handle_error_response(exc=exc)
+ return resp.content
+
+
+class FileHandler(FileHandlerBase):
+ """Async file handler for download operations.
+
+ Handles file downloads with support for streaming to disk
+ for memory-efficient handling of large files.
+ """
+
+ def __init__(self, client: InfrahubClient) -> None:
+ """Initialize the async file handler.
+
+ Args:
+ client: The async Infrahub client instance.
+ """
+ self._client = client
+
+ def _build_url(self, node_id: str, branch: str | None) -> str:
+ """Build the download URL for a file.
+
+ Args:
+ node_id: The ID of the FileObject node.
+ branch: Optional branch name.
+
+ Returns:
+ The complete URL for downloading the file.
+ """
+ url = f"{self._client.address}/api/storage/files/{node_id}"
+ if branch:
+ url = f"{url}?branch={branch}"
+ return url
+
+ @overload
+ async def download(self, node_id: str, branch: str | None) -> bytes: ...
+
+ @overload
+ async def download(self, node_id: str, branch: str | None, dest: Path) -> int: ...
+
+ @overload
+ async def download(self, node_id: str, branch: str | None, dest: None) -> bytes: ...
+
+ async def download(self, node_id: str, branch: str | None, dest: Path | None = None) -> bytes | int:
+ """Download file content from a FileObject node.
+
+ Args:
+ node_id: The ID of the FileObject node.
+ branch: Optional branch name. Uses client default if not provided.
+ dest: Optional destination path. If provided, streams to disk.
+
+ Returns:
+ If dest is None: The file content as bytes.
+ If dest is provided: The number of bytes written.
+
+ Raises:
+ ServerNotReachableError: If the server is not reachable.
+ AuthenticationError: If authentication fails.
+ NodeNotFoundError: If the node/file is not found.
+ """
+ effective_branch = branch or self._client.default_branch
+ url = self._build_url(node_id=node_id, branch=effective_branch)
+
+ if dest is not None:
+ return await self._stream_to_file(url=url, dest=dest)
+
+ try:
+ resp = await self._client._get(url=url)
+ except ServerNotReachableError:
+ self._client.log.error(f"Unable to connect to {self._client.address}")
+ raise
+
+ return self.handle_response(resp=resp)
+
+ async def _stream_to_file(self, url: str, dest: Path) -> int:
+ """Stream download directly to a file without loading into memory.
+
+ Args:
+ url: The URL to download from.
+ dest: The destination path to write to.
+
+ Returns:
+ The number of bytes written to the file.
+
+ Raises:
+ ServerNotReachableError: If the server is not reachable.
+ AuthenticationError: If authentication fails.
+ NodeNotFoundError: If the file is not found.
+ """
+ try:
+ async with self._client._get_streaming(url=url) as resp:
+ try:
+ resp.raise_for_status()
+ except httpx.HTTPStatusError as exc:
+ # Need to read the response body for error details
+ await resp.aread()
+ self.handle_error_response(exc=exc)
+
+ bytes_written = 0
+ async with await anyio.Path(dest).open("wb") as f:
+ async for chunk in resp.aiter_bytes(chunk_size=65536):
+ await f.write(chunk)
+ bytes_written += len(chunk)
+ return bytes_written
+ except ServerNotReachableError:
+ self._client.log.error(f"Unable to connect to {self._client.address}")
+ raise
+
+
+class FileHandlerSync(FileHandlerBase):
+ """Sync file handler for download operations.
+
+ Handles file downloads with support for streaming to disk
+ for memory-efficient handling of large files.
+ """
+
+ def __init__(self, client: InfrahubClientSync) -> None:
+ """Initialize the sync file handler.
+
+ Args:
+ client: The sync Infrahub client instance.
+ """
+ self._client = client
+
+ def _build_url(self, node_id: str, branch: str | None) -> str:
+ """Build the download URL for a file.
+
+ Args:
+ node_id: The ID of the FileObject node.
+ branch: Optional branch name.
+
+ Returns:
+ The complete URL for downloading the file.
+ """
+ url = f"{self._client.address}/api/storage/files/{node_id}"
+ if branch:
+ url = f"{url}?branch={branch}"
+ return url
+
+ @overload
+ def download(self, node_id: str, branch: str | None) -> bytes: ...
+
+ @overload
+ def download(self, node_id: str, branch: str | None, dest: Path) -> int: ...
+
+ @overload
+ def download(self, node_id: str, branch: str | None, dest: None) -> bytes: ...
+
+ def download(self, node_id: str, branch: str | None, dest: Path | None = None) -> bytes | int:
+ """Download file content from a FileObject node.
+
+ Args:
+ node_id: The ID of the FileObject node.
+ branch: Optional branch name. Uses client default if not provided.
+ dest: Optional destination path. If provided, streams to disk.
+
+ Returns:
+ If dest is None: The file content as bytes.
+ If dest is provided: The number of bytes written.
+
+ Raises:
+ ServerNotReachableError: If the server is not reachable.
+ AuthenticationError: If authentication fails.
+ NodeNotFoundError: If the node/file is not found.
+ """
+ effective_branch = branch or self._client.default_branch
+ url = self._build_url(node_id=node_id, branch=effective_branch)
+
+ if dest is not None:
+ return self._stream_to_file(url=url, dest=dest)
+
+ try:
+ resp = self._client._get(url=url)
+ except ServerNotReachableError:
+ self._client.log.error(f"Unable to connect to {self._client.address}")
+ raise
+
+ return self.handle_response(resp=resp)
+
+ def _stream_to_file(self, url: str, dest: Path) -> int:
+ """Stream download directly to a file without loading into memory.
+
+ Args:
+ url: The URL to download from.
+ dest: The destination path to write to.
+
+ Returns:
+ The number of bytes written to the file.
+
+ Raises:
+ ServerNotReachableError: If the server is not reachable.
+ AuthenticationError: If authentication fails.
+ NodeNotFoundError: If the file is not found.
+ """
+ try:
+ with self._client._get_streaming(url=url) as resp:
+ try:
+ resp.raise_for_status()
+ except httpx.HTTPStatusError as exc:
+ # Need to read the response body for error details
+ resp.read()
+ self.handle_error_response(exc=exc)
+
+ bytes_written = 0
+ with dest.open("wb") as f:
+ for chunk in resp.iter_bytes(chunk_size=65536):
+ f.write(chunk)
+ bytes_written += len(chunk)
+ return bytes_written
+ except ServerNotReachableError:
+ self._client.log.error(f"Unable to connect to {self._client.address}")
+ raise
diff --git a/infrahub_sdk/graphql/__init__.py b/infrahub_sdk/graphql/__init__.py
index 33438e35..743919b6 100644
--- a/infrahub_sdk/graphql/__init__.py
+++ b/infrahub_sdk/graphql/__init__.py
@@ -1,9 +1,11 @@
from .constants import VARIABLE_TYPE_MAPPING
+from .multipart import MultipartBuilder
from .query import Mutation, Query
from .renderers import render_input_block, render_query_block, render_variables_to_string
__all__ = [
"VARIABLE_TYPE_MAPPING",
+ "MultipartBuilder",
"Mutation",
"Query",
"render_input_block",
diff --git a/infrahub_sdk/graphql/constants.py b/infrahub_sdk/graphql/constants.py
index e2033155..0fed5c57 100644
--- a/infrahub_sdk/graphql/constants.py
+++ b/infrahub_sdk/graphql/constants.py
@@ -1,4 +1,6 @@
from datetime import datetime
+from pathlib import Path
+from typing import BinaryIO
VARIABLE_TYPE_MAPPING = (
(str, "String!"),
@@ -11,4 +13,7 @@
(bool | None, "Boolean"),
(datetime, "DateTime!"),
(datetime | None, "DateTime"),
+ (bytes, "Upload!"),
+ (Path, "Upload!"),
+ (BinaryIO, "Upload!"),
)
diff --git a/infrahub_sdk/graphql/multipart.py b/infrahub_sdk/graphql/multipart.py
new file mode 100644
index 00000000..bdb1f84e
--- /dev/null
+++ b/infrahub_sdk/graphql/multipart.py
@@ -0,0 +1,100 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+
+import ujson
+
+if TYPE_CHECKING:
+ from typing import BinaryIO
+
+
+class MultipartBuilder:
+ """Builds multipart form data payloads for GraphQL file uploads.
+
+ This class implements the GraphQL Multipart Request Spec for uploading files via GraphQL mutations. The spec defines a standard way to send files
+ alongside GraphQL operations using multipart/form-data.
+
+ The payload structure follows the spec:
+ - operations: JSON containing the GraphQL query and variables
+ - map: JSON mapping file keys to variable paths
+ - 0, 1, ...: The actual file contents
+
+ Example payload:
+ {
+ "operations": '{"query": "mutation($file: Upload!) {...}", "variables": {"file": null}}',
+ "map": '{"0": ["variables.file"]}',
+ "0": (filename, file_content)
+ }
+ """
+
+ @staticmethod
+ def build_operations(query: str, variables: dict[str, Any]) -> str:
+ """Build the operations JSON string.
+
+ Args:
+ query: The GraphQL query string.
+ variables: The variables dict (file variable should be null).
+
+ Returns:
+ JSON string containing the query and variables.
+ """
+ return ujson.dumps({"query": query, "variables": variables})
+
+ @staticmethod
+ def build_file_map(file_key: str = "0", variable_path: str = "variables.file") -> str:
+ """Build the file map JSON string.
+
+ Args:
+ file_key: The key used for the file in the multipart payload.
+ variable_path: The path to the file variable in the GraphQL variables.
+
+ Returns:
+ JSON string mapping the file key to the variable path.
+ """
+ return ujson.dumps({file_key: [variable_path]})
+
+ @staticmethod
+ def build_payload(
+ query: str,
+ variables: dict[str, Any],
+ file_content: BinaryIO | None = None,
+ file_name: str = "upload",
+ ) -> dict[str, Any]:
+ """Build the complete multipart form data payload.
+
+ Constructs the payload according to the GraphQL Multipart Request Spec. The returned dict can be passed directly to httpx as the `files`
+ parameter.
+
+ Args:
+ query: The GraphQL query string containing $file: Upload! variable.
+ variables: The variables dict. The 'file' key will be set to null.
+ file_content: The file content as a file-like object (BinaryIO).
+ If None, only the operations and map will be included.
+ file_name: The filename to use for the upload.
+
+ Returns:
+ A dict suitable for httpx's `files` parameter in a POST request.
+
+ Example:
+ >>> builder = MultipartBuilder()
+ >>> payload = builder.build_payload(
+ ... query="mutation($file: Upload!) { upload(file: $file) { id } }",
+ ... variables={"other": "value"},
+ ... file_content=open("file.pdf", "rb"),
+ ... file_name="document.pdf",
+ ... )
+ >>> # payload can be passed to httpx.post(..., files=payload)
+ """
+ # Ensure file variable is null (spec requirement)
+ variables = {**variables, "file": None}
+
+ operations = MultipartBuilder.build_operations(query=query, variables=variables)
+ file_map = MultipartBuilder.build_file_map()
+
+ files: dict[str, Any] = {"operations": (None, operations), "map": (None, file_map)}
+
+ if file_content is not None:
+ # httpx streams from file-like objects automatically
+ files["0"] = (file_name, file_content)
+
+ return files
diff --git a/infrahub_sdk/graphql/renderers.py b/infrahub_sdk/graphql/renderers.py
index b0d2ab28..5b6c2c0f 100644
--- a/infrahub_sdk/graphql/renderers.py
+++ b/infrahub_sdk/graphql/renderers.py
@@ -3,7 +3,8 @@
import json
from datetime import datetime
from enum import Enum
-from typing import Any
+from pathlib import Path
+from typing import Any, BinaryIO
from pydantic import BaseModel
@@ -88,7 +89,7 @@ def convert_to_graphql_as_string(value: Any, convert_enum: bool = False) -> str:
return str(value)
-GRAPHQL_VARIABLE_TYPES = type[str | int | float | bool | datetime | None]
+GRAPHQL_VARIABLE_TYPES = type[str | int | float | bool | datetime | bytes | Path | BinaryIO | None]
def render_variables_to_string(data: dict[str, GRAPHQL_VARIABLE_TYPES]) -> str:
@@ -148,10 +149,7 @@ def render_query_block(data: dict, offset: int = 4, indentation: int = 4, conver
elif isinstance(value, dict) and len(value) == 1 and alias_key in value and value[alias_key]:
lines.append(f"{offset_str}{value[alias_key]}: {key}")
elif isinstance(value, dict):
- if value.get(alias_key):
- key_str = f"{value[alias_key]}: {key}"
- else:
- key_str = key
+ key_str = f"{value[alias_key]}: {key}" if value.get(alias_key) else key
if value.get(filters_key):
filters_str = ", ".join(
diff --git a/infrahub_sdk/jinja2.py b/infrahub_sdk/jinja2.py
index 29afbf06..d64d22c1 100644
--- a/infrahub_sdk/jinja2.py
+++ b/infrahub_sdk/jinja2.py
@@ -7,7 +7,7 @@
def identify_faulty_jinja_code(traceback: Traceback, nbr_context_lines: int = 3) -> list[tuple[Frame, Syntax]]:
"""This function identifies the faulty Jinja2 code and beautify it to provide meaningful information to the user.
- We use the rich's Traceback to parse the complete stack trace and extract Frames for each expection found in the trace.
+ We use the rich's Traceback to parse the complete stack trace and extract Frames for each exception found in the trace.
"""
response = []
diff --git a/infrahub_sdk/node/attribute.py b/infrahub_sdk/node/attribute.py
index 8043d567..54dd99aa 100644
--- a/infrahub_sdk/node/attribute.py
+++ b/infrahub_sdk/node/attribute.py
@@ -2,7 +2,7 @@
import ipaddress
from collections.abc import Callable
-from typing import TYPE_CHECKING, Any, get_args
+from typing import TYPE_CHECKING, Any, NamedTuple, get_args
from ..protocols_base import CoreNodeBase
from ..uuidt import UUIDT
@@ -13,6 +13,33 @@
from ..schema import AttributeSchemaAPI
+class _GraphQLPayloadAttribute(NamedTuple):
+ """Result of resolving an attribute value for a GraphQL mutation.
+
+ Attributes:
+ payload: Key/value entries to include in the mutation payload
+ (e.g. ``{"value": ...}`` or ``{"from_pool": ...}``).
+ variables: GraphQL variable bindings for unsafe string values.
+ needs_metadata: When ``True``, the payload needs to append property flags/objects
+ """
+
+ payload: dict[str, Any]
+ variables: dict[str, Any]
+ needs_metadata: bool
+
+ def to_dict(self) -> dict[str, Any]:
+ return {"data": self.payload, "variables": self.variables}
+
+ def add_properties(self, properties_flag: dict[str, Any], properties_object: dict[str, str | None]) -> None:
+ if not self.needs_metadata:
+ return
+ for prop_name, prop in properties_flag.items():
+ self.payload[prop_name] = prop
+
+ for prop_name, prop in properties_object.items():
+ self.payload[prop_name] = prop
+
+
class Attribute:
"""Represents an attribute of a Node, including its schema, value, and properties."""
@@ -25,8 +52,12 @@ def __init__(self, name: str, schema: AttributeSchemaAPI, data: Any | dict) -> N
"""
self.name = name
self._schema = schema
+ self._from_pool: dict[str, Any] | None = None
- if not isinstance(data, dict) or "value" not in data:
+ if isinstance(data, dict) and "from_pool" in data:
+ self._from_pool = data.pop("from_pool")
+ data.setdefault("value", None)
+ elif not isinstance(data, dict) or "value" not in data:
data = {"value": data}
self._properties_flag = PROPERTIES_FLAG
@@ -76,38 +107,55 @@ def value(self, value: Any) -> None:
self._value = value
self.value_has_been_mutated = True
- def _generate_input_data(self) -> dict | None:
- data: dict[str, Any] = {}
- variables: dict[str, Any] = {}
-
- if self.value is None:
- if self._schema.optional and self.value_has_been_mutated:
- data["value"] = None
- return data
-
- if isinstance(self.value, str):
- if SAFE_VALUE.match(self.value):
- data["value"] = self.value
- else:
- var_name = f"value_{UUIDT.new().hex}"
- variables[var_name] = self.value
- data["value"] = f"${var_name}"
- elif isinstance(self.value, get_args(IP_TYPES)):
- data["value"] = self.value.with_prefixlen
- elif isinstance(self.value, CoreNodeBase) and self.value.is_resource_pool():
- data["from_pool"] = {"id": self.value.id}
- else:
- data["value"] = self.value
-
- for prop_name in self._properties_flag:
- if getattr(self, prop_name) is not None:
- data[prop_name] = getattr(self, prop_name)
+ def _initialize_graphql_payload(self) -> _GraphQLPayloadAttribute:
+ """Resolve the attribute value into a GraphQL mutation payload object."""
- for prop_name in self._properties_object:
- if getattr(self, prop_name) is not None:
- data[prop_name] = getattr(self, prop_name)._generate_input_data()
+ # Pool-based allocation (dict data or resource-pool node)
+ if self._from_pool is not None:
+ return _GraphQLPayloadAttribute(payload={"from_pool": self._from_pool}, variables={}, needs_metadata=True)
+ if isinstance(self.value, CoreNodeBase) and self.value.is_resource_pool():
+ return _GraphQLPayloadAttribute(
+ payload={"from_pool": {"id": self.value.id}}, variables={}, needs_metadata=True
+ )
- return {"data": data, "variables": variables}
+ # Null value
+ if self.value is None:
+ data = {"value": None} if (self._schema.optional and self.value_has_been_mutated) else {}
+ return _GraphQLPayloadAttribute(payload=data, variables={}, needs_metadata=False)
+
+ # Unsafe strings need a variable binding to avoid injection
+ if isinstance(self.value, str) and not SAFE_VALUE.match(self.value):
+ var_name = f"value_{UUIDT.new().hex}"
+ return _GraphQLPayloadAttribute(
+ payload={"value": f"${var_name}"},
+ variables={var_name: self.value},
+ needs_metadata=True,
+ )
+
+ # Safe strings, IP types, and everything else
+ value = self.value.with_prefixlen if isinstance(self.value, get_args(IP_TYPES)) else self.value
+ return _GraphQLPayloadAttribute(payload={"value": value}, variables={}, needs_metadata=True)
+
+ def _generate_input_data(self) -> _GraphQLPayloadAttribute:
+ """Build the input payload for a GraphQL mutation on this attribute.
+
+ Returns a ResolvedValue object, which contains all the data required.
+ """
+ graphql_payload = self._initialize_graphql_payload()
+
+ properties_flag: dict[str, Any] = {
+ property_name: getattr(self, property_name)
+ for property_name in self._properties_flag
+ if getattr(self, property_name) is not None
+ }
+ properties_object: dict[str, str | None] = {
+ property_name: getattr(self, property_name)._generate_input_data()
+ for property_name in self._properties_object
+ if getattr(self, property_name) is not None
+ }
+ graphql_payload.add_properties(properties_flag, properties_object)
+
+ return graphql_payload
def _generate_query_data(self, property: bool = False, include_metadata: bool = False) -> dict | None:
data: dict[str, Any] = {"value": None}
@@ -128,7 +176,15 @@ def _generate_query_data(self, property: bool = False, include_metadata: bool =
return data
def _generate_mutation_query(self) -> dict[str, Any]:
- if isinstance(self.value, CoreNodeBase) and self.value.is_resource_pool():
+ if self.is_from_pool_attribute():
# If it points to a pool, ask for the value of the pool allocated resource
return {self.name: {"value": None}}
return {}
+
+ def is_from_pool_attribute(self) -> bool:
+ """Check whether this attribute's value is sourced from a resource pool.
+
+ Returns:
+ True if the attribute value is a resource pool node or was explicitly allocated from a pool.
+ """
+ return (isinstance(self.value, CoreNodeBase) and self.value.is_resource_pool()) or self._from_pool is not None
diff --git a/infrahub_sdk/node/constants.py b/infrahub_sdk/node/constants.py
index 8d301115..7a0bc6fd 100644
--- a/infrahub_sdk/node/constants.py
+++ b/infrahub_sdk/node/constants.py
@@ -27,6 +27,9 @@
ARTIFACT_DEFINITION_GENERATE_FEATURE_NOT_SUPPORTED_MESSAGE = (
"calling generate is only supported for CoreArtifactDefinition nodes"
)
+FILE_DOWNLOAD_FEATURE_NOT_SUPPORTED_MESSAGE = (
+ "calling download_file is only supported for nodes that inherit from CoreFileObject"
+)
HIERARCHY_FETCH_FEATURE_NOT_SUPPORTED_MESSAGE = "Hierarchical fields are not supported for this node."
diff --git a/infrahub_sdk/node/node.py b/infrahub_sdk/node/node.py
index 25d9d191..a47209dc 100644
--- a/infrahub_sdk/node/node.py
+++ b/infrahub_sdk/node/node.py
@@ -2,10 +2,12 @@
from collections.abc import Iterable
from copy import copy, deepcopy
-from typing import TYPE_CHECKING, Any
+from pathlib import Path
+from typing import TYPE_CHECKING, Any, BinaryIO
from ..constants import InfrahubClientMode
from ..exceptions import FeatureNotSupportedError, NodeNotFoundError, ResourceNotDefinedError, SchemaNotFoundError
+from ..file_handler import FileHandler, FileHandlerBase, FileHandlerSync, PreparedFile
from ..graphql import Mutation, Query
from ..schema import (
GenericSchemaAPI,
@@ -21,6 +23,7 @@
ARTIFACT_DEFINITION_GENERATE_FEATURE_NOT_SUPPORTED_MESSAGE,
ARTIFACT_FETCH_FEATURE_NOT_SUPPORTED_MESSAGE,
ARTIFACT_GENERATE_FEATURE_NOT_SUPPORTED_MESSAGE,
+ FILE_DOWNLOAD_FEATURE_NOT_SUPPORTED_MESSAGE,
PROPERTIES_OBJECT,
)
from .metadata import NodeMetadata
@@ -65,14 +68,15 @@ def __init__(self, schema: MainSchemaTypesAPI, branch: str, data: dict | None =
self._attributes = [item.name for item in self._schema.attributes]
self._relationships = [item.name for item in self._schema.relationships]
- # GenericSchemaAPI doesn't have inherit_from, so we need to check the type first
- if isinstance(schema, GenericSchemaAPI):
- self._artifact_support = False
- else:
- inherit_from = getattr(schema, "inherit_from", None) or []
- self._artifact_support = "CoreArtifactTarget" in inherit_from
+ # GenericSchemaAPI doesn't have inherit_from
+ inherit_from: list[str] = getattr(schema, "inherit_from", None) or []
+ self._artifact_support = "CoreArtifactTarget" in inherit_from
+ self._file_object_support = "CoreFileObject" in inherit_from
self._artifact_definition_support = schema.kind == "CoreArtifactDefinition"
+ self._file_content: bytes | Path | BinaryIO | None = None
+ self._file_name: str | None = None
+
# Check if this node is hierarchical (supports parent/children and ancestors/descendants)
if not isinstance(schema, (ProfileSchemaAPI, GenericSchemaAPI, TemplateSchemaAPI)):
self._hierarchy_support = getattr(schema, "hierarchy", None) is not None
@@ -143,7 +147,7 @@ def get_human_friendly_id_as_string(self, include_kind: bool = False) -> str | N
if not hfid:
return None
if include_kind:
- hfid = [self.get_kind()] + hfid
+ hfid = [self.get_kind(), *hfid]
return "__".join(hfid)
@property
@@ -199,7 +203,7 @@ def get_kind(self) -> str:
def get_all_kinds(self) -> list[str]:
if inherit_from := getattr(self._schema, "inherit_from", None):
- return [self._schema.kind] + inherit_from
+ return [self._schema.kind, *inherit_from]
return [self._schema.kind]
def is_ip_prefix(self) -> bool:
@@ -213,10 +217,76 @@ def is_ip_address(self) -> bool:
def is_resource_pool(self) -> bool:
return hasattr(self._schema, "inherit_from") and "CoreResourcePool" in self._schema.inherit_from # type: ignore[union-attr]
+ def is_file_object(self) -> bool:
+ """Check if this node inherits from CoreFileObject and supports file uploads."""
+ return self._file_object_support
+
+ def upload_from_path(self, path: Path) -> None:
+ """Set a file from disk to be uploaded when saving this FileObject node.
+
+ The file will be streamed during upload, avoiding loading the entire file into memory.
+
+ Args:
+ path: Path to the file on disk.
+
+ Raises:
+ FeatureNotSupportedError: If this node doesn't inherit from CoreFileObject.
+
+ Example:
+ node.upload_from_path(path=Path("/path/to/large_file.pdf"))
+ """
+ if not self._file_object_support:
+ raise FeatureNotSupportedError(
+ f"File upload is not supported for {self._schema.kind}. Only nodes inheriting from CoreFileObject support file uploads."
+ )
+ self._file_content = path
+ self._file_name = path.name
+
+ def upload_from_bytes(self, content: bytes | BinaryIO, name: str) -> None:
+ """Set content to be uploaded when saving this FileObject node.
+
+ The content can be provided as bytes or a file-like object.
+ Using BinaryIO is recommended for large content to stream during upload.
+
+ Args:
+ content: The file content as bytes or a file-like object.
+ name: The filename to use for the uploaded file.
+
+ Raises:
+ FeatureNotSupportedError: If this node doesn't inherit from CoreFileObject.
+
+ Examples:
+ >>> # Using bytes (for small files)
+ >>> node.upload_from_bytes(content=b"file content", name="example.txt")
+
+ >>> # Using file-like object (for large files)
+ >>> with open("/path/to/file.bin", "rb") as f:
+ ... node.upload_from_bytes(content=f, name="file.bin")
+ """
+ if not self._file_object_support:
+ raise FeatureNotSupportedError(
+ f"File upload is not supported for {self._schema.kind}. Only nodes inheriting from CoreFileObject support file uploads."
+ )
+ self._file_content = content
+ self._file_name = name
+
+ def clear_file(self) -> None:
+ """Clear any pending file content."""
+ self._file_content = None
+ self._file_name = None
+
+ async def _get_file_for_upload(self) -> PreparedFile:
+ """Get the file content as a file-like object for upload (async version)."""
+ return await FileHandlerBase.prepare_upload(content=self._file_content, name=self._file_name)
+
+ def _get_file_for_upload_sync(self) -> PreparedFile:
+ """Get the file content as a file-like object for upload (sync version)."""
+ return FileHandlerBase.prepare_upload_sync(content=self._file_content, name=self._file_name)
+
def get_raw_graphql_data(self) -> dict | None:
return self._data
- def _generate_input_data( # noqa: C901, PLR0915
+ def _generate_input_data( # noqa: C901
self,
exclude_unmodified: bool = False,
exclude_hfid: bool = False,
@@ -228,27 +298,18 @@ def _generate_input_data( # noqa: C901, PLR0915
dict[str, Dict]: Representation of an input data in dict format
"""
- data = {}
- variables = {}
+ data: dict[str, Any] = {}
+ variables: dict[str, Any] = {}
for item_name in self._attributes:
attr: Attribute = getattr(self, item_name)
if attr._schema.read_only:
continue
- attr_data = attr._generate_input_data()
-
- # NOTE, this code has been inherited when we splitted attributes and relationships
- # into 2 loops, most likely it's possible to simply it
- if attr_data and isinstance(attr_data, dict):
- if variable_values := attr_data.get("data"):
- data[item_name] = variable_values
- else:
- data[item_name] = attr_data
- if variable_names := attr_data.get("variables"):
- variables.update(variable_names)
-
- elif attr_data and isinstance(attr_data, list):
- data[item_name] = attr_data
+ graphql_payload = attr._generate_input_data()
+ if graphql_payload.payload:
+ data[item_name] = graphql_payload.payload
+ if graphql_payload.variables:
+ variables.update(graphql_payload.variables)
for item_name in self._relationships:
allocate_from_pool = False
@@ -297,10 +358,16 @@ def _generate_input_data( # noqa: C901, PLR0915
elif self.hfid is not None and not exclude_hfid:
data["hfid"] = self.hfid
- mutation_payload = {"data": data}
+ mutation_payload: dict[str, Any] = {"data": data}
if context_data := self._get_request_context(request_context=request_context):
mutation_payload["context"] = context_data
+ # Add file variable for FileObject nodes with pending file content
+ # file is a mutation argument at the same level as data, not inside data
+ if self._file_object_support and self._file_content is not None:
+ mutation_payload["file"] = "$file"
+ mutation_variables["file"] = bytes
+
return {
"data": mutation_payload,
"variables": variables,
@@ -426,6 +493,10 @@ def _validate_artifact_definition_support(self, message: str) -> None:
if not self._artifact_definition_support:
raise FeatureNotSupportedError(message)
+ def _validate_file_object_support(self, message: str) -> None:
+ if not self._file_object_support:
+ raise FeatureNotSupportedError(message)
+
def generate_query_data_init(
self,
filters: dict[str, Any] | None = None,
@@ -515,6 +586,7 @@ def __init__(
data: Optional data to initialize the node.
"""
self._client = client
+ self._file_handler = FileHandler(client=client)
# Extract node_metadata before extracting node data (node_metadata is sibling to node in edges)
node_metadata_data: dict | None = None
@@ -567,10 +639,7 @@ def _init_relationships(self, data: dict | RelatedNode | None = None) -> None:
)
if value is not None
}
- if peer_id_data:
- rel_data = peer_id_data
- else:
- rel_data = None
+ rel_data = peer_id_data or None
self._relationship_cardinality_one_data[rel_schema.name] = RelatedNode(
name=rel_schema.name, branch=self._branch, client=self._client, schema=rel_schema, data=rel_data
)
@@ -703,6 +772,41 @@ async def artifact_fetch(self, name: str) -> str | dict[str, Any]:
artifact = await self._client.get(kind="CoreArtifact", name__value=name, object__ids=[self.id])
return await self._client.object_store.get(identifier=artifact._get_attribute(name="storage_id").value)
+ async def download_file(self, dest: Path | None = None) -> bytes | int:
+ """Download the file content from this FileObject node.
+
+ This method is only available for nodes that inherit from CoreFileObject.
+ The node must have been saved (have an id) before calling this method.
+
+ Args:
+ dest: Optional destination path. If provided, the file will be streamed
+ directly to this path (memory-efficient for large files) and the
+ number of bytes written will be returned. If not provided, the
+ file content will be returned as bytes.
+
+ Returns:
+ If ``dest`` is None: The file content as bytes.
+ If ``dest`` is provided: The number of bytes written to the file.
+
+ Raises:
+ FeatureNotSupportedError: If this node doesn't inherit from CoreFileObject.
+ ValueError: If the node hasn't been saved yet or file not found.
+ AuthenticationError: If authentication fails.
+
+ Examples:
+ >>> # Download to memory
+ >>> content = await contract.download_file()
+
+ >>> # Stream to file (memory-efficient for large files)
+ >>> bytes_written = await contract.download_file(dest=Path("/tmp/contract.pdf"))
+ """
+ self._validate_file_object_support(message=FILE_DOWNLOAD_FEATURE_NOT_SUPPORTED_MESSAGE)
+
+ if not self.id:
+ raise ValueError("Cannot download file for a node that hasn't been saved yet.")
+
+ return await self._file_handler.download(node_id=self.id, branch=self._branch, dest=dest)
+
async def delete(self, timeout: int | None = None, request_context: RequestContext | None = None) -> None:
input_data = {"data": {"id": self.id}}
if context_data := self._get_request_context(request_context=request_context):
@@ -1011,11 +1115,7 @@ async def _process_mutation_result(
for attr_name in self._attributes:
attr = getattr(self, attr_name)
- if (
- attr_name not in object_response
- or not isinstance(attr.value, InfrahubNodeBase)
- or not attr.value.is_resource_pool()
- ):
+ if attr_name not in object_response or not attr.is_from_pool_attribute():
continue
# Process allocated resource from a pool and update attribute
@@ -1037,6 +1137,12 @@ async def _process_mutation_result(
async def create(
self, allow_upsert: bool = False, timeout: int | None = None, request_context: RequestContext | None = None
) -> None:
+ if self._file_object_support and self._file_content is None:
+ raise ValueError(
+ f"Cannot create {self._schema.kind} without file content. Use upload_from_path() or upload_from_bytes() to provide "
+ "file content before saving."
+ )
+
mutation_query = self._generate_mutation_query()
# Upserting means we may want to create, meaning payload contains all mandatory fields required for a creation,
@@ -1049,19 +1155,39 @@ async def create(
input_data = self._generate_input_data(exclude_hfid=True, request_context=request_context)
mutation_name = f"{self._schema.kind}Create"
tracker = f"mutation-{str(self._schema.kind).lower()}-create"
+
query = Mutation(
mutation=mutation_name,
input_data=input_data["data"],
query=mutation_query,
variables=input_data["mutation_variables"],
)
- response = await self._client.execute_graphql(
- query=query.render(),
- branch_name=self._branch,
- tracker=tracker,
- variables=input_data["variables"],
- timeout=timeout,
- )
+
+ if "file" in input_data["mutation_variables"]:
+ prepared = await self._get_file_for_upload()
+ try:
+ response = await self._client._execute_graphql_with_file(
+ query=query.render(),
+ variables=input_data["variables"],
+ file_content=prepared.file_object,
+ file_name=prepared.filename,
+ branch_name=self._branch,
+ tracker=tracker,
+ timeout=timeout,
+ )
+ finally:
+ if prepared.should_close and prepared.file_object:
+ prepared.file_object.close()
+ # Clear the file content after successful upload
+ self.clear_file()
+ else:
+ response = await self._client.execute_graphql(
+ query=query.render(),
+ branch_name=self._branch,
+ tracker=tracker,
+ variables=input_data["variables"],
+ timeout=timeout,
+ )
await self._process_mutation_result(mutation_name=mutation_name, response=response, timeout=timeout)
async def update(
@@ -1070,6 +1196,7 @@ async def update(
input_data = self._generate_input_data(exclude_unmodified=not do_full_update, request_context=request_context)
mutation_query = self._generate_mutation_query()
mutation_name = f"{self._schema.kind}Update"
+ tracker = f"mutation-{str(self._schema.kind).lower()}-update"
query = Mutation(
mutation=mutation_name,
@@ -1077,13 +1204,32 @@ async def update(
query=mutation_query,
variables=input_data["mutation_variables"],
)
- response = await self._client.execute_graphql(
- query=query.render(),
- branch_name=self._branch,
- timeout=timeout,
- tracker=f"mutation-{str(self._schema.kind).lower()}-update",
- variables=input_data["variables"],
- )
+
+ if "file" in input_data["mutation_variables"]:
+ prepared = await self._get_file_for_upload()
+ try:
+ response = await self._client._execute_graphql_with_file(
+ query=query.render(),
+ variables=input_data["variables"],
+ file_content=prepared.file_object,
+ file_name=prepared.filename,
+ branch_name=self._branch,
+ tracker=tracker,
+ timeout=timeout,
+ )
+ finally:
+ if prepared.should_close and prepared.file_object:
+ prepared.file_object.close()
+ # Clear the file content after successful upload
+ self.clear_file()
+ else:
+ response = await self._client.execute_graphql(
+ query=query.render(),
+ branch_name=self._branch,
+ timeout=timeout,
+ tracker=tracker,
+ variables=input_data["variables"],
+ )
await self._process_mutation_result(mutation_name=mutation_name, response=response, timeout=timeout)
async def _process_relationships(
@@ -1288,7 +1434,7 @@ async def get_flat_value(self, key: str, separator: str = "__") -> Any:
return await related_node.peer.get_flat_value(key=remaining, separator=separator)
async def extract(self, params: dict[str, str]) -> dict[str, Any]:
- """Extract some datapoints defined in a flat notation."""
+ """Extract some data points defined in a flat notation."""
result: dict[str, Any] = {}
for key, value in params.items():
result[key] = await self.get_flat_value(key=value)
@@ -1323,6 +1469,7 @@ def __init__(
data (Optional[dict]): Optional data to initialize the node.
"""
self._client = client
+ self._file_handler = FileHandlerSync(client=client)
# Extract node_metadata before extracting node data (node_metadata is sibling to node in edges)
node_metadata_data: dict | None = None
@@ -1375,10 +1522,7 @@ def _init_relationships(self, data: dict | None = None) -> None:
)
if value is not None
}
- if peer_id_data:
- rel_data = peer_id_data
- else:
- rel_data = None
+ rel_data = peer_id_data or None
self._relationship_cardinality_one_data[rel_schema.name] = RelatedNodeSync(
name=rel_schema.name, branch=self._branch, client=self._client, schema=rel_schema, data=rel_data
)
@@ -1512,6 +1656,41 @@ def artifact_fetch(self, name: str) -> str | dict[str, Any]:
artifact = self._client.get(kind="CoreArtifact", name__value=name, object__ids=[self.id])
return self._client.object_store.get(identifier=artifact._get_attribute(name="storage_id").value)
+ def download_file(self, dest: Path | None = None) -> bytes | int:
+ """Download the file content from this FileObject node.
+
+ This method is only available for nodes that inherit from CoreFileObject.
+ The node must have been saved (have an id) before calling this method.
+
+ Args:
+ dest: Optional destination path. If provided, the file will be streamed
+ directly to this path (memory-efficient for large files) and the
+ number of bytes written will be returned. If not provided, the
+ file content will be returned as bytes.
+
+ Returns:
+ If ``dest`` is None: The file content as bytes.
+ If ``dest`` is provided: The number of bytes written to the file.
+
+ Raises:
+ FeatureNotSupportedError: If this node doesn't inherit from CoreFileObject.
+ ValueError: If the node hasn't been saved yet or file not found.
+ AuthenticationError: If authentication fails.
+
+ Examples:
+ >>> # Download to memory
+ >>> content = contract.download_file()
+
+ >>> # Stream to file (memory-efficient for large files)
+ >>> bytes_written = contract.download_file(dest=Path("/tmp/contract.pdf"))
+ """
+ self._validate_file_object_support(message=FILE_DOWNLOAD_FEATURE_NOT_SUPPORTED_MESSAGE)
+
+ if not self.id:
+ raise ValueError("Cannot download file for a node that hasn't been saved yet.")
+
+ return self._file_handler.download(node_id=self.id, branch=self._branch, dest=dest)
+
def delete(self, timeout: int | None = None, request_context: RequestContext | None = None) -> None:
input_data = {"data": {"id": self.id}}
if context_data := self._get_request_context(request_context=request_context):
@@ -1819,11 +1998,7 @@ def _process_mutation_result(
for attr_name in self._attributes:
attr = getattr(self, attr_name)
- if (
- attr_name not in object_response
- or not isinstance(attr.value, InfrahubNodeBase)
- or not attr.value.is_resource_pool()
- ):
+ if attr_name not in object_response or not attr.is_from_pool_attribute():
continue
# Process allocated resource from a pool and update attribute
@@ -1845,6 +2020,12 @@ def _process_mutation_result(
def create(
self, allow_upsert: bool = False, timeout: int | None = None, request_context: RequestContext | None = None
) -> None:
+ if self._file_object_support and self._file_content is None:
+ raise ValueError(
+ f"Cannot create {self._schema.kind} without file content. Use upload_from_path() or upload_from_bytes() to provide "
+ "file content before saving."
+ )
+
mutation_query = self._generate_mutation_query()
if allow_upsert:
@@ -1855,6 +2036,7 @@ def create(
input_data = self._generate_input_data(exclude_hfid=True, request_context=request_context)
mutation_name = f"{self._schema.kind}Create"
tracker = f"mutation-{str(self._schema.kind).lower()}-create"
+
query = Mutation(
mutation=mutation_name,
input_data=input_data["data"],
@@ -1862,13 +2044,31 @@ def create(
variables=input_data["mutation_variables"],
)
- response = self._client.execute_graphql(
- query=query.render(),
- branch_name=self._branch,
- tracker=tracker,
- variables=input_data["variables"],
- timeout=timeout,
- )
+ if "file" in input_data["mutation_variables"]:
+ prepared = self._get_file_for_upload_sync()
+ try:
+ response = self._client._execute_graphql_with_file(
+ query=query.render(),
+ variables=input_data["variables"],
+ file_content=prepared.file_object,
+ file_name=prepared.filename,
+ branch_name=self._branch,
+ tracker=tracker,
+ timeout=timeout,
+ )
+ finally:
+ if prepared.should_close and prepared.file_object:
+ prepared.file_object.close()
+ # Clear the file content after successful upload
+ self.clear_file()
+ else:
+ response = self._client.execute_graphql(
+ query=query.render(),
+ branch_name=self._branch,
+ tracker=tracker,
+ variables=input_data["variables"],
+ timeout=timeout,
+ )
self._process_mutation_result(mutation_name=mutation_name, response=response, timeout=timeout)
def update(
@@ -1877,6 +2077,7 @@ def update(
input_data = self._generate_input_data(exclude_unmodified=not do_full_update, request_context=request_context)
mutation_query = self._generate_mutation_query()
mutation_name = f"{self._schema.kind}Update"
+ tracker = f"mutation-{str(self._schema.kind).lower()}-update"
query = Mutation(
mutation=mutation_name,
@@ -1885,13 +2086,31 @@ def update(
variables=input_data["mutation_variables"],
)
- response = self._client.execute_graphql(
- query=query.render(),
- branch_name=self._branch,
- tracker=f"mutation-{str(self._schema.kind).lower()}-update",
- variables=input_data["variables"],
- timeout=timeout,
- )
+ if "file" in input_data["mutation_variables"]:
+ prepared = self._get_file_for_upload_sync()
+ try:
+ response = self._client._execute_graphql_with_file(
+ query=query.render(),
+ variables=input_data["variables"],
+ file_content=prepared.file_object,
+ file_name=prepared.filename,
+ branch_name=self._branch,
+ tracker=tracker,
+ timeout=timeout,
+ )
+ finally:
+ if prepared.should_close and prepared.file_object:
+ prepared.file_object.close()
+ # Clear the file content after successful upload
+ self.clear_file()
+ else:
+ response = self._client.execute_graphql(
+ query=query.render(),
+ branch_name=self._branch,
+ tracker=tracker,
+ variables=input_data["variables"],
+ timeout=timeout,
+ )
self._process_mutation_result(mutation_name=mutation_name, response=response, timeout=timeout)
def _process_relationships(
@@ -2096,7 +2315,7 @@ def get_flat_value(self, key: str, separator: str = "__") -> Any:
return related_node.peer.get_flat_value(key=remaining, separator=separator)
def extract(self, params: dict[str, str]) -> dict[str, Any]:
- """Extract some datapoints defined in a flat notation."""
+ """Extract some data points defined in a flat notation."""
result: dict[str, Any] = {}
for key, value in params.items():
result[key] = self.get_flat_value(key=value)
diff --git a/infrahub_sdk/node/parsers.py b/infrahub_sdk/node/parsers.py
index 0ae830f1..c5d2fbbd 100644
--- a/infrahub_sdk/node/parsers.py
+++ b/infrahub_sdk/node/parsers.py
@@ -4,7 +4,7 @@
def parse_human_friendly_id(hfid: str | list[str]) -> tuple[str | None, list[str]]:
- """Parse a human friendly ID into a kind and an identifier."""
+ """Parse a human-friendly ID into a kind and an identifier."""
if isinstance(hfid, str):
hfid_parts = hfid.split(HFID_STR_SEPARATOR)
if len(hfid_parts) == 1:
diff --git a/infrahub_sdk/protocols.py b/infrahub_sdk/protocols.py
index b3752bed..c359ad5c 100644
--- a/infrahub_sdk/protocols.py
+++ b/infrahub_sdk/protocols.py
@@ -29,7 +29,6 @@
StringOptional,
)
-# pylint: disable=too-many-ancestors
# ---------------------------------------------
# ASYNC
@@ -108,6 +107,14 @@ class CoreCredential(CoreNode):
description: StringOptional
+class CoreFileObject(CoreNode):
+ file_name: String
+ checksum: String
+ file_size: Integer
+ file_type: String
+ storage_id: String
+
+
class CoreGenericAccount(CoreNode):
name: String
password: HashedPassword
@@ -227,6 +234,7 @@ class CoreValidator(CoreNode):
class CoreWebhook(CoreNode):
name: String
event_type: Enum
+ active: Boolean
branch_scope: Dropdown
node_kind: StringOptional
description: StringOptional
@@ -499,7 +507,6 @@ class CoreProposedChange(CoreTaskTarget):
approved_by: RelationshipManager
rejected_by: RelationshipManager
reviewers: RelationshipManager
- created_by: RelatedNode
comments: RelationshipManager
threads: RelationshipManager
validations: RelationshipManager
@@ -665,6 +672,14 @@ class CoreCredentialSync(CoreNodeSync):
description: StringOptional
+class CoreFileObjectSync(CoreNodeSync):
+ file_name: String
+ checksum: String
+ file_size: Integer
+ file_type: String
+ storage_id: String
+
+
class CoreGenericAccountSync(CoreNodeSync):
name: String
password: HashedPassword
@@ -784,6 +799,7 @@ class CoreValidatorSync(CoreNodeSync):
class CoreWebhookSync(CoreNodeSync):
name: String
event_type: Enum
+ active: Boolean
branch_scope: Dropdown
node_kind: StringOptional
description: StringOptional
@@ -1056,7 +1072,6 @@ class CoreProposedChangeSync(CoreTaskTargetSync):
approved_by: RelationshipManagerSync
rejected_by: RelationshipManagerSync
reviewers: RelationshipManagerSync
- created_by: RelatedNodeSync
comments: RelationshipManagerSync
threads: RelationshipManagerSync
validations: RelationshipManagerSync
diff --git a/infrahub_sdk/protocols_base.py b/infrahub_sdk/protocols_base.py
index 8a841b5b..7f6569ae 100644
--- a/infrahub_sdk/protocols_base.py
+++ b/infrahub_sdk/protocols_base.py
@@ -6,6 +6,7 @@
import ipaddress
from .context import RequestContext
+ from .node.metadata import NodeMetadata
from .schema import MainSchemaTypes
@@ -203,6 +204,8 @@ def is_resource_pool(self) -> bool: ...
def get_raw_graphql_data(self) -> dict | None: ...
+ def get_node_metadata(self) -> NodeMetadata | None: ...
+
@runtime_checkable
class CoreNode(CoreNodeBase, Protocol):
diff --git a/infrahub_sdk/protocols_generator/generator.py b/infrahub_sdk/protocols_generator/generator.py
index e70e221c..38bc968f 100644
--- a/infrahub_sdk/protocols_generator/generator.py
+++ b/infrahub_sdk/protocols_generator/generator.py
@@ -59,13 +59,13 @@ def __init__(self, schema: dict[str, MainSchemaTypesAll]) -> None:
not in {"TYPE_CHECKING", "CoreNode", "Optional", "Protocol", "Union", "annotations", "runtime_checkable"}
]
- self.sorted_generics = self._sort_and_filter_models(self.generics, filters=["CoreNode"] + self.base_protocols)
- self.sorted_nodes = self._sort_and_filter_models(self.nodes, filters=["CoreNode"] + self.base_protocols)
+ self.sorted_generics = self._sort_and_filter_models(self.generics, filters=["CoreNode", *self.base_protocols])
+ self.sorted_nodes = self._sort_and_filter_models(self.nodes, filters=["CoreNode", *self.base_protocols])
self.sorted_profiles = self._sort_and_filter_models(
- self.profiles, filters=["CoreProfile"] + self.base_protocols
+ self.profiles, filters=["CoreProfile", *self.base_protocols]
)
self.sorted_templates = self._sort_and_filter_models(
- self.templates, filters=["CoreObjectTemplate"] + self.base_protocols
+ self.templates, filters=["CoreObjectTemplate", *self.base_protocols]
)
def render(self, sync: bool = True) -> str:
diff --git a/infrahub_sdk/py.typed b/infrahub_sdk/py.typed
new file mode 100644
index 00000000..e69de29b
diff --git a/infrahub_sdk/pytest_plugin/items/base.py b/infrahub_sdk/pytest_plugin/items/base.py
index a1b35a00..ae08f036 100644
--- a/infrahub_sdk/pytest_plugin/items/base.py
+++ b/infrahub_sdk/pytest_plugin/items/base.py
@@ -75,7 +75,7 @@ def reportinfo(self) -> tuple[Path | str, int | None, str]:
def repository_base(self) -> str:
"""Return the path to the root of the repository
- This will be an absolute path if --infrahub-config-path is an absolut path as happens when
+ This will be an absolute path if --infrahub-config-path is an absolute path as happens when
tests are started from within Infrahub server.
"""
config_path: Path = getattr(self.session, _infrahub_config_path_attribute)
diff --git a/infrahub_sdk/pytest_plugin/items/check.py b/infrahub_sdk/pytest_plugin/items/check.py
index f42f4808..8f68b271 100644
--- a/infrahub_sdk/pytest_plugin/items/check.py
+++ b/infrahub_sdk/pytest_plugin/items/check.py
@@ -12,7 +12,7 @@
from .base import InfrahubItem
if TYPE_CHECKING:
- from pytest import ExceptionInfo
+ import pytest
from ...checks import InfrahubCheck
from ...schema.repository import InfrahubRepositoryConfigElement
@@ -46,7 +46,7 @@ def run_check(self, variables: dict[str, Any]) -> Any:
self.instantiate_check()
return asyncio.run(self.check_instance.run(data=variables))
- def repr_failure(self, excinfo: ExceptionInfo, style: str | None = None) -> str:
+ def repr_failure(self, excinfo: pytest.ExceptionInfo, style: str | None = None) -> str:
if isinstance(excinfo.value, HTTPStatusError):
try:
response_content = ujson.dumps(excinfo.value.response.json(), indent=4)
diff --git a/infrahub_sdk/pytest_plugin/items/graphql_query.py b/infrahub_sdk/pytest_plugin/items/graphql_query.py
index defb9fb9..bced5542 100644
--- a/infrahub_sdk/pytest_plugin/items/graphql_query.py
+++ b/infrahub_sdk/pytest_plugin/items/graphql_query.py
@@ -11,7 +11,7 @@
from .base import InfrahubItem
if TYPE_CHECKING:
- from pytest import ExceptionInfo
+ import pytest
class InfrahubGraphQLQueryItem(InfrahubItem):
@@ -25,7 +25,7 @@ def execute_query(self) -> Any:
variables=self.test.spec.get_variables_data(), # type: ignore[union-attr]
)
- def repr_failure(self, excinfo: ExceptionInfo, style: str | None = None) -> str:
+ def repr_failure(self, excinfo: pytest.ExceptionInfo, style: str | None = None) -> str:
if isinstance(excinfo.value, HTTPStatusError):
try:
response_content = ujson.dumps(excinfo.value.response.json(), indent=4)
diff --git a/infrahub_sdk/pytest_plugin/items/jinja2_transform.py b/infrahub_sdk/pytest_plugin/items/jinja2_transform.py
index 433309a4..fe54fd71 100644
--- a/infrahub_sdk/pytest_plugin/items/jinja2_transform.py
+++ b/infrahub_sdk/pytest_plugin/items/jinja2_transform.py
@@ -16,7 +16,7 @@
from .base import InfrahubItem
if TYPE_CHECKING:
- from pytest import ExceptionInfo
+ import pytest
class InfrahubJinja2Item(InfrahubItem):
@@ -57,7 +57,7 @@ def get_result_differences(self, computed: Any) -> str | None:
)
return "\n".join(differences)
- def repr_failure(self, excinfo: ExceptionInfo, style: str | None = None) -> str:
+ def repr_failure(self, excinfo: pytest.ExceptionInfo, style: str | None = None) -> str:
if isinstance(excinfo.value, HTTPStatusError):
try:
response_content = ujson.dumps(excinfo.value.response.json(), indent=4, sort_keys=True)
@@ -94,7 +94,7 @@ def runtest(self) -> None:
if computed is not None and differences and self.test.expect == InfrahubTestExpectedResult.PASS:
raise OutputMatchError(name=self.name, differences=differences)
- def repr_failure(self, excinfo: ExceptionInfo, style: str | None = None) -> str:
+ def repr_failure(self, excinfo: pytest.ExceptionInfo, style: str | None = None) -> str:
if isinstance(excinfo.value, (JinjaTemplateError)):
return str(excinfo.value.message)
diff --git a/infrahub_sdk/pytest_plugin/items/python_transform.py b/infrahub_sdk/pytest_plugin/items/python_transform.py
index 0ec42052..f895e971 100644
--- a/infrahub_sdk/pytest_plugin/items/python_transform.py
+++ b/infrahub_sdk/pytest_plugin/items/python_transform.py
@@ -13,7 +13,7 @@
from .base import InfrahubItem
if TYPE_CHECKING:
- from pytest import ExceptionInfo
+ import pytest
from ...schema.repository import InfrahubRepositoryConfigElement
from ...transforms import InfrahubTransform
@@ -48,7 +48,7 @@ def run_transform(self, variables: dict[str, Any]) -> Any:
self.instantiate_transform()
return asyncio.run(self.transform_instance.run(data=variables))
- def repr_failure(self, excinfo: ExceptionInfo, style: str | None = None) -> str:
+ def repr_failure(self, excinfo: pytest.ExceptionInfo, style: str | None = None) -> str:
if isinstance(excinfo.value, HTTPStatusError):
try:
response_content = ujson.dumps(excinfo.value.response.json(), indent=4)
diff --git a/infrahub_sdk/pytest_plugin/loader.py b/infrahub_sdk/pytest_plugin/loader.py
index 5912a0c3..c26b09d2 100644
--- a/infrahub_sdk/pytest_plugin/loader.py
+++ b/infrahub_sdk/pytest_plugin/loader.py
@@ -6,7 +6,6 @@
import pytest
import yaml
-from pytest import Item
from .exceptions import InvalidResourceConfigError
from .items import (
@@ -66,7 +65,7 @@ def get_resource_config(self, group: InfrahubTestGroup) -> Any | None:
return resource_config
- def collect_group(self, group: InfrahubTestGroup) -> Iterable[Item]:
+ def collect_group(self, group: InfrahubTestGroup) -> Iterable[pytest.Item]:
"""Collect all items for a group."""
marker = MARKER_MAPPING[group.resource]
resource_config = self.get_resource_config(group)
@@ -98,7 +97,7 @@ def collect_group(self, group: InfrahubTestGroup) -> Iterable[Item]:
yield item
- def collect(self) -> Iterable[Item]:
+ def collect(self) -> Iterable[pytest.Item]:
raw = yaml.safe_load(self.path.open(encoding="utf-8"))
if not raw or "infrahub_tests" not in raw:
diff --git a/infrahub_sdk/pytest_plugin/plugin.py b/infrahub_sdk/pytest_plugin/plugin.py
index 74148a05..258e7f9c 100644
--- a/infrahub_sdk/pytest_plugin/plugin.py
+++ b/infrahub_sdk/pytest_plugin/plugin.py
@@ -3,8 +3,7 @@
import os
from pathlib import Path
-from pytest import Collector, Config, Item, Parser, Session
-from pytest import exit as exit_test
+import pytest
from .. import InfrahubClientSync
from ..utils import is_valid_url
@@ -12,7 +11,7 @@
from .utils import find_repository_config_file, load_repository_config
-def pytest_addoption(parser: Parser) -> None:
+def pytest_addoption(parser: pytest.Parser) -> None:
group = parser.getgroup("pytest-infrahub")
group.addoption(
"--infrahub-repo-config",
@@ -62,7 +61,7 @@ def pytest_addoption(parser: Parser) -> None:
)
-def pytest_sessionstart(session: Session) -> None:
+def pytest_sessionstart(session: pytest.Session) -> None:
if session.config.option.infrahub_repo_config:
session.infrahub_config_path = Path(session.config.option.infrahub_repo_config) # type: ignore[attr-defined]
else:
@@ -72,7 +71,7 @@ def pytest_sessionstart(session: Session) -> None:
session.infrahub_repo_config = load_repository_config(repo_config_file=session.infrahub_config_path) # type: ignore[attr-defined]
if not is_valid_url(session.config.option.infrahub_address):
- exit_test("Infrahub test instance address is not a valid URL", returncode=1)
+ pytest.exit("Infrahub test instance address is not a valid URL", returncode=1)
client_config = {
"address": session.config.option.infrahub_address,
@@ -89,13 +88,13 @@ def pytest_sessionstart(session: Session) -> None:
session.infrahub_client = infrahub_client # type: ignore[attr-defined]
-def pytest_collect_file(parent: Collector | Item, file_path: Path) -> InfrahubYamlFile | None:
+def pytest_collect_file(parent: pytest.Collector | pytest.Item, file_path: Path) -> InfrahubYamlFile | None:
if file_path.suffix in {".yml", ".yaml"} and file_path.name.startswith("test_"):
return InfrahubYamlFile.from_parent(parent, path=file_path)
return None
-def pytest_configure(config: Config) -> None:
+def pytest_configure(config: pytest.Config) -> None:
config.addinivalue_line("markers", "infrahub: Infrahub test")
config.addinivalue_line("markers", "infrahub_smoke: Smoke test for an Infrahub resource")
config.addinivalue_line("markers", "infrahub_unit: Unit test for an Infrahub resource, works without dependencies")
diff --git a/infrahub_sdk/spec/object.py b/infrahub_sdk/spec/object.py
index cf7a6fc3..30d9f93d 100644
--- a/infrahub_sdk/spec/object.py
+++ b/infrahub_sdk/spec/object.py
@@ -265,7 +265,7 @@ async def validate_object(
# First validate if all mandatory fields are present
errors.extend(
- ObjectValidationError(position=position + [element], message=f"{element} is mandatory")
+ ObjectValidationError(position=[*position, element], message=f"{element} is mandatory")
for element in schema.mandatory_input_names
if not any([element in data, element in context])
)
@@ -275,7 +275,7 @@ async def validate_object(
if key not in schema.attribute_names and key not in schema.relationship_names:
errors.append(
ObjectValidationError(
- position=position + [key],
+ position=[*position, key],
message=f"{key} is not a valid attribute or relationship for {schema.kind}",
)
)
@@ -283,7 +283,7 @@ async def validate_object(
if key in schema.attribute_names and not isinstance(value, (str, int, float, bool, list, dict)):
errors.append(
ObjectValidationError(
- position=position + [key],
+ position=[*position, key],
message=f"{key} must be a string, int, float, bool, list, or dict",
)
)
@@ -295,7 +295,7 @@ async def validate_object(
if not rel_info.is_valid:
errors.append(
ObjectValidationError(
- position=position + [key],
+ position=[*position, key],
message=rel_info.reason_relationship_not_valid or "Invalid relationship",
)
)
@@ -303,7 +303,7 @@ async def validate_object(
errors.extend(
await cls.validate_related_nodes(
client=client,
- position=position + [key],
+ position=[*position, key],
rel_info=rel_info,
data=value,
context=context,
@@ -378,7 +378,7 @@ async def validate_related_nodes(
errors.extend(
await cls.validate_object(
client=client,
- position=position + [idx + 1],
+ position=[*position, idx + 1],
schema=peer_schema,
data=peer_data,
context=context,
@@ -403,7 +403,7 @@ async def validate_related_nodes(
errors.extend(
await cls.validate_object(
client=client,
- position=position + [idx + 1],
+ position=[*position, idx + 1],
schema=peer_schema,
data=item["data"],
context=context,
@@ -613,7 +613,7 @@ async def create_related_nodes(
node = await cls.create_node(
client=client,
schema=peer_schema,
- position=position + [rel_info.name, idx + 1],
+ position=[*position, rel_info.name, idx + 1],
data=peer_data,
context=context,
branch=branch,
@@ -639,7 +639,7 @@ async def create_related_nodes(
node = await cls.create_node(
client=client,
schema=peer_schema,
- position=position + [rel_info.name, idx + 1],
+ position=[*position, rel_info.name, idx + 1],
data=item["data"],
context=context,
branch=branch,
@@ -681,7 +681,7 @@ def spec(self) -> InfrahubObjectFileData:
try:
self._spec = InfrahubObjectFileData(**self.data.spec)
except Exception as exc:
- raise ValidationError(identifier=str(self.location), message=str(exc))
+ raise ValidationError(identifier=str(self.location), message=str(exc)) from exc
return self._spec
def validate_content(self) -> None:
@@ -691,7 +691,7 @@ def validate_content(self) -> None:
try:
self._spec = InfrahubObjectFileData(**self.data.spec)
except Exception as exc:
- raise ValidationError(identifier=str(self.location), message=str(exc))
+ raise ValidationError(identifier=str(self.location), message=str(exc)) from exc
async def validate_format(self, client: InfrahubClient, branch: str | None = None) -> None:
self.validate_content()
diff --git a/infrahub_sdk/template/__init__.py b/infrahub_sdk/template/__init__.py
index ff866ecd..6a7f7fe2 100644
--- a/infrahub_sdk/template/__init__.py
+++ b/infrahub_sdk/template/__init__.py
@@ -64,14 +64,11 @@ def get_template(self) -> jinja2.Template:
return self._template_definition
try:
- if self.is_string_based:
- template = self._get_string_based_template()
- else:
- template = self._get_file_based_template()
+ template = self._get_string_based_template() if self.is_string_based else self._get_file_based_template()
except jinja2.TemplateSyntaxError as exc:
self._raise_template_syntax_error(error=exc)
except jinja2.TemplateNotFound as exc:
- raise JinjaTemplateNotFoundError(message=exc.message, filename=str(exc.name))
+ raise JinjaTemplateNotFoundError(message=exc.message, filename=str(exc.name)) from exc
return template
@@ -119,19 +116,18 @@ async def render(self, variables: dict[str, Any]) -> str:
try:
output = await template.render_async(variables)
except jinja2.exceptions.TemplateNotFound as exc:
- raise JinjaTemplateNotFoundError(message=exc.message, filename=str(exc.name), base_template=template.name)
+ raise JinjaTemplateNotFoundError(
+ message=exc.message, filename=str(exc.name), base_template=template.name
+ ) from exc
except jinja2.TemplateSyntaxError as exc:
self._raise_template_syntax_error(error=exc)
except jinja2.UndefinedError as exc:
traceback = Traceback(show_locals=False)
errors = _identify_faulty_jinja_code(traceback=traceback)
- raise JinjaTemplateUndefinedError(message=exc.message, errors=errors)
+ raise JinjaTemplateUndefinedError(message=exc.message, errors=errors) from exc
except Exception as exc:
- if error_message := getattr(exc, "message", None):
- message = error_message
- else:
- message = str(exc)
- raise JinjaTemplateError(message=message or "Unknown template error")
+ message = error_message if (error_message := getattr(exc, "message", None)) else str(exc)
+ raise JinjaTemplateError(message=message or "Unknown template error") from exc
return output
@@ -195,10 +191,7 @@ def _identify_faulty_jinja_code(traceback: Traceback, nbr_context_lines: int = 3
# Extract only the Jinja related exception
for frame in [frame for frame in traceback.trace.stacks[0].frames if not frame.filename.endswith(".py")]:
code = "".join(linecache.getlines(frame.filename))
- if frame.filename == "":
- lexer_name = "text"
- else:
- lexer_name = Traceback._guess_lexer(frame.filename, code)
+ lexer_name = "text" if frame.filename == "" else Traceback._guess_lexer(frame.filename, code)
syntax = Syntax(
code,
lexer_name,
diff --git a/infrahub_sdk/testing/schemas/file_object.py b/infrahub_sdk/testing/schemas/file_object.py
new file mode 100644
index 00000000..dc79b214
--- /dev/null
+++ b/infrahub_sdk/testing/schemas/file_object.py
@@ -0,0 +1,45 @@
+import pytest
+
+from infrahub_sdk import InfrahubClient, InfrahubClientSync
+from infrahub_sdk.schema.main import AttributeKind, NodeSchema, SchemaRoot
+from infrahub_sdk.schema.main import AttributeSchema as Attr
+
+NAMESPACE = "Testing"
+TESTING_FILE_CONTRACT = f"{NAMESPACE}FileContract"
+
+PDF_MAGIC_BYTES = b"%PDF-1.4 fake pdf content for testing"
+PNG_MAGIC_BYTES = b"\x89PNG\r\n\x1a\n fake png content for testing"
+TEXT_CONTENT = b"This is a simple text file content for testing purposes."
+
+
+class SchemaFileObject:
+ @pytest.fixture(scope="class")
+ def schema_file_contract(self) -> NodeSchema:
+ return NodeSchema(
+ name="FileContract",
+ namespace=NAMESPACE,
+ include_in_menu=True,
+ inherit_from=["CoreFileObject"],
+ display_label="file_name__value",
+ human_friendly_id=["contract_ref__value"],
+ order_by=["contract_ref__value"],
+ attributes=[
+ Attr(name="contract_ref", kind=AttributeKind.TEXT, unique=True),
+ Attr(name="description", kind=AttributeKind.TEXT, optional=True),
+ Attr(name="active", kind=AttributeKind.BOOLEAN, default_value=True, optional=True),
+ ],
+ )
+
+ @pytest.fixture(scope="class")
+ def schema_file_object_base(self, schema_file_contract: NodeSchema) -> SchemaRoot:
+ return SchemaRoot(version="1.0", nodes=[schema_file_contract])
+
+ @pytest.fixture(scope="class")
+ async def load_file_object_schema(self, client: InfrahubClient, schema_file_object_base: SchemaRoot) -> None:
+ await client.schema.load(schemas=[schema_file_object_base.to_schema_dict()], wait_until_converged=True)
+
+ @pytest.fixture(scope="class")
+ def load_file_object_schema_sync(
+ self, client_sync: InfrahubClientSync, schema_file_object_base: SchemaRoot
+ ) -> None:
+ client_sync.schema.load(schemas=[schema_file_object_base.to_schema_dict()], wait_until_converged=True)
diff --git a/infrahub_sdk/timestamp.py b/infrahub_sdk/timestamp.py
index 07de7b40..fd69122e 100644
--- a/infrahub_sdk/timestamp.py
+++ b/infrahub_sdk/timestamp.py
@@ -92,7 +92,11 @@ def _parse_string(cls, value: str) -> ZonedDateTime:
params["hours"] = float(match.group(1))
if params:
- return ZonedDateTime.now("UTC").subtract(**params)
+ return ZonedDateTime.now("UTC").subtract(
+ seconds=params.get("seconds", 0.0),
+ minutes=params.get("minutes", 0.0),
+ hours=params.get("hours", 0.0),
+ )
raise TimestampFormatError(f"Invalid time format for {value}")
diff --git a/infrahub_sdk/topological_sort.py b/infrahub_sdk/topological_sort.py
index a323d440..58047bd4 100644
--- a/infrahub_sdk/topological_sort.py
+++ b/infrahub_sdk/topological_sort.py
@@ -61,9 +61,9 @@ def _get_cycles(dependency_dict: dict[str, Iterable[str]], path: list[str]) -> l
cycles = []
for next_node in next_nodes:
if next_node in path:
- cycles.append(path[path.index(next_node) :] + [next_node])
+ cycles.append([*path[path.index(next_node) :], next_node])
else:
- next_cycles = _get_cycles(dependency_dict, path + [next_node])
+ next_cycles = _get_cycles(dependency_dict, [*path, next_node])
if next_cycles:
cycles += next_cycles
return cycles
diff --git a/infrahub_sdk/utils.py b/infrahub_sdk/utils.py
index 6168664b..de9bd625 100644
--- a/infrahub_sdk/utils.py
+++ b/infrahub_sdk/utils.py
@@ -145,7 +145,7 @@ def deep_merge_dict(dicta: dict, dictb: dict, path: list | None = None) -> dict:
if key in dicta:
a_val = dicta[key]
if isinstance(a_val, dict) and isinstance(b_val, dict):
- deep_merge_dict(a_val, b_val, path + [str(key)])
+ deep_merge_dict(a_val, b_val, [*path, str(key)])
elif isinstance(a_val, list) and isinstance(b_val, list):
# Merge lists
# Cannot use compare_list because list of dicts won't work (dict not hashable)
@@ -155,7 +155,7 @@ def deep_merge_dict(dicta: dict, dictb: dict, path: list | None = None) -> dict:
elif a_val == b_val or (a_val is not None and b_val is None):
continue
else:
- raise ValueError("Conflict at %s" % ".".join(path + [str(key)]))
+ raise ValueError("Conflict at %s" % ".".join([*path, str(key)]))
else:
dicta[key] = b_val
return dicta
diff --git a/pyproject.toml b/pyproject.toml
index 1f2467a3..4e0716a6 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "infrahub-sdk"
-version = "1.18.1"
+version = "1.19.0"
description = "Python Client to interact with Infrahub"
authors = [
{name = "OpsMill", email = "info@opsmill.com"}
@@ -51,6 +51,7 @@ ctl = [
"typer>=0.12.5",
"click==8.1.*",
"ariadne-codegen==0.15.3",
+ "mdxify>=0.2.23; python_version>='3.10'",
]
all = [
@@ -69,7 +70,7 @@ all = [
[dependency-groups]
# Core optional dependencies
tests = [
- "infrahub-testcontainers>=1.5.1",
+ "infrahub-testcontainers>=1.7.3",
"pytest>=9.0,<9.1",
"pytest-asyncio>=1.3,<1.4",
"pytest-clarity>=1.0.1",
@@ -95,7 +96,7 @@ dev = [
{include-group = "types"},
"ipython",
"requests",
- "pre-commit>=2.20.0",
+ "prek>=0.3.0",
"codecov",
"invoke>=2.2.0",
"towncrier>=24.8.0",
@@ -126,18 +127,38 @@ python-version = "3.10"
include = ["infrahub_sdk/**"]
[tool.ty.overrides.rules]
-##################################################################################################
-# The ignored rules below should be removed once the code has been updated, they are included #
-# like this so that we can reactivate them one by one. #
-##################################################################################################
-invalid-argument-type = "ignore"
-invalid-assignment = "ignore"
-invalid-await = "ignore"
-invalid-type-form = "ignore"
-no-matching-overload = "ignore"
-unresolved-attribute = "ignore"
unused-ignore-comment = "ignore" # Clashes with mypy's type ignore comments
+# File-specific overrides for remaining type violations
+# Fix these incrementally by addressing violations and removing the override
+
+
+[[tool.ty.overrides]]
+include = ["infrahub_sdk/checks.py"]
+
+[tool.ty.overrides.rules]
+invalid-await = "ignore" # 1 violation
+
+[[tool.ty.overrides]]
+include = ["infrahub_sdk/file_handler.py", "infrahub_sdk/utils.py"]
+
+[tool.ty.overrides.rules]
+unresolved-attribute = "ignore" # 5 violations total (1 in file_handler.py, 4 in utils.py)
+
+[[tool.ty.overrides]]
+include = ["infrahub_sdk/transfer/**"]
+
+[tool.ty.overrides.rules]
+invalid-argument-type = "ignore" # 2 violations in importer/json.py
+invalid-assignment = "ignore" # 1 violation in importer/json.py
+
+[[tool.ty.overrides]]
+include = ["infrahub_sdk/node/node.py"]
+
+[tool.ty.overrides.rules]
+invalid-argument-type = "ignore" # 9 violations - lines 776, 855, 859, 862
+
+
[[tool.ty.overrides]]
include = ["infrahub_sdk/ctl/config.py"]
@@ -149,19 +170,82 @@ unresolved-import = "ignore" # import tomli as tomllib when running on later ver
include = ["tests/**"]
[tool.ty.overrides.rules]
-##################################################################################################
-# The ignored rules below should be removed once the code has been updated, they are included #
-# like this so that we can reactivate them one by one. #
-##################################################################################################
-invalid-argument-type = "ignore"
+unused-ignore-comment = "ignore" # Clashes with mypy's type ignore comments
+
+[[tool.ty.overrides]]
+include = ["tests/fixtures/**"]
+
+[tool.ty.overrides.rules]
+invalid-argument-type = "ignore" # Test fixtures - dynamic mock data
+possibly-missing-attribute = "ignore" # Test fixtures use dynamic attributes
+
+# Test-specific overrides - tests have more lenient type checking
+# Fix these incrementally, starting with files that have fewer violations
+
+[[tool.ty.overrides]]
+include = ["tests/unit/sdk/conftest.py"]
+
+[tool.ty.overrides.rules]
+invalid-argument-type = "ignore" # 434 violations - test fixtures with dynamic types
+
+[[tool.ty.overrides]]
+include = [
+ "tests/unit/sdk/test_node.py",
+ "tests/unit/sdk/test_hierarchical_nodes.py",
+ "tests/unit/sdk/test_schema.py",
+]
+
+[tool.ty.overrides.rules]
+invalid-argument-type = "ignore" # 97 violations total across these files
invalid-assignment = "ignore"
-invalid-method-override = "ignore"
-no-matching-overload = "ignore"
not-subscriptable = "ignore"
not-iterable = "ignore"
-possibly-missing-attribute = "ignore"
unresolved-attribute = "ignore"
-unused-ignore-comment = "ignore" # Clashes with mypy's type ignore comments
+possibly-missing-attribute = "ignore"
+
+[[tool.ty.overrides]]
+include = [
+ "tests/integration/**",
+ "tests/unit/sdk/test_store_branch.py",
+ "tests/unit/sdk/test_repository.py",
+]
+
+[tool.ty.overrides.rules]
+invalid-argument-type = "ignore" # ~120 violations across integration tests
+invalid-assignment = "ignore"
+no-matching-overload = "ignore"
+possibly-missing-attribute = "ignore" # Tests use dynamic node attributes
+
+[[tool.ty.overrides]]
+include = [
+ "tests/unit/sdk/spec/test_object.py",
+ "tests/unit/sdk/test_client.py",
+ "tests/unit/ctl/test_graphql_utils.py",
+]
+
+[tool.ty.overrides.rules]
+invalid-argument-type = "ignore" # 29 violations
+invalid-assignment = "ignore"
+no-matching-overload = "ignore"
+possibly-missing-attribute = "ignore"
+
+[[tool.ty.overrides]]
+include = [
+ "tests/unit/sdk/test_artifact.py",
+ "tests/unit/sdk/test_group_context.py",
+ "tests/unit/sdk/test_utils.py",
+ "tests/unit/sdk/checks/test_checks.py",
+ "tests/unit/sdk/graphql/test_plugin.py",
+ "tests/unit/sdk/test_protocols_generator.py",
+ "tests/unit/sdk/test_schema_sorter.py",
+ "tests/unit/sdk/test_topological_sort.py",
+ "tests/unit/sdk/test_schema_export.py",
+]
+
+[tool.ty.overrides.rules]
+invalid-argument-type = "ignore" # Remaining files with 1-5 violations each
+invalid-method-override = "ignore"
+no-matching-overload = "ignore"
[[tool.ty.overrides]]
include = ["docs/**"]
@@ -237,18 +321,15 @@ ignore = [
# investigation if they are deemed to not make sense. #
##################################################################################################
"B008", # Do not perform function call `typer.Option` in argument defaults; instead, perform the call within the function, or read the default from a module-level singleton variable
- "B904", # Within an `except` clause, raise exceptions with `raise ... from err` or `raise ... from None` to distinguish them from errors in exception handling
"N802", # Function name should be lowercase
"PERF203", # `try`-`except` within a loop incurs performance overhead
"PLR0913", # Too many arguments in function definition
"PLR0917", # Too many positional arguments
"PLR2004", # Magic value used in comparison
"PLR6301", # Method could be a function, class method, or static method
- "RUF005", # Consider `[*path, str(key)]` instead of concatenation
"RUF029", # Function is declared `async`, but doesn't `await` or use `async` features.
"RUF067", # `__init__` module should only contain docstrings and re-exports
"S311", # Standard pseudo-random generators are not suitable for cryptographic purposes
- "SIM108", # Use ternary operator `key_str = f"{value[ALIAS_KEY]}: {key}" if ALIAS_KEY in value and value[ALIAS_KEY] else key` instead of `if`-`else`-block
"TC003", # Move standard library import `collections.abc.Iterable` into a type-checking block
"UP031", # Use format specifiers instead of percent format
]
@@ -291,7 +372,6 @@ max-complexity = 17
"ANN202", # Missing return type annotation for private function
"ANN401", # Dynamically typed expressions (typing.Any) are disallowed
"ASYNC240", # Async functions should not use pathlib.Path methods, use trio.Path or anyio.path
- "PT013", # Incorrect import of `pytest`; use `import pytest` instead
]
"infrahub_sdk/client.py" = [
@@ -306,6 +386,7 @@ max-complexity = 17
# Review and change the below later #
##################################################################################################
"PLR0912", # Too many branches
+ "PLR0904", # Too many public methods
]
"infrahub_sdk/node/related_node.py" = [
@@ -347,12 +428,11 @@ max-complexity = 17
"S106", # Possible hardcoded password assigned to argument
"ARG001", # Unused function argument
"ARG002", # Unused method argument
- "PT006", # Wrong type passed to first argument of `pytest.mark.parametrize`; expected `tuple`
"PT011", # `pytest.raises(ValueError)` is too broad, set the `match` parameter or use a more specific exception
"PT012", # `pytest.raises()` block should contain a single simple statement
- "PT013", # Incorrect import of `pytest`; use `import pytest` instead
]
+
# tests/integration/
"tests/integration/test_infrahub_client.py" = ["PLR0904"]
"tests/integration/test_infrahub_client_sync.py" = ["PLR0904"]
diff --git a/tasks.py b/tasks.py
index 24dcb642..32b13ae6 100644
--- a/tasks.py
+++ b/tasks.py
@@ -1,32 +1,64 @@
-import asyncio
+from __future__ import annotations
+
import json
+import operator
+import shutil
import sys
+from functools import reduce
from pathlib import Path
from shutil import which
-from typing import Any
+from typing import TYPE_CHECKING
+
+from invoke import Context, Exit, task
+
+if TYPE_CHECKING:
+ from docs.docs_generation.content_gen_methods.command.typer_command import ATyperCommand
-from invoke import Context, task
+from docs.docs_generation.content_gen_methods.mdx.mdx_priority import PagePriority
CURRENT_DIRECTORY = Path(__file__).resolve()
DOCUMENTATION_DIRECTORY = CURRENT_DIRECTORY.parent / "docs"
MAIN_DIRECTORY_PATH = Path(__file__).parent
-
-def is_tool_installed(name: str) -> bool:
- """Check whether `name` is on PATH and marked as executable."""
- return which(name) is not None
-
-
-def _generate(context: Context) -> None:
- """Generate documentation output from code."""
+# Priority ordering for generated API documentation pages.
+# Keys match the mdxify output filenames (same keys used in generated_files dict).
+PAGE_PRIORITIES: dict[str, PagePriority] = {
+ "infrahub_sdk-client.mdx": PagePriority(
+ sections=["Classes"],
+ classes=["InfrahubClient", "InfrahubClientSync"],
+ methods={
+ "InfrahubClient": ["get", "delete", "create"],
+ "InfrahubClientSync": ["get", "delete", "create"],
+ },
+ ),
+ "infrahub_sdk-node-node.mdx": PagePriority(
+ classes=["InfrahubNode", "InfrahubNodeSync"],
+ ),
+}
+
+
+def require_tool(name: str, install_hint: str) -> None:
+ """Raise ``Exit`` if *name* is not found on PATH."""
+ if which(name) is None:
+ raise Exit(f" - {name} is not installed. {install_hint}", code=1)
+
+
+@task(name="docs-generate")
+def docs_generate(context: Context) -> None:
+ """Generate all documentation (infrahubctl CLI + Python SDK)."""
_generate_infrahubctl_documentation(context=context)
- _generate_infrahub_sdk_configuration_documentation()
- _generate_infrahub_sdk_template_documentation()
+ generate_python_sdk(context)
def _generate_infrahubctl_documentation(context: Context) -> None:
"""Generate the documentation for infrahubctl CLI using typer-cli."""
+ from docs.docs_generation.content_gen_methods import (
+ CommandOutputDocContentGenMethod,
+ TyperGroupCommand,
+ TyperSingleCommand,
+ )
+ from docs.docs_generation.pages import DocPage, MDXDocPage
from infrahub_sdk.ctl.cli import app
output_dir = DOCUMENTATION_DIRECTORY / "docs" / "infrahubctl"
@@ -37,113 +69,158 @@ def _generate_infrahubctl_documentation(context: Context) -> None:
file.unlink()
print(" - Generate infrahubctl CLI documentation")
- for cmd in app.registered_commands:
- if cmd.hidden:
- continue
- exec_cmd = (
- f'uv run typer --func {cmd.name} infrahub_sdk.ctl.cli_commands utils docs --name "infrahubctl {cmd.name}"'
+ commands: list[ATyperCommand] = [
+ TyperSingleCommand(name=cmd.name) for cmd in app.registered_commands if not cmd.hidden and cmd.name
+ ]
+ commands.extend(TyperGroupCommand(name=cmd.name) for cmd in app.registered_groups if not cmd.hidden and cmd.name)
+
+ for typer_cmd in commands:
+ # Generating one documentation page for one command
+ page = DocPage(
+ content_gen_method=CommandOutputDocContentGenMethod(
+ context=context,
+ working_directory=MAIN_DIRECTORY_PATH,
+ command=typer_cmd,
+ ),
)
- exec_cmd += f" --output docs/docs/infrahubctl/infrahubctl-{cmd.name}.mdx"
- with context.cd(MAIN_DIRECTORY_PATH):
- context.run(exec_cmd)
-
- for cmd in app.registered_groups:
- if cmd.hidden:
- continue
- exec_cmd = f"uv run typer infrahub_sdk.ctl.{cmd.name} utils docs"
- exec_cmd += f' --name "infrahubctl {cmd.name}" --output docs/docs/infrahubctl/infrahubctl-{cmd.name}.mdx'
- with context.cd(MAIN_DIRECTORY_PATH):
- context.run(exec_cmd)
+ output_path = output_dir / f"infrahubctl-{typer_cmd.name}.mdx"
+ MDXDocPage(page=page, output_path=output_path).to_mdx()
def _generate_infrahub_sdk_configuration_documentation() -> None:
"""Generate documentation for the Infrahub SDK configuration."""
- import jinja2
-
- from infrahub_sdk.config import ConfigBase
-
- schema = ConfigBase.model_json_schema()
- env_vars = _get_env_vars()
- definitions = schema.get("$defs", {})
-
- properties = []
- for name, prop in schema["properties"].items():
- choices: list[dict[str, Any]] = []
- kind = ""
- composed_type = ""
-
- if "allOf" in prop:
- choices = definitions.get(prop["allOf"][0]["$ref"].split("/")[-1], {}).get("enum", [])
- kind = definitions.get(prop["allOf"][0]["$ref"].split("/")[-1], {}).get("type", "")
-
- if "anyOf" in prop:
- composed_type = ", ".join(i["type"] for i in prop.get("anyOf", []) if "type" in i and i["type"] != "null")
-
- properties.append(
- {
- "name": name,
- "description": prop.get("description", ""),
- "type": prop.get("type", kind) or composed_type or "object",
- "choices": choices,
- "default": prop.get("default", ""),
- "env_vars": env_vars.get(name, []),
- }
- )
+ from docs.docs_generation.content_gen_methods import Jinja2DocContentGenMethod
+ from docs.docs_generation.helpers import build_config_properties
+ from docs.docs_generation.pages import DocPage, MDXDocPage
+ from infrahub_sdk.template import Jinja2Template
print(" - Generate Infrahub SDK configuration documentation")
-
- template_file = DOCUMENTATION_DIRECTORY / "_templates" / "sdk_config.j2"
- output_file = DOCUMENTATION_DIRECTORY / "docs" / "python-sdk" / "reference" / "config.mdx"
-
- if not template_file.exists():
- print(f"Unable to find the template file at {template_file}")
- sys.exit(-1)
-
- template_text = template_file.read_text(encoding="utf-8")
-
- environment = jinja2.Environment(trim_blocks=True, autoescape=jinja2.select_autoescape(default_for_string=False))
- template = environment.from_string(template_text)
- rendered_file = template.render(properties=properties)
-
- output_file.write_text(rendered_file, encoding="utf-8")
- print(f"Docs saved to: {output_file}")
+ # Generating one documentation page for the ConfigBase.model_json_schema()
+ page = DocPage(
+ content_gen_method=Jinja2DocContentGenMethod(
+ template=Jinja2Template(
+ template=Path("sdk_config.j2"),
+ template_directory=DOCUMENTATION_DIRECTORY / "_templates",
+ ),
+ template_variables={"properties": build_config_properties()},
+ ),
+ )
+ output_path = DOCUMENTATION_DIRECTORY / "docs" / "python-sdk" / "reference" / "config.mdx"
+ MDXDocPage(page=page, output_path=output_path).to_mdx()
def _generate_infrahub_sdk_template_documentation() -> None:
"""Generate documentation for the Infrahub SDK template reference."""
+ from docs.docs_generation.content_gen_methods import Jinja2DocContentGenMethod
+ from docs.docs_generation.pages import DocPage, MDXDocPage
from infrahub_sdk.template import Jinja2Template
from infrahub_sdk.template.filters import BUILTIN_FILTERS, NETUTILS_FILTERS
- output_file = DOCUMENTATION_DIRECTORY / "docs" / "python-sdk" / "reference" / "templating.mdx"
- jinja2_template = Jinja2Template(
- template=Path("sdk_template_reference.j2"),
- template_directory=DOCUMENTATION_DIRECTORY / "_templates",
+ print(" - Generate Infrahub SDK template documentation")
+ # Generating one documentation page for template documentation
+ page = DocPage(
+ content_gen_method=Jinja2DocContentGenMethod(
+ template=Jinja2Template(
+ template=Path("sdk_template_reference.j2"),
+ template_directory=DOCUMENTATION_DIRECTORY / "_templates",
+ ),
+ template_variables={"builtin": BUILTIN_FILTERS, "netutils": NETUTILS_FILTERS},
+ ),
)
+ output_path = DOCUMENTATION_DIRECTORY / "docs" / "python-sdk" / "reference" / "templating.mdx"
+ MDXDocPage(page=page, output_path=output_path).to_mdx()
+
+
+def get_modules_to_document() -> list[str]:
+ """Return the list of Python module paths to document with mdxify.
+
+ Auto-discovers packages under ``infrahub_sdk/`` and validates that every
+ discovered package is explicitly categorised as either *to document* or
+ *to ignore*. Individual ``.py`` modules can be added via
+ ``extra_modules_to_document``.
+ """
+ # Packages (sub-folders of infrahub_sdk/) to document.
+ # Passed to mdxify as "infrahub_sdk.<package>".
+ packages_to_document = [
+ "node",
+ ]
+
+ # Packages explicitly ignored for API doc generation.
+ packages_to_ignore = [
+ "ctl",
+ "graphql",
+ "protocols_generator",
+ "pytest_plugin",
+ "schema",
+ "spec",
+ "task",
+ "template",
+ "testing",
+ "transfer",
+ ]
+
+ # Extra modules (individual .py files, not packages) to document.
+ extra_modules_to_document = [
+ "infrahub_sdk.client",
+ ]
+
+ # Auto-discover all packages under infrahub_sdk/
+ sdk_dir = Path(__file__).parent / "infrahub_sdk"
+ discovered_packages = {d.name for d in sdk_dir.iterdir() if d.is_dir() and (d / "__init__.py").exists()}
+
+ # Validate that every discovered package is categorized and vice versa
+ declared = set(packages_to_document) | set(packages_to_ignore)
+ uncategorized = discovered_packages - declared
+ unknown = declared - discovered_packages
+
+ if uncategorized:
+ raise ValueError(
+ f"Uncategorized packages under infrahub_sdk/: {sorted(uncategorized)}. "
+ "Add them to packages_to_document or packages_to_ignore in tasks.py"
+ )
+
+ if unknown:
+ raise ValueError(f"Declared packages that no longer exist: {sorted(unknown)}")
+
+ return [f"infrahub_sdk.{pkg}" for pkg in packages_to_document] + extra_modules_to_document
+
- rendered_file = asyncio.run(
- jinja2_template.render(variables={"builtin": BUILTIN_FILTERS, "netutils": NETUTILS_FILTERS})
+@task(name="generate-sdk-api-docs")
+def _generate_sdk_api_docs(context: Context) -> None:
+ """Generate API documentation for the Python SDK."""
+ from docs.docs_generation.content_gen_methods import (
+ CollapsedOverloadCodeDocumentation,
+ FilePrintingDocContentGenMethod,
+ MdxCodeDocumentation,
+ OrderedMdxCodeDocumentation,
)
- output_file.write_text(rendered_file, encoding="utf-8")
- print(f"Docs saved to: {output_file}")
+ from docs.docs_generation.pages import DocPage, MDXDocPage
+ print(" - Generate Python SDK API documentation")
+ require_tool("mdxify", "Install it with: uv sync --all-groups --all-extras")
-def _get_env_vars() -> dict[str, list[str]]:
- """Retrieve environment variables for Infrahub SDK configuration."""
- from collections import defaultdict
+ modules_to_document = get_modules_to_document()
- from pydantic_settings import EnvSettingsSource
+ output_dir = DOCUMENTATION_DIRECTORY / "docs" / "python-sdk" / "sdk_ref"
- from infrahub_sdk.config import ConfigBase
+ if (output_dir / "infrahub_sdk").exists():
+ shutil.rmtree(output_dir / "infrahub_sdk")
- env_vars: dict[str, list[str]] = defaultdict(list)
- settings = ConfigBase()
- env_settings = EnvSettingsSource(settings.__class__, env_prefix=settings.model_config.get("env_prefix", ""))
+ documentation = CollapsedOverloadCodeDocumentation(
+ documentation=OrderedMdxCodeDocumentation(
+ documentation=MdxCodeDocumentation(),
+ page_priorities=PAGE_PRIORITIES,
+ )
+ )
+ generated_files = documentation.generate(context=context, modules_to_document=modules_to_document)
- for field_name, field in settings.model_fields.items():
- for field_key, field_env_name, _ in env_settings._extract_field_info(field, field_name):
- env_vars[field_key].append(field_env_name.upper())
+ for file_key, mdxified_file in generated_files.items():
+ page = DocPage(content_gen_method=FilePrintingDocContentGenMethod(file=mdxified_file))
+ target_path = output_dir / reduce(operator.truediv, (Path(part) for part in file_key.split("-")))
+ MDXDocPage(page=page, output_path=target_path).to_mdx()
- return env_vars
+ with context.cd(DOCUMENTATION_DIRECTORY):
+ context.run(f"npx --no-install markdownlint-cli2 {output_dir}/ --fix --config .markdownlint.yaml", pty=True)
@task
@@ -195,24 +272,18 @@ def lint_ruff(context: Context) -> None:
@task
def lint_markdownlint(context: Context) -> None:
"""Run markdownlint to check all markdown files."""
- if not is_tool_installed("markdownlint-cli2"):
- print(" - markdownlint-cli2 is not installed, skipping documentation linting")
- return
-
print(" - Check documentation with markdownlint-cli2")
- exec_cmd = "markdownlint-cli2 **/*.{md,mdx} --config .markdownlint.yaml"
- with context.cd(MAIN_DIRECTORY_PATH):
+ exec_cmd = "npx --no-install markdownlint-cli2 **/*.{md,mdx} !node_modules/** --config .markdownlint.yaml"
+ with context.cd(DOCUMENTATION_DIRECTORY):
context.run(exec_cmd)
@task
def lint_vale(context: Context) -> None:
"""Run vale to check all documentation files."""
- if not is_tool_installed("vale"):
- print(" - vale is not installed, skipping documentation style linting")
- return
-
print(" - Check documentation style with vale")
+ require_tool("vale", "Install it from: https://vale.sh/docs/install")
+
exec_cmd = r'vale $(find ./docs -type f \( -name "*.mdx" -o -name "*.md" \) -not -path "*/node_modules/*")'
with context.cd(MAIN_DIRECTORY_PATH):
context.run(exec_cmd)
@@ -243,11 +314,27 @@ def lint_all(context: Context) -> None:
@task(name="docs-validate")
def docs_validate(context: Context) -> None:
- """Validate that the generated documentation is committed to Git."""
- _generate(context=context)
- exec_cmd = "git diff --exit-code docs"
- with context.cd(MAIN_DIRECTORY_PATH):
- context.run(exec_cmd)
+ """Validate that the generated documentation matches the committed version.
+
+ Regenerates all documentation and checks for modified, deleted, or new
+ untracked files under docs/. Exits with a non-zero code and a descriptive
+ message when the working tree diverges from what is committed.
+ """
+ docs_generate(context)
+ with context.cd(DOCUMENTATION_DIRECTORY):
+ diff_result = context.run("git diff --name-only docs", hide=True)
+ changed_files = diff_result.stdout.strip() if diff_result else ""
+ untracked_result = context.run("git ls-files --others --exclude-standard docs", hide=True)
+ untracked_files = untracked_result.stdout.strip() if untracked_result else ""
+
+ if changed_files or untracked_files:
+ message = "Generated documentation is out of sync with the committed version.\n"
+ message += "Run 'uv run invoke docs-generate' and commit the result.\n\n"
+ if changed_files:
+ message += f"Modified or deleted files:\n{changed_files}\n\n"
+ if untracked_files:
+ message += f"New untracked files:\n{untracked_files}\n"
+ raise Exit(message, code=1)
@task(name="docs")
@@ -269,10 +356,11 @@ def generate_infrahubctl(context: Context) -> None:
@task(name="generate-sdk")
-def generate_python_sdk(context: Context) -> None: # noqa: ARG001
+def generate_python_sdk(context: Context) -> None:
"""Generate documentation for the Python SDK."""
_generate_infrahub_sdk_configuration_documentation()
_generate_infrahub_sdk_template_documentation()
+ _generate_sdk_api_docs(context)
@task
diff --git a/tests/AGENTS.md b/tests/AGENTS.md
index f3608ead..cce67364 100644
--- a/tests/AGENTS.md
+++ b/tests/AGENTS.md
@@ -17,6 +17,12 @@ uv run pytest tests/unit/test_client.py # Single file
```text
tests/
├── unit/ # Fast, mocked, no external deps
+│ ├── ctl/ # CLI command tests
+│ └── sdk/ # SDK tests
+│ ├── pool/ # Resource pool allocation tests
+│ ├── spec/ # Object spec tests
+│ ├── checks/ # InfrahubCheck tests
+│ └── ... # Core SDK tests (client, node, schema, etc.)
├── integration/ # Real Infrahub via testcontainers
├── fixtures/ # Test data (JSON, YAML)
└── helpers/ # Test utilities
diff --git a/tests/integration/test_export_import.py b/tests/integration/test_export_import.py
index a138728f..d317d5e7 100644
--- a/tests/integration/test_export_import.py
+++ b/tests/integration/test_export_import.py
@@ -15,8 +15,6 @@
from infrahub_sdk.transfer.schema_sorter import InfrahubSchemaTopologicalSorter
if TYPE_CHECKING:
- from pytest import TempPathFactory
-
from infrahub_sdk import InfrahubClient
from infrahub_sdk.node import InfrahubNode
from infrahub_sdk.schema import SchemaRoot
@@ -24,7 +22,7 @@
class TestSchemaExportImportBase(TestInfrahubDockerClient, SchemaCarPerson):
@pytest.fixture(scope="class")
- def temporary_directory(self, tmp_path_factory: TempPathFactory) -> Path:
+ def temporary_directory(self, tmp_path_factory: pytest.TempPathFactory) -> Path:
return tmp_path_factory.mktemp("infrahub-integration-tests")
@pytest.fixture(scope="class")
@@ -189,7 +187,7 @@ async def test_step99_import_wrong_directory(self, client: InfrahubClient) -> No
class TestSchemaExportImportManyRelationships(TestInfrahubDockerClient, SchemaCarPerson):
@pytest.fixture(scope="class")
- def temporary_directory(self, tmp_path_factory: TempPathFactory) -> Path:
+ def temporary_directory(self, tmp_path_factory: pytest.TempPathFactory) -> Path:
return tmp_path_factory.mktemp("infrahub-integration-tests-many")
@pytest.fixture(scope="class")
diff --git a/tests/integration/test_file_object.py b/tests/integration/test_file_object.py
new file mode 100644
index 00000000..49dd7421
--- /dev/null
+++ b/tests/integration/test_file_object.py
@@ -0,0 +1,260 @@
+from __future__ import annotations
+
+import hashlib
+import tempfile
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+import pytest
+
+from infrahub_sdk.testing.docker import TestInfrahubDockerClient
+from infrahub_sdk.testing.schemas.file_object import (
+ PDF_MAGIC_BYTES,
+ PNG_MAGIC_BYTES,
+ TESTING_FILE_CONTRACT,
+ TEXT_CONTENT,
+ SchemaFileObject,
+)
+
+if TYPE_CHECKING:
+ from infrahub_sdk import InfrahubClient, InfrahubClientSync
+
+
+@pytest.mark.xfail(reason="Requires Infrahub 1.8+")
+class TestFileObjectAsync(TestInfrahubDockerClient, SchemaFileObject):
+ """Async integration tests for FileObject functionality."""
+
+ async def test_create_file_object_with_upload(self, client: InfrahubClient, load_file_object_schema: None) -> None:
+ """Test creating FileObject nodes with both upload_from_bytes and upload_from_path."""
+ contract_bytes = await client.create(
+ kind=TESTING_FILE_CONTRACT,
+ contract_ref="CONTRACT-CREATE-BYTES-001",
+ description="Test contract with bytes upload",
+ )
+ contract_bytes.upload_from_bytes(content=PDF_MAGIC_BYTES, name="contract.pdf")
+ await contract_bytes.save()
+
+ fetched = await client.get(kind=TESTING_FILE_CONTRACT, id=contract_bytes.id)
+ assert fetched.contract_ref.value == "CONTRACT-CREATE-BYTES-001"
+ assert fetched.file_name.value == "contract.pdf"
+ assert fetched.file_size.value == len(PDF_MAGIC_BYTES)
+ assert fetched.checksum.value == hashlib.sha1(PDF_MAGIC_BYTES, usedforsecurity=False).hexdigest()
+ assert fetched.storage_id.value
+
+ with tempfile.TemporaryDirectory() as tmpdir:
+ tmp_path = Path(tmpdir) / "upload_test.txt"
+ tmp_path.write_bytes(TEXT_CONTENT)
+
+ contract_path = await client.create(
+ kind=TESTING_FILE_CONTRACT,
+ contract_ref="CONTRACT-CREATE-PATH-001",
+ description="Test contract from path",
+ )
+ contract_path.upload_from_path(path=tmp_path)
+ await contract_path.save()
+
+ fetched = await client.get(kind=TESTING_FILE_CONTRACT, id=contract_path.id)
+ assert fetched.file_name.value == tmp_path.name
+ assert fetched.file_size.value == len(TEXT_CONTENT)
+ assert fetched.checksum.value == hashlib.sha1(TEXT_CONTENT, usedforsecurity=False).hexdigest()
+ assert fetched.storage_id.value
+
+ async def test_update_file_object_with_new_file(
+ self, client: InfrahubClient, load_file_object_schema: None
+ ) -> None:
+ """Test updating a FileObject node with a new file."""
+ contract = await client.create(
+ kind=TESTING_FILE_CONTRACT, contract_ref="CONTRACT-UPDATE-001", description="Initial contract"
+ )
+ contract.upload_from_bytes(content=PDF_MAGIC_BYTES, name="initial.pdf")
+ await contract.save()
+
+ contract_to_update = await client.get(kind=TESTING_FILE_CONTRACT, id=contract.id, populate_store=False)
+ contract_to_update.description.value = "Updated contract"
+ contract_to_update.upload_from_bytes(content=PNG_MAGIC_BYTES, name="updated.png")
+ await contract_to_update.save()
+
+ updated = await client.get(kind=TESTING_FILE_CONTRACT, id=contract.id, populate_store=False)
+ assert updated.description.value == "Updated contract"
+ assert updated.file_name.value == "updated.png"
+ assert updated.storage_id.value != contract.storage_id.value
+ assert updated.checksum.value != contract.checksum.value
+
+ async def test_upsert_file_object_update(self, client: InfrahubClient, load_file_object_schema: None) -> None:
+ """Test upserting an existing FileObject node updates it rather than creating a duplicate."""
+ contract = await client.create(
+ kind=TESTING_FILE_CONTRACT, contract_ref="CONTRACT-UPSERT-001", description="Original"
+ )
+ contract.upload_from_bytes(content=PDF_MAGIC_BYTES, name="original.pdf")
+ await contract.save()
+
+ contract_upsert = await client.create(
+ kind=TESTING_FILE_CONTRACT,
+ contract_ref="CONTRACT-UPSERT-001",
+ description="Upserted update",
+ )
+ contract_upsert.upload_from_bytes(content=PNG_MAGIC_BYTES, name="upserted.png")
+ await contract_upsert.save(allow_upsert=True)
+ assert contract_upsert.id == contract.id
+
+ updated = await client.get(kind=TESTING_FILE_CONTRACT, id=contract.id, populate_store=False)
+ assert updated.description.value == "Upserted update"
+ assert updated.file_name.value == "upserted.png"
+ assert updated.storage_id.value != contract.storage_id.value
+
+ async def test_download_file(self, client: InfrahubClient, load_file_object_schema: None) -> None:
+ """Test downloading files to memory and to disk."""
+ contract = await client.create(
+ kind=TESTING_FILE_CONTRACT, contract_ref="CONTRACT-DOWNLOAD-001", description="Download test"
+ )
+ contract.upload_from_bytes(content=TEXT_CONTENT, name="download_test.txt")
+ await contract.save()
+
+ fetched = await client.get(kind=TESTING_FILE_CONTRACT, id=contract.id, populate_store=False)
+ downloaded_content = await fetched.download_file()
+ assert downloaded_content == TEXT_CONTENT
+
+ with tempfile.TemporaryDirectory() as tmpdir:
+ dest_path = Path(tmpdir) / "downloaded.txt"
+ bytes_written = await fetched.download_file(dest=dest_path)
+ assert bytes_written == len(TEXT_CONTENT)
+ assert dest_path.read_bytes() == TEXT_CONTENT
+
+ async def test_update_without_file_change(self, client: InfrahubClient, load_file_object_schema: None) -> None:
+ """Test updating FileObject attributes without replacing the file."""
+ contract = await client.create(
+ kind=TESTING_FILE_CONTRACT, contract_ref="CONTRACT-META-001", description="Original description"
+ )
+ contract.upload_from_bytes(content=TEXT_CONTENT, name="unchanged.txt")
+ await contract.save()
+
+ contract_to_update = await client.get(kind=TESTING_FILE_CONTRACT, id=contract.id, populate_store=False)
+ contract_to_update.description.value = "Updated description"
+ await contract_to_update.save()
+
+ updated = await client.get(kind=TESTING_FILE_CONTRACT, id=contract.id)
+ assert updated.description.value == "Updated description"
+ assert updated.storage_id.value == contract_to_update.storage_id.value
+ assert updated.checksum.value == contract_to_update.checksum.value
+
+
+@pytest.mark.xfail(reason="Requires Infrahub 1.8+")
+class TestFileObjectSync(TestInfrahubDockerClient, SchemaFileObject):
+ """Sync integration tests for FileObject functionality."""
+
+ def test_create_file_object_with_upload_sync(
+ self, client_sync: InfrahubClientSync, load_file_object_schema_sync: None
+ ) -> None:
+ """Test creating FileObject nodes with both upload_from_bytes and upload_from_path (sync)."""
+ contract_bytes = client_sync.create(
+ kind=TESTING_FILE_CONTRACT,
+ contract_ref="CONTRACT-CREATE-BYTES-SYNC-001",
+ description="Test contract with bytes upload (sync)",
+ )
+ contract_bytes.upload_from_bytes(content=PDF_MAGIC_BYTES, name="contract_sync.pdf")
+ contract_bytes.save()
+
+ fetched = client_sync.get(kind=TESTING_FILE_CONTRACT, id=contract_bytes.id)
+ assert fetched.contract_ref.value == "CONTRACT-CREATE-BYTES-SYNC-001"
+ assert fetched.file_name.value == "contract_sync.pdf"
+ assert fetched.file_size.value == len(PDF_MAGIC_BYTES)
+ assert fetched.checksum.value == hashlib.sha1(PDF_MAGIC_BYTES, usedforsecurity=False).hexdigest()
+ assert fetched.storage_id.value
+
+ with tempfile.TemporaryDirectory() as tmpdir:
+ tmp_path = Path(tmpdir) / "upload_test_sync.txt"
+ tmp_path.write_bytes(TEXT_CONTENT)
+
+ contract_path = client_sync.create(
+ kind=TESTING_FILE_CONTRACT,
+ contract_ref="CONTRACT-CREATE-PATH-SYNC-001",
+ description="Test contract from path (sync)",
+ )
+ contract_path.upload_from_path(path=tmp_path)
+ contract_path.save()
+
+ fetched = client_sync.get(kind=TESTING_FILE_CONTRACT, id=contract_path.id)
+ assert fetched.file_name.value == tmp_path.name
+ assert fetched.file_size.value == len(TEXT_CONTENT)
+ assert fetched.checksum.value == hashlib.sha1(TEXT_CONTENT, usedforsecurity=False).hexdigest()
+ assert fetched.storage_id.value
+
+ def test_update_file_object_with_new_file_sync(
+ self, client_sync: InfrahubClientSync, load_file_object_schema_sync: None
+ ) -> None:
+ """Test updating a FileObject node with a new file (sync)."""
+ contract = client_sync.create(
+ kind=TESTING_FILE_CONTRACT, contract_ref="CONTRACT-UPDATE-SYNC-001", description="Initial contract sync"
+ )
+ contract.upload_from_bytes(content=PDF_MAGIC_BYTES, name="initial_sync.pdf")
+ contract.save()
+
+ contract_to_update = client_sync.get(kind=TESTING_FILE_CONTRACT, id=contract.id, populate_store=False)
+ contract_to_update.description.value = "Updated contract sync"
+ contract_to_update.upload_from_bytes(content=PNG_MAGIC_BYTES, name="updated_sync.png")
+ contract_to_update.save()
+
+ updated = client_sync.get(kind=TESTING_FILE_CONTRACT, id=contract.id, populate_store=False)
+ assert updated.description.value == "Updated contract sync"
+ assert updated.file_name.value == "updated_sync.png"
+ assert updated.storage_id.value != contract.storage_id.value
+ assert updated.checksum.value != contract.checksum.value
+
+ def test_upsert_file_object_update_sync(
+ self, client_sync: InfrahubClientSync, load_file_object_schema_sync: None
+ ) -> None:
+ """Test upserting an existing FileObject node updates it rather than creating a duplicate (sync)."""
+ contract = client_sync.create(
+ kind=TESTING_FILE_CONTRACT, contract_ref="CONTRACT-UPSERT-SYNC-001", description="Original sync"
+ )
+ contract.upload_from_bytes(content=PDF_MAGIC_BYTES, name="original_sync.pdf")
+ contract.save()
+
+ contract_upsert = client_sync.create(
+ kind=TESTING_FILE_CONTRACT, contract_ref="CONTRACT-UPSERT-SYNC-001", description="Upserted update sync"
+ )
+ contract_upsert.upload_from_bytes(content=PNG_MAGIC_BYTES, name="upserted_sync.png")
+ contract_upsert.save(allow_upsert=True)
+ assert contract_upsert.id == contract.id
+
+ updated = client_sync.get(kind=TESTING_FILE_CONTRACT, id=contract.id, populate_store=False)
+ assert updated.description.value == "Upserted update sync"
+ assert updated.file_name.value == "upserted_sync.png"
+ assert updated.storage_id.value != contract.storage_id.value
+
+ def test_download_file_sync(self, client_sync: InfrahubClientSync, load_file_object_schema_sync: None) -> None:
+ """Test downloading files to memory and to disk (sync)."""
+ contract = client_sync.create(
+ kind=TESTING_FILE_CONTRACT, contract_ref="CONTRACT-DOWNLOAD-SYNC-001", description="Download test sync"
+ )
+ contract.upload_from_bytes(content=TEXT_CONTENT, name="download_sync.txt")
+ contract.save()
+
+ fetched = client_sync.get(kind=TESTING_FILE_CONTRACT, id=contract.id, populate_store=False)
+ downloaded_content = fetched.download_file()
+ assert downloaded_content == TEXT_CONTENT
+
+ with tempfile.TemporaryDirectory() as tmpdir:
+ dest_path = Path(tmpdir) / "downloaded_sync.txt"
+ bytes_written = fetched.download_file(dest=dest_path)
+ assert bytes_written == len(TEXT_CONTENT)
+ assert dest_path.read_bytes() == TEXT_CONTENT
+
+ def test_update_without_file_change_sync(
+ self, client_sync: InfrahubClientSync, load_file_object_schema_sync: None
+ ) -> None:
+ """Test updating FileObject attributes without replacing the file (sync)."""
+ contract = client_sync.create(
+ kind=TESTING_FILE_CONTRACT, contract_ref="CONTRACT-META-SYNC-001", description="Original description sync"
+ )
+ contract.upload_from_bytes(content=TEXT_CONTENT, name="unchanged_sync.txt")
+ contract.save()
+
+ contract_to_update = client_sync.get(kind=TESTING_FILE_CONTRACT, id=contract.id, populate_store=False)
+ contract_to_update.description.value = "Updated description sync"
+ contract_to_update.save()
+
+ updated = client_sync.get(kind=TESTING_FILE_CONTRACT, id=contract.id)
+ assert updated.description.value == "Updated description sync"
+ assert updated.storage_id.value == contract_to_update.storage_id.value
+ assert updated.checksum.value == contract_to_update.checksum.value
diff --git a/tests/integration/test_infrahub_client.py b/tests/integration/test_infrahub_client.py
index 2fe9d801..54be643a 100644
--- a/tests/integration/test_infrahub_client.py
+++ b/tests/integration/test_infrahub_client.py
@@ -162,7 +162,6 @@ async def test_profile(self, client: InfrahubClient, base_dataset: None, person_
obj1 = await client.get(kind=TESTING_DOG, id=obj.id)
assert obj1.color.value == "#111111"
- @pytest.mark.xfail(reason="Require Infrahub v1.7")
async def test_profile_relationship_is_from_profile(
self, client: InfrahubClient, base_dataset: None, person_liam: InfrahubNode
) -> None:
diff --git a/tests/integration/test_infrahub_client_sync.py b/tests/integration/test_infrahub_client_sync.py
index 472c3378..34c91a44 100644
--- a/tests/integration/test_infrahub_client_sync.py
+++ b/tests/integration/test_infrahub_client_sync.py
@@ -161,7 +161,6 @@ def test_profile(self, client_sync: InfrahubClientSync, base_dataset: None, pers
obj1 = client_sync.get(kind=TESTING_DOG, id=obj.id)
assert obj1.color.value == "#222222"
- @pytest.mark.xfail(reason="Require Infrahub v1.7")
def test_profile_relationship_is_from_profile(
self, client_sync: InfrahubClientSync, base_dataset: None, person_liam: InfrahubNode
) -> None:
diff --git a/tests/unit/ctl/test_branch_report.py b/tests/unit/ctl/test_branch_report.py
index c9af76c5..201f1c92 100644
--- a/tests/unit/ctl/test_branch_report.py
+++ b/tests/unit/ctl/test_branch_report.py
@@ -341,19 +341,21 @@ def mock_branch_report_with_proposed_changes(httpx_mock: HTTPXMock) -> HTTPXMock
],
},
"rejected_by": {"count": 0, "edges": []},
+ },
+ "node_metadata": {
+ "created_at": "2025-11-10T14:30:00Z",
"created_by": {
- "node": {
- "id": "187895d8-723e-8f5d-3614-c517ac8e761c",
- "hfid": ["johndoe"],
- "display_label": "John Doe",
- "__typename": "CoreAccount",
- "name": {"value": "John Doe"},
- },
- "properties": {
- "updated_at": "2025-11-10T14:30:00Z",
- },
+ "id": "187895d8-723e-8f5d-3614-c517ac8e761c",
+ "__typename": "CoreAccount",
+ "display_label": "John Doe",
},
- }
+ "updated_at": "2025-11-10T14:30:00Z",
+ "updated_by": {
+ "id": "187895d8-723e-8f5d-3614-c517ac8e761c",
+ "__typename": "CoreAccount",
+ "display_label": "John Doe",
+ },
+ },
},
{
"node": {
@@ -392,19 +394,21 @@ def mock_branch_report_with_proposed_changes(httpx_mock: HTTPXMock) -> HTTPXMock
},
],
},
+ },
+ "node_metadata": {
+ "created_at": "2025-11-12T09:15:00Z",
"created_by": {
- "node": {
- "id": "287895d8-723e-8f5d-3614-c517ac8e762c",
- "hfid": ["janesmith"],
- "display_label": "Jane Smith",
- "__typename": "CoreAccount",
- "name": {"value": "Jane Smith"},
- },
- "properties": {
- "updated_at": "2025-11-10T14:30:00Z",
- },
+ "id": "287895d8-723e-8f5d-3614-c517ac8e762c",
+ "__typename": "CoreAccount",
+ "display_label": "Jane Smith",
+ },
+ "updated_at": "2025-11-12T09:15:00Z",
+ "updated_by": {
+ "id": "287895d8-723e-8f5d-3614-c517ac8e762c",
+ "__typename": "CoreAccount",
+ "display_label": "Jane Smith",
},
- }
+ },
},
],
}
diff --git a/tests/unit/ctl/test_render_app.py b/tests/unit/ctl/test_render_app.py
index 88fd32c9..f159dfa0 100644
--- a/tests/unit/ctl/test_render_app.py
+++ b/tests/unit/ctl/test_render_app.py
@@ -71,7 +71,7 @@ def test_validate_template_not_found(test_case: RenderAppFailure, httpx_mock: HT
@pytest.mark.parametrize(
- "cli_branch,env_branch,from_git,expected_branch",
+ ("cli_branch", "env_branch", "from_git", "expected_branch"),
[
("cli-branch", None, False, "cli-branch"),
(None, "env-branch", False, "env-branch"),
diff --git a/tests/unit/doc_generation/__init__.py b/tests/unit/doc_generation/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/unit/doc_generation/content_gen_methods/__init__.py b/tests/unit/doc_generation/content_gen_methods/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/unit/doc_generation/content_gen_methods/command/__init__.py b/tests/unit/doc_generation/content_gen_methods/command/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/unit/doc_generation/content_gen_methods/command/test_typer_command.py b/tests/unit/doc_generation/content_gen_methods/command/test_typer_command.py
new file mode 100644
index 00000000..4b0d2363
--- /dev/null
+++ b/tests/unit/doc_generation/content_gen_methods/command/test_typer_command.py
@@ -0,0 +1,30 @@
+from __future__ import annotations
+
+from docs.docs_generation.content_gen_methods import TyperGroupCommand, TyperSingleCommand
+
+
+class TestTyperSingleCommand:
+ def test_build_exec_cmd(self) -> None:
+ # Arrange
+ cmd = TyperSingleCommand(name="dump")
+
+ # Act
+ result = cmd.build()
+
+ # Assert
+ assert "uv run typer --func dump" in result
+ assert "infrahub_sdk.ctl.cli_commands" in result
+ assert 'utils docs --name "infrahubctl dump"' in result
+
+
+class TestTyperGroupCommand:
+ def test_build_exec_cmd(self) -> None:
+ # Arrange
+ cmd = TyperGroupCommand(name="branch")
+
+ # Act
+ result = cmd.build()
+
+ # Assert
+ assert "uv run typer infrahub_sdk.ctl.branch" in result
+ assert 'utils docs --name "infrahubctl branch"' in result
diff --git a/tests/unit/doc_generation/content_gen_methods/mdx/__init__.py b/tests/unit/doc_generation/content_gen_methods/mdx/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/unit/doc_generation/content_gen_methods/mdx/test_mdx_code_doc.py b/tests/unit/doc_generation/content_gen_methods/mdx/test_mdx_code_doc.py
new file mode 100644
index 00000000..d08d7e1a
--- /dev/null
+++ b/tests/unit/doc_generation/content_gen_methods/mdx/test_mdx_code_doc.py
@@ -0,0 +1,290 @@
+from __future__ import annotations
+
+from pathlib import Path
+from unittest.mock import create_autospec
+
+from invoke import Context, Result
+
+from docs.docs_generation.content_gen_methods import (
+ MdxCodeDocumentation,
+)
+
+
+def _make_mock_context(
+ module_files: dict[str, dict[str, str]],
+ calls: list[str] | None = None,
+) -> Context:
+ """Build a mock ``Context`` whose ``run()`` writes files based on requested modules.
+
+ Args:
+ module_files: Mapping of module name to its output files
+ (e.g. ``{"infrahub_sdk.node": {"node.mdx": "# Node"}}``).
+ Only files belonging to modules present in the ``mdxify`` command
+ are written to the output directory.
+ calls: If provided, each executed command string is appended to this
+ list so the caller can verify how many times ``run()`` was invoked.
+ """
+ ctx = create_autospec(Context, instance=True)
+
+ def fake_run(cmd: str, **kwargs: object) -> Result:
+ if calls is not None:
+ calls.append(cmd)
+ prefix, output_dir_str = cmd.split("--output-dir ")
+ output_dir = Path(output_dir_str.strip())
+ requested_modules = prefix.replace("mdxify ", "").split()
+ for module in requested_modules:
+ for filename, content in module_files.get(module, {}).items():
+ (output_dir / filename).write_text(content, encoding="utf-8")
+ return Result()
+
+ ctx.run.side_effect = fake_run
+ return ctx
+
+
+def _generate_one(raw_content: str) -> str:
+ """Run a single piece of raw content through ``generate()`` and return the processed result."""
+ mock_context = _make_mock_context({"mod": {"mod.mdx": raw_content}})
+ doc = MdxCodeDocumentation()
+ return doc.generate(context=mock_context, modules_to_document=["mod"])["mod.mdx"].content
+
+
+class TestDoctestWrapping:
+ def test_wraps_bare_doctest_in_code_fence(self) -> None:
+ """Bare doctest lines (>>>) following prose are wrapped in a ```python fence."""
+ # Arrange
+ raw = "**Examples:**\n\n>>> foo()\n'bar'"
+
+ # Act
+ result = _generate_one(raw)
+
+ # Assert
+ assert result == "**Examples:**\n\n```python\n>>> foo()\n'bar'\n```"
+
+ def test_leaves_existing_fenced_code_blocks_untouched(self) -> None:
+ """Content already inside a fenced code block is not double-wrapped."""
+ # Arrange
+ raw = "```python\ndef foo():\n pass\n```\n"
+
+ # Act
+ result = _generate_one(raw)
+
+ # Assert
+ assert result == raw
+
+ def test_wraps_doctest_with_curly_braces(self) -> None:
+ """Curly braces in doctest are preserved inside the fence, preventing MDX/JSX interpolation."""
+ # Arrange
+ raw = '>>> data = {"key": "value"}\n>>> func(data)'
+
+ # Act
+ result = _generate_one(raw)
+
+ # Assert
+ assert result.startswith("```python\n")
+ assert result.endswith("\n```")
+ assert '{"key": "value"}' in result
+
+ def test_closes_doctest_fence_on_blank_line(self) -> None:
+ """A blank line between two doctest blocks produces two separate fenced blocks."""
+ # Arrange
+ raw = ">>> first()\n'a'\n\n>>> second()\n'b'"
+
+ # Act
+ result = _generate_one(raw)
+
+ # Assert
+ assert result.count("```python") == 2
+        assert result.count("```") == 4  # 2 opening fences ("```python") + 2 closing fences ("```")
+
+ def test_content_with_no_doctest_is_unchanged(self) -> None:
+ """Plain Markdown without any >>> prompt is returned as-is."""
+ # Arrange
+ raw = "# Title\n\nSome text.\n"
+
+ # Act
+ result = _generate_one(raw)
+
+ # Assert
+ assert result == raw
+
+
+class TestMdxCodeDocumentation:
+ def test_generate_default_filter_returns_filtered_files(self) -> None:
+ """Files matching the default filter (``__init__``) are excluded."""
+ # Arrange
+ mock_context = _make_mock_context(
+ {
+ "infrahub_sdk.node": {
+ "infrahub_sdk-node-node.mdx": "# Node",
+ "infrahub_sdk-node-__init__.mdx": "# Init (should be filtered)",
+ },
+ "infrahub_sdk.client": {
+ "infrahub_sdk-client.mdx": "# Client",
+ },
+ }
+ )
+ doc = MdxCodeDocumentation()
+
+ # Act
+ results = doc.generate(
+ context=mock_context,
+ modules_to_document=["infrahub_sdk.node", "infrahub_sdk.client"],
+ )
+
+ # Assert
+ assert "infrahub_sdk-node-node.mdx" in results
+ assert "infrahub_sdk-client.mdx" in results
+ assert "infrahub_sdk-node-__init__.mdx" not in results
+
+ def test_generate_runs_mdxify_only_once(self) -> None:
+ """Second call returns the same result without re-running mdxify."""
+ # Arrange
+ calls: list[str] = []
+ mock_context = _make_mock_context(
+ {"infrahub_sdk.client": {"infrahub_sdk-client.mdx": "# Client"}},
+ calls=calls,
+ )
+ doc = MdxCodeDocumentation()
+
+ # Act
+ result1 = doc.generate(
+ context=mock_context,
+ modules_to_document=["infrahub_sdk.client"],
+ )
+ result2 = doc.generate(
+ context=mock_context,
+ modules_to_document=["infrahub_sdk.client"],
+ )
+
+ # Assert
+ assert result1 is result2
+ assert len(calls) == 1
+
+ def test_generate_with_custom_filters(self) -> None:
+ """Custom file_filters exclude files whose names contain the filter substring."""
+ # Arrange
+ mock_context = _make_mock_context(
+ {
+ "infrahub_sdk.node": {
+ "infrahub_sdk-node-_private.mdx": "# Private",
+ "infrahub_sdk-node-public.mdx": "# Public",
+ },
+ }
+ )
+ doc = MdxCodeDocumentation(file_filters=["_private"])
+
+ # Act
+ results = doc.generate(
+ context=mock_context,
+ modules_to_document=["infrahub_sdk.node"],
+ )
+
+ # Assert
+ assert "infrahub_sdk-node-public.mdx" in results
+ assert "infrahub_sdk-node-_private.mdx" not in results
+
+ def test_generate_empty_output(self) -> None:
+ """When mdxify produces no files for the requested module, an empty dict is returned."""
+ # Arrange
+ mock_context = _make_mock_context({})
+ doc = MdxCodeDocumentation()
+
+ # Act
+ results = doc.generate(
+ context=mock_context,
+ modules_to_document=["infrahub_sdk.empty"],
+ )
+
+ # Assert
+ assert results == {}
+
+ def test_generate_only_includes_requested_modules(self) -> None:
+ """Only files belonging to requested modules are returned."""
+ # Arrange
+ mock_context = _make_mock_context(
+ {
+ "infrahub_sdk.node": {
+ "infrahub_sdk-node-node.mdx": "# Node",
+ },
+ "infrahub_sdk.client": {
+ "infrahub_sdk-client.mdx": "# Client",
+ },
+ }
+ )
+ doc = MdxCodeDocumentation()
+
+ # Act
+ results = doc.generate(
+ context=mock_context,
+ modules_to_document=["infrahub_sdk.node"],
+ )
+
+ # Assert
+ assert "infrahub_sdk-node-node.mdx" in results
+ assert "infrahub_sdk-client.mdx" not in results
+
+ def test_generate_reruns_for_different_modules(self) -> None:
+ """Calling generate with different modules re-runs mdxify."""
+ # Arrange
+ calls: list[str] = []
+ mock_context = _make_mock_context(
+ {
+ "infrahub_sdk.node": {"infrahub_sdk-node-node.mdx": "# Node"},
+ "infrahub_sdk.client": {"infrahub_sdk-client.mdx": "# Client"},
+ },
+ calls=calls,
+ )
+ doc = MdxCodeDocumentation()
+
+ # Act
+ result_node = doc.generate(context=mock_context, modules_to_document=["infrahub_sdk.node"])
+ result_client = doc.generate(context=mock_context, modules_to_document=["infrahub_sdk.client"])
+
+ # Assert
+ assert len(calls) == 2
+ assert "infrahub_sdk-node-node.mdx" in result_node
+ assert "infrahub_sdk-client.mdx" in result_client
+
+
+class TestSourcePathDerivation:
+ def test_nested_module(self) -> None:
+ """Deeply nested mdxify filename resolves to the correct Python source path."""
+ # Arrange
+ mock_context = _make_mock_context(
+ {"infrahub_sdk.node": {"infrahub_sdk-node-node.mdx": "# Node"}},
+ )
+ doc = MdxCodeDocumentation()
+
+ # Act
+ results = doc.generate(context=mock_context, modules_to_document=["infrahub_sdk.node"])
+
+ # Assert
+ mdx = results["infrahub_sdk-node-node.mdx"]
+ assert mdx.name == "infrahub_sdk-node-node.mdx"
+ assert mdx.source_path == Path("infrahub_sdk/node/node.py")
+
+ def test_top_level_module(self) -> None:
+ """Single-dash mdxify filename resolves to a top-level module source path."""
+ # Arrange
+ mock_context = _make_mock_context(
+ {"infrahub_sdk.client": {"infrahub_sdk-client.mdx": "# Client"}},
+ )
+ doc = MdxCodeDocumentation()
+
+ # Act
+ results = doc.generate(context=mock_context, modules_to_document=["infrahub_sdk.client"])
+
+ # Assert
+ assert results["infrahub_sdk-client.mdx"].source_path == Path("infrahub_sdk/client.py")
+
+ def test_single_name(self) -> None:
+ """Filename without dashes resolves to a single .py file."""
+ # Arrange
+ mock_context = _make_mock_context({"mod": {"mod.mdx": "# Mod"}})
+ doc = MdxCodeDocumentation()
+
+ # Act
+ results = doc.generate(context=mock_context, modules_to_document=["mod"])
+
+ # Assert
+ assert results["mod.mdx"].source_path == Path("mod.py")
diff --git a/tests/unit/doc_generation/content_gen_methods/test_command_output_method.py b/tests/unit/doc_generation/content_gen_methods/test_command_output_method.py
new file mode 100644
index 00000000..9b1befcf
--- /dev/null
+++ b/tests/unit/doc_generation/content_gen_methods/test_command_output_method.py
@@ -0,0 +1,71 @@
+from __future__ import annotations
+
+from pathlib import Path
+from unittest.mock import create_autospec
+
+from invoke import Context, Result
+
+from docs.docs_generation import ACommand, CommandOutputDocContentGenMethod
+
+
+class StubCommand(ACommand):
+ def __init__(self, cmd: str) -> None:
+ self.cmd = cmd
+
+ def build(self) -> str:
+ return self.cmd
+
+
+class TestCommandOutputDocContentGenMethod:
+ def test_apply_runs_command_and_reads_output(self, tmp_path: Path) -> None:
+ """The method executes the command via context.run, then reads
+ the content from the temp file whose path was appended via --output."""
+ output_content = "# Generated docs"
+
+ # Arrange
+ def fake_run(cmd: str, **kwargs: object) -> Result:
+ parts = cmd.split("--output ")
+ output_path = Path(parts[1].strip())
+ output_path.write_text(output_content, encoding="utf-8")
+ return Result()
+
+ mock_context = create_autospec(Context, instance=True)
+ mock_context.run.side_effect = fake_run
+
+ method = CommandOutputDocContentGenMethod(
+ context=mock_context,
+ working_directory=tmp_path,
+ command=StubCommand("some_command"),
+ )
+
+ # Act
+ result = method.apply()
+
+ # Assert
+ assert result == output_content
+
+ def test_apply_appends_output_flag(self, tmp_path: Path) -> None:
+ """Verify that --output is appended to the command."""
+ captured_cmd: list[str] = []
+
+ # Arrange
+ def fake_run(cmd: str, **kwargs: object) -> Result:
+ captured_cmd.append(cmd)
+ parts = cmd.split("--output ")
+ Path(parts[1].strip()).write_text("", encoding="utf-8")
+ return Result()
+
+ mock_context = create_autospec(Context, instance=True)
+ mock_context.run.side_effect = fake_run
+
+ method = CommandOutputDocContentGenMethod(
+ context=mock_context,
+ working_directory=tmp_path,
+ command=StubCommand("base_cmd"),
+ )
+
+ # Act
+ method.apply()
+
+ # Assert
+ assert captured_cmd[0].startswith("base_cmd --output ")
diff --git a/tests/unit/doc_generation/content_gen_methods/test_file_printing_method.py b/tests/unit/doc_generation/content_gen_methods/test_file_printing_method.py
new file mode 100644
index 00000000..bda91bd9
--- /dev/null
+++ b/tests/unit/doc_generation/content_gen_methods/test_file_printing_method.py
@@ -0,0 +1,30 @@
+from __future__ import annotations
+
+from pathlib import Path
+
+from docs.docs_generation.content_gen_methods.file_printing_method import FilePrintingDocContentGenMethod
+from docs.docs_generation.content_gen_methods.mdx import MdxFile
+
+
+class TestFilePrintingDocContentGenMethod:
+ def test_apply_returns_file_content(self) -> None:
+ # Arrange
+ file = MdxFile(name="node.mdx", content="# Node API\n\nSome content", source_path=Path("node.py"))
+ method = FilePrintingDocContentGenMethod(file=file)
+
+ # Act
+ result = method.apply()
+
+ # Assert
+ assert result == "# Node API\n\nSome content"
+
+ def test_apply_returns_empty_string(self) -> None:
+ # Arrange
+ file = MdxFile(name="empty.mdx", content="", source_path=Path("empty.py"))
+ method = FilePrintingDocContentGenMethod(file=file)
+
+ # Act
+ result = method.apply()
+
+ # Assert
+ assert not result
diff --git a/tests/unit/doc_generation/content_gen_methods/test_jinja2_method.py b/tests/unit/doc_generation/content_gen_methods/test_jinja2_method.py
new file mode 100644
index 00000000..db332bac
--- /dev/null
+++ b/tests/unit/doc_generation/content_gen_methods/test_jinja2_method.py
@@ -0,0 +1,74 @@
+from __future__ import annotations
+
+from pathlib import Path
+
+from docs.docs_generation import Jinja2DocContentGenMethod
+from infrahub_sdk.template import Jinja2Template
+
+
+class TestJinja2DocContentGenMethod:
+ def test_apply_renders_string_template(self) -> None:
+ """Verify the method renders a string-based Jinja2Template correctly."""
+ # Arrange
+ template = Jinja2Template(template="rendered {{ key }}")
+ method = Jinja2DocContentGenMethod(
+ template=template,
+ template_variables={"key": "content"},
+ )
+
+ # Act
+ result = method.apply()
+
+ # Assert
+ assert result == "rendered content"
+
+ def test_apply_renders_template(self, tmp_path: Path) -> None:
+ # Arrange
+ template_file = tmp_path / "test.j2"
+ template_file.write_text("Hello {{ name }}!", encoding="utf-8")
+ template = Jinja2Template(template=Path("test.j2"), template_directory=tmp_path)
+ method = Jinja2DocContentGenMethod(
+ template=template,
+ template_variables={"name": "World"},
+ )
+
+ # Act
+ result = method.apply()
+
+ # Assert
+ assert result == "Hello World!"
+
+ def test_apply_renders_with_multiple_variables(self, tmp_path: Path) -> None:
+ # Arrange
+ template_file = tmp_path / "test.j2"
+ template_file.write_text("{{ greeting }} {{ target }}!", encoding="utf-8")
+ template = Jinja2Template(template=Path("test.j2"), template_directory=tmp_path)
+ method = Jinja2DocContentGenMethod(
+ template=template,
+ template_variables={"greeting": "Hi", "target": "there"},
+ )
+
+ # Act
+ result = method.apply()
+
+ # Assert
+ assert result == "Hi there!"
+
+ def test_auto_escaping_is_disabled(self, tmp_path: Path) -> None:
+ """HTML content in template variables must not be auto-escaped,
+ since the SDK Jinja2 environment does not enable autoescape."""
+ # Arrange
+ template_file = tmp_path / "test.j2"
+ template_file.write_text("{{ html_content }}", encoding="utf-8")
+ html_input = '<b>text</b>'
+ template = Jinja2Template(template=Path("test.j2"), template_directory=tmp_path)
+ method = Jinja2DocContentGenMethod(
+ template=template,
+ template_variables={"html_content": html_input},
+ )
+
+ # Act
+ result: str = method.apply()
+
+ # Assert
+ assert result == html_input
diff --git a/tests/unit/doc_generation/mdx/__init__.py b/tests/unit/doc_generation/mdx/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/unit/doc_generation/mdx/conftest.py b/tests/unit/doc_generation/mdx/conftest.py
new file mode 100644
index 00000000..4e6dbef2
--- /dev/null
+++ b/tests/unit/doc_generation/mdx/conftest.py
@@ -0,0 +1,215 @@
+"""Shared fixtures and helpers for MDX documentation tests."""
+
+from __future__ import annotations
+
+import re
+from pathlib import Path
+from typing import TYPE_CHECKING
+from unittest.mock import MagicMock
+
+import pytest
+
+from docs.docs_generation.content_gen_methods.mdx.mdx_code_doc import ACodeDocumentation, MdxFile
+from docs.docs_generation.content_gen_methods.mdx.mdx_ordered_code_doc import OrderedMdxCodeDocumentation
+from docs.docs_generation.content_gen_methods.mdx.mdx_section import MdxSection
+
+if TYPE_CHECKING:
+ from invoke import Context
+
+ from docs.docs_generation.content_gen_methods.mdx.mdx_priority import PagePriority
+
+FILE_KEY = "test.mdx"
+MOCK_CONTEXT = MagicMock(spec="Context")
+MODULES: list[str] = []
+
+
+# --- Helpers ---
+
+
+class StubDocumentation(ACodeDocumentation):
+ """Minimal stub returning pre-built MdxFile dicts."""
+
+ def __init__(self, files: dict[str, MdxFile]) -> None:
+ self._files = files
+
+ def generate(self, context: Context, modules_to_document: list[str]) -> dict[str, MdxFile]:
+ return self._files
+
+
+def build_ordered_doc(content: str, priority: PagePriority) -> OrderedMdxCodeDocumentation:
+ """Build an ``OrderedMdxCodeDocumentation`` with a stub inner documentation."""
+ inner = StubDocumentation({FILE_KEY: MdxFile(name=FILE_KEY, content=content, source_path=Path("test.py"))})
+ return OrderedMdxCodeDocumentation(documentation=inner, page_priorities={FILE_KEY: priority})
+
+
+def section_order(content: str) -> list[str]:
+ """Extract the order of H2 section names."""
+ return re.findall(r"^## (\w+)", content, re.MULTILINE)
+
+
+def class_order(content: str) -> list[str]:
+ """Extract the order of H3 class names under ``## Classes``."""
+ match = re.search(r"^## Classes\n(.*?)(?=^## |\Z)", content, re.MULTILINE | re.DOTALL)
+ if not match:
+ return []
+ return re.findall(r"^### `([^`]+)`", match.group(1), re.MULTILINE)
+
+
+def method_order(content: str, class_name: str) -> list[str]:
+ """Extract the order of H4 method names under a given H3 class section."""
+ pattern = rf"^### `{re.escape(class_name)}`\n(.*?)(?=^### |\Z)"
+ match = re.search(pattern, content, re.MULTILINE | re.DOTALL)
+ if not match:
+ return []
+ return re.findall(r"^#### `([^`]+)`", match.group(1), re.MULTILINE)
+
+
+def make_method_section(name: str, signature: str, docstring: str = "") -> MdxSection:
+ """Create an MdxSection mimicking a method entry in MDX output."""
+ lines = [
+ f"#### `{name}`",
+ "",
+ "```python",
+ signature,
+ "```",
+ ]
+ if docstring:
+ lines.extend(("", docstring))
+ return MdxSection(name=name, heading_level=4, _lines=lines)
+
+
+# --- Fixtures ---
+
+
+@pytest.fixture
+def sample_mdx() -> str:
+ return """\
+---
+title: client
+sidebarTitle: client
+---
+
+# `infrahub_sdk.client`
+
+## Functions
+
+### `handle_relogin`
+
+```python
+handle_relogin(func: Callable) -> Callable
+```
+
+### `handle_relogin_sync`
+
+```python
+handle_relogin_sync(func: Callable) -> Callable
+```
+
+## Classes
+
+### `ProcessRelationsNode`
+
+Process relations for a node.
+
+### `BaseClient`
+
+Base class for InfrahubClient and InfrahubClientSync
+
+**Methods:**
+
+#### `start_tracking`
+
+```python
+start_tracking(self) -> Self
+```
+
+#### `set_context_properties`
+
+```python
+set_context_properties(self, identifier: str) -> None
+```
+
+### `InfrahubClient`
+
+GraphQL Client to interact with Infrahub.
+
+**Methods:**
+
+#### `get_version`
+
+```python
+get_version(self) -> str
+```
+
+Return the Infrahub version.
+
+#### `create`
+
+```python
+create(self, kind: str) -> InfrahubNode
+```
+
+#### `get`
+
+```python
+get(self, kind: str) -> InfrahubNode
+```
+
+#### `get`
+
+```python
+get(self, kind: type[SchemaType]) -> SchemaType
+```
+
+#### `save`
+
+```python
+save(self, node: InfrahubNode) -> None
+```
+
+#### `delete`
+
+```python
+delete(self, kind: str, id: str) -> None
+```
+
+### `InfrahubClientSync`
+
+Synchronous GraphQL Client to interact with Infrahub.
+
+**Methods:**
+
+#### `get_version`
+
+```python
+get_version(self) -> str
+```
+
+#### `create`
+
+```python
+create(self, kind: str) -> InfrahubNodeSync
+```
+"""
+
+
+@pytest.fixture
+def sample_mdx_no_methods() -> str:
+ return """\
+---
+title: constants
+sidebarTitle: constants
+---
+
+# `infrahub_sdk.node.constants`
+
+## Classes
+
+### `RelatedNodeState`
+
+State of a related node.
+
+### `InfrahubNodeMode`
+
+Mode of an Infrahub node.
+"""
diff --git a/tests/unit/doc_generation/mdx/test_mdx_collapsed_overload_code_doc.py b/tests/unit/doc_generation/mdx/test_mdx_collapsed_overload_code_doc.py
new file mode 100644
index 00000000..a7ae1c4b
--- /dev/null
+++ b/tests/unit/doc_generation/mdx/test_mdx_collapsed_overload_code_doc.py
@@ -0,0 +1,341 @@
+"""Tests for CollapsedOverloadCodeDocumentation pipeline decorator."""
+
+from __future__ import annotations
+
+from pathlib import Path
+
+import pytest
+
+from docs.docs_generation.content_gen_methods.mdx.mdx_code_doc import MdxFile
+from docs.docs_generation.content_gen_methods.mdx.mdx_collapsed_overload_code_doc import (
+ CollapsedOverloadCodeDocumentation,
+)
+
+from .conftest import FILE_KEY, MOCK_CONTEXT, MODULES, StubDocumentation
+
+
+class TestCollapseOverloads:
+ def test_overloaded_methods_collapsed_to_primary_plus_details(self, sample_mdx_with_overloads: str) -> None:
+ # Arrange
+ doc = _build_collapsed_doc(sample_mdx_with_overloads)
+
+ # Act
+ result = doc.generate(MOCK_CONTEXT, MODULES)[FILE_KEY].content
+
+ # Assert
+ visible_part = result.split("<details>")[0]
+ assert visible_part.count("#### `get`") == 1
+ assert "<details>" in result
+
+ def test_non_overloaded_methods_unchanged(self, sample_mdx_no_overloads: str) -> None:
+ # Arrange
+ doc = _build_collapsed_doc(sample_mdx_no_overloads)
+
+ # Act
+ result = doc.generate(MOCK_CONTEXT, MODULES)[FILE_KEY].content
+
+ # Assert
+ assert result == sample_mdx_no_overloads
+
+ def test_mixed_overloaded_and_non_overloaded(self, sample_mdx_with_overloads: str) -> None:
+ # Arrange
+ doc = _build_collapsed_doc(sample_mdx_with_overloads)
+
+ # Act
+ result = doc.generate(MOCK_CONTEXT, MODULES)[FILE_KEY].content
+
+ # Assert
+ assert "#### `get`" in result
+ assert "#### `create`" in result
+ assert "#### `delete`" in result
+ assert result.count("<details>") == 2
+
+ def test_primary_is_overload_with_most_params(self, sample_mdx_with_overloads: str) -> None:
+ # Arrange
+ doc = _build_collapsed_doc(sample_mdx_with_overloads)
+
+ # Act
+ result = doc.generate(MOCK_CONTEXT, MODULES)[FILE_KEY].content
+
+ # Assert
+ assert "timeout" in result
+
+ def test_details_label_shows_correct_count(self, sample_mdx_with_overloads: str) -> None:
+ # Arrange
+ doc = _build_collapsed_doc(sample_mdx_with_overloads)
+
+ # Act
+ result = doc.generate(MOCK_CONTEXT, MODULES)[FILE_KEY].content
+
+ # Assert
+ assert "Show 2 other overloads" in result
+
+ def test_singular_label_for_two_overloads(self, sample_mdx_with_overloads: str) -> None:
+ # Arrange
+ doc = _build_collapsed_doc(sample_mdx_with_overloads)
+
+ # Act
+ result = doc.generate(MOCK_CONTEXT, MODULES)[FILE_KEY].content
+
+ # Assert
+ assert "Show 1 other overload" in result
+
+
+class TestPropertyPairNotCollapsed:
+ def test_property_getter_setter_kept_separate(self, sample_mdx_with_property_pair: str) -> None:
+ # Arrange
+ doc = _build_collapsed_doc(sample_mdx_with_property_pair)
+
+ # Act
+ result = doc.generate(MOCK_CONTEXT, MODULES)[FILE_KEY].content
+
+ # Assert
+ assert "<details>" not in result
+ assert result.count("#### `value`") == 2
+
+ def test_property_getter_setter_deleter_kept_separate(self, sample_mdx_with_property_triplet: str) -> None:
+ # Arrange
+ doc = _build_collapsed_doc(sample_mdx_with_property_triplet)
+
+ # Act
+ result = doc.generate(MOCK_CONTEXT, MODULES)[FILE_KEY].content
+
+ # Assert
+ assert "<details>" not in result
+ assert result.count("#### `value`") == 3
+
+ def test_property_pair_alongside_real_overloads(self, sample_mdx_property_and_overloads: str) -> None:
+ # Arrange
+ doc = _build_collapsed_doc(sample_mdx_property_and_overloads)
+
+ # Act
+ result = doc.generate(MOCK_CONTEXT, MODULES)[FILE_KEY].content
+
+ # Assert
+ assert result.count("#### `value`") == 2
+ assert "<details>" in result
+ assert "#### `get`" in result
+
+
+class TestNoOverloads:
+ def test_empty_content_passes_through(self) -> None:
+ # Arrange
+ content = "# minimal"
+ doc = _build_collapsed_doc(content)
+
+ # Act
+ result = doc.generate(MOCK_CONTEXT, MODULES)[FILE_KEY].content
+
+ # Assert
+ assert result == content
+
+
+def _build_collapsed_doc(content: str) -> CollapsedOverloadCodeDocumentation:
+ """Build a ``CollapsedOverloadCodeDocumentation`` with a stub inner documentation."""
+ inner = StubDocumentation({FILE_KEY: MdxFile(name=FILE_KEY, content=content, source_path=Path("test.py"))})
+ return CollapsedOverloadCodeDocumentation(documentation=inner)
+
+
+# --- Fixtures ---
+
+
+@pytest.fixture
+def sample_mdx_no_overloads() -> str:
+ return """\
+---
+title: test
+---
+
+# `test_module`
+
+## Classes
+
+### `MyClass`
+
+A simple class.
+
+**Methods:**
+
+#### `save`
+
+```python
+save(self, data: str) -> None
+```
+
+#### `delete`
+
+```python
+delete(self, id: str) -> None
+```"""
+
+
+@pytest.fixture
+def sample_mdx_with_overloads() -> str:
+ return """\
+---
+title: client
+sidebarTitle: client
+---
+
+# `infrahub_sdk.client`
+
+## Classes
+
+### `InfrahubClient`
+
+GraphQL Client to interact with Infrahub.
+
+**Methods:**
+
+#### `get`
+
+```python
+get(self, kind: str) -> InfrahubNode
+```
+
+#### `get`
+
+```python
+get(self, kind: type[SchemaType]) -> SchemaType
+```
+
+#### `get`
+
+```python
+get(self, kind: str | type[SchemaType], raise_when_missing: bool = True) -> InfrahubNode | SchemaType | None
+```
+
+#### `create`
+
+```python
+create(self, kind: str) -> InfrahubNode
+```
+
+#### `create`
+
+```python
+create(self, kind: str, data: dict | None = None, branch: str | None = None, timeout: int | None = None) -> InfrahubNode
+```
+
+#### `delete`
+
+```python
+delete(self, kind: str, id: str) -> None
+```"""
+
+
+@pytest.fixture
+def sample_mdx_with_property_pair() -> str:
+ return """\
+---
+title: attribute
+sidebarTitle: attribute
+---
+
+# `infrahub_sdk.node.attribute`
+
+## Classes
+
+### `Attribute`
+
+Represents an attribute of a Node.
+
+**Methods:**
+
+#### `value`
+
+```python
+value(self) -> Any
+```
+
+#### `value`
+
+```python
+value(self, value: Any) -> None
+```"""
+
+
+@pytest.fixture
+def sample_mdx_property_and_overloads() -> str:
+ return """\
+---
+title: attribute
+sidebarTitle: attribute
+---
+
+# `infrahub_sdk.node.attribute`
+
+## Classes
+
+### `Attribute`
+
+Represents an attribute of a Node.
+
+**Methods:**
+
+#### `value`
+
+```python
+value(self) -> Any
+```
+
+#### `value`
+
+```python
+value(self, value: Any) -> None
+```
+
+#### `get`
+
+```python
+get(self, kind: str) -> InfrahubNode
+```
+
+#### `get`
+
+```python
+get(self, kind: str, id: int) -> InfrahubNode
+```
+
+#### `get`
+
+```python
+get(self, kind: str, id: int, branch: str) -> InfrahubNode
+```"""
+
+
+@pytest.fixture
+def sample_mdx_with_property_triplet() -> str:
+ return """\
+---
+title: attribute
+sidebarTitle: attribute
+---
+
+# `infrahub_sdk.node.attribute`
+
+## Classes
+
+### `Attribute`
+
+Represents an attribute of a Node.
+
+**Methods:**
+
+#### `value`
+
+```python
+value(self) -> Any
+```
+
+#### `value`
+
+```python
+value(self, value: Any) -> None
+```
+
+#### `value`
+
+```python
+value(self) -> None
+```"""
diff --git a/tests/unit/doc_generation/mdx/test_mdx_collapsed_overload_section.py b/tests/unit/doc_generation/mdx/test_mdx_collapsed_overload_section.py
new file mode 100644
index 00000000..8c25f3ec
--- /dev/null
+++ b/tests/unit/doc_generation/mdx/test_mdx_collapsed_overload_section.py
@@ -0,0 +1,148 @@
+"""Tests for CollapsedOverloadSection."""
+
+from __future__ import annotations
+
+import pytest
+
+from docs.docs_generation.content_gen_methods.mdx.mdx_collapsed_overload_section import (
+ CollapsedOverloadSection,
+)
+from docs.docs_generation.content_gen_methods.mdx.mdx_section import ASection
+
+from .conftest import make_method_section
+
+
+class TestCollapsedOverloadSection:
+ def test_heading_delegates_to_primary(self) -> None:
+ # Arrange
+ primary = make_method_section("get", "get(self, kind: str)")
+ section = CollapsedOverloadSection(primary=primary, others=[])
+
+ # Act
+ result = section.heading
+
+ # Assert
+ assert result == primary.heading
+
+ def test_no_others_returns_primary_content_only(self) -> None:
+ # Arrange
+ primary = make_method_section("get", "get(self, kind: str)")
+ section = CollapsedOverloadSection(primary=primary, others=[])
+
+ # Act
+ result = section.content
+
+ # Assert
+ assert result == primary.content
+ assert "<details>" not in "\n".join(result)
+
+ def test_others_rendered_in_details_block(self) -> None:
+ # Arrange
+ primary = make_method_section("get", "get(self, kind: str, id: int)")
+ other1 = make_method_section("get", "get(self, kind: str)")
+ other2 = make_method_section("get", "get(self)")
+ section = CollapsedOverloadSection(primary=primary, others=[other1, other2])
+
+ # Act
+ result = "\n".join(section.content)
+
+ # Assert
+ assert "<details>" in result
+ assert "<summary>Show 2 other overloads</summary>" in result
+ assert "</details>" in result
+
+ def test_singular_label_for_one_other(self) -> None:
+ # Arrange
+ primary = make_method_section("get", "get(self, kind: str)")
+ other = make_method_section("get", "get(self)")
+ section = CollapsedOverloadSection(primary=primary, others=[other])
+
+ # Act
+ result = "\n".join(section.content)
+
+ # Assert
+ assert "<summary>Show 1 other overload</summary>" in result
+
+ def test_plural_label_for_multiple_others(self) -> None:
+ # Arrange
+ primary = make_method_section("get", "get(self, a: int, b: int, c: int)")
+ others = [make_method_section("get", f"get(self, x{i}: int)") for i in range(3)]
+ section = CollapsedOverloadSection(primary=primary, others=others)
+
+ # Act
+ result = "\n".join(section.content)
+
+ # Assert
+ assert "<summary>Show 3 other overloads</summary>" in result
+
+ def test_lines_includes_heading_plus_content(self) -> None:
+ # Arrange
+ primary = make_method_section("get", "get(self, kind: str)")
+ section = CollapsedOverloadSection(primary=primary, others=[])
+
+ # Act
+ result = section.lines
+
+ # Assert
+ assert result[0] == section.heading
+ assert result[1:] == section.content
+
+ def test_is_asection_subclass(self) -> None:
+ # Assert
+ assert issubclass(CollapsedOverloadSection, ASection)
+
+ def test_details_block_contains_other_section_lines(self) -> None:
+ # Arrange
+ primary = make_method_section("get", "get(self, kind: str, id: int)")
+ other = make_method_section("get", "get(self, kind: str)", docstring="Get by kind.")
+ section = CollapsedOverloadSection(primary=primary, others=[other])
+
+ # Act
+ result = "\n".join(section.content)
+
+ # Assert
+ assert "Get by kind." in result
+ assert "get(self, kind: str)" in result
+
+
+class TestCollapsedOverloadSectionFromOverloads:
+ def test_primary_is_overload_with_most_params(self) -> None:
+ # Arrange
+ s1 = make_method_section("get", "get(self, a: int, b: int)")
+ s2 = make_method_section("get", "get(self, a: int, b: int, c: int, d: int, e: int)")
+ s3 = make_method_section("get", "get(self, a: int, b: int, c: int)")
+
+ # Act
+ section = CollapsedOverloadSection.from_overloads([s1, s2, s3])
+
+ # Assert
+ assert "e: int" in "\n".join(section.primary.lines)
+ assert len(section.others) == 2
+
+ def test_tie_breaking_selects_first_in_source_order(self) -> None:
+ # Arrange
+ s1 = make_method_section("get", "get(self, a: int, b: str)")
+ s2 = make_method_section("get", "get(self, x: int, y: str)")
+
+ # Act
+ section = CollapsedOverloadSection.from_overloads([s1, s2])
+
+ # Assert
+ assert "a: int" in "\n".join(section.primary.lines)
+ assert len(section.others) == 1
+
+ def test_single_section_returns_no_others(self) -> None:
+ # Arrange
+ s1 = make_method_section("get", "get(self, kind: str)")
+
+ # Act
+ section = CollapsedOverloadSection.from_overloads([s1])
+
+ # Assert
+ assert section.primary is s1
+ assert section.others == []
+
+ def test_empty_list_raises(self) -> None:
+ # Act / Assert
+ with pytest.raises(ValueError, match="empty"):
+ CollapsedOverloadSection.from_overloads([])
diff --git a/tests/unit/doc_generation/mdx/test_mdx_method_signature.py b/tests/unit/doc_generation/mdx/test_mdx_method_signature.py
new file mode 100644
index 00000000..f9a18606
--- /dev/null
+++ b/tests/unit/doc_generation/mdx/test_mdx_method_signature.py
@@ -0,0 +1,196 @@
+"""Tests for MethodSignature."""
+
+from __future__ import annotations
+
+from docs.docs_generation.content_gen_methods.mdx.mdx_collapsed_overload_section import (
+ MethodSignature,
+)
+from docs.docs_generation.content_gen_methods.mdx.mdx_section import MdxSection
+
+from .conftest import make_method_section
+
+
+class TestMethodSignatureParamCount:
+ def test_simple_signature_returns_correct_count(self) -> None:
+ # Arrange
+ sig = MethodSignature(make_method_section("get", "get(self, kind: str, id: int)"))
+
+ # Act
+ result = sig.param_count()
+
+ # Assert
+ assert result == 2
+
+ def test_self_only_returns_zero(self) -> None:
+ # Arrange
+ sig = MethodSignature(make_method_section("get", "get(self)"))
+
+ # Act
+ result = sig.param_count()
+
+ # Assert
+ assert result == 0
+
+ def test_kwargs_counts_as_one(self) -> None:
+ # Arrange
+ sig = MethodSignature(make_method_section("get", "get(self, **kwargs: Any)"))
+
+ # Act
+ result = sig.param_count()
+
+ # Assert
+ assert result == 1
+
+ def test_args_and_kwargs_count_separately(self) -> None:
+ # Arrange
+ sig = MethodSignature(make_method_section("get", "get(self, *args: str, **kwargs: Any)"))
+
+ # Act
+ result = sig.param_count()
+
+ # Assert
+ assert result == 2
+
+ def test_nested_brackets_not_split(self) -> None:
+ # Arrange
+ sig = MethodSignature(make_method_section("get", "get(self, kind: dict[str, int], other: list[str])"))
+
+ # Act
+ result = sig.param_count()
+
+ # Assert
+ assert result == 2
+
+ def test_deeply_nested_generics(self) -> None:
+ # Arrange
+ sig = MethodSignature(make_method_section("get", "get(self, x: dict[str, list[tuple[int, ...]]])"))
+
+ # Act
+ result = sig.param_count()
+
+ # Assert
+ assert result == 1
+
+ def test_signature_with_return_type(self) -> None:
+ # Arrange
+ sig = MethodSignature(make_method_section("get", "get(self, kind: str) -> InfrahubNode"))
+
+ # Act
+ result = sig.param_count()
+
+ # Assert
+ assert result == 1
+
+ def test_default_values_dont_affect_count(self) -> None:
+ # Arrange
+ sig = MethodSignature(make_method_section("get", "get(self, kind: str = ..., id: int = None)"))
+
+ # Act
+ result = sig.param_count()
+
+ # Assert
+ assert result == 2
+
+ def test_real_world_get_signature(self) -> None:
+ # Arrange
+ signature = (
+ "get(self, kind: str | type[SchemaType], raise_when_missing: bool = True, "
+ "at: Timestamp | None = None, branch: str | None = None, "
+ "timeout: int | None = None, id: str | None = None, "
+ "hfid: list[str] | None = None, include: list[str] | None = None, "
+ "exclude: list[str] | None = None, populate_store: bool = True, "
+ "fragment: bool = False, prefetch_relationships: bool = False, "
+ "property: bool = False, include_metadata: bool = False, "
+ "**kwargs: Any) -> InfrahubNode | SchemaType | None"
+ )
+ sig = MethodSignature(make_method_section("get", signature))
+
+ # Act
+ result = sig.param_count()
+
+ # Assert
+ assert result == 15
+
+ def test_empty_signature(self) -> None:
+ # Arrange
+ sig = MethodSignature(make_method_section("get", "get()"))
+
+ # Act
+ result = sig.param_count()
+
+ # Assert
+ assert result == 0
+
+ def test_no_code_fence_returns_zero(self) -> None:
+ # Arrange
+ section = MdxSection(name="get", heading_level=4, _lines=["#### `get`", "", "Some description."])
+ sig = MethodSignature(section)
+
+ # Act
+ result = sig.param_count()
+
+ # Assert
+ assert result == 0
+
+
+class TestMethodSignatureReturnType:
+ def test_returns_none_type(self) -> None:
+ # Arrange
+ sig = MethodSignature(make_method_section("value", "value(self, value: Any) -> None"))
+
+ # Act
+ result = sig.return_type()
+
+ # Assert
+ assert result == "None"
+
+ def test_returns_concrete_type(self) -> None:
+ # Arrange
+ sig = MethodSignature(make_method_section("value", "value(self) -> Any"))
+
+ # Act
+ result = sig.return_type()
+
+ # Assert
+ assert result == "Any"
+
+ def test_no_return_annotation_returns_empty(self) -> None:
+ # Arrange
+ sig = MethodSignature(make_method_section("get", "get(self, kind: str)"))
+
+ # Act
+ result = sig.return_type()
+
+ # Assert
+ assert not result
+
+ def test_generic_return_type(self) -> None:
+ # Arrange
+ sig = MethodSignature(make_method_section("get", "get(self) -> dict[str, list[int]]"))
+
+ # Act
+ result = sig.return_type()
+
+ # Assert
+ assert result == "dict[str, list[int]]"
+
+ def test_union_return_type(self) -> None:
+ # Arrange
+ sig = MethodSignature(make_method_section("get", "get(self, kind: str) -> InfrahubNode | None"))
+
+ # Act
+ result = sig.return_type()
+
+ # Assert
+ assert result == "InfrahubNode | None"
+
+ def test_no_code_fence_returns_empty(self) -> None:
+ # Arrange
+ section = MdxSection(name="get", heading_level=4, _lines=["#### `get`", "", "Some description."])
+ sig = MethodSignature(section)
+
+ # Act
+ result = sig.return_type()
+
+ # Assert
+ assert not result
diff --git a/tests/unit/doc_generation/mdx/test_mdx_ordered_section.py b/tests/unit/doc_generation/mdx/test_mdx_ordered_section.py
new file mode 100644
index 00000000..f279ca8e
--- /dev/null
+++ b/tests/unit/doc_generation/mdx/test_mdx_ordered_section.py
@@ -0,0 +1,95 @@
+"""Tests for OrderedMdxSection."""
+
+from __future__ import annotations
+
+import re
+
+from docs.docs_generation.content_gen_methods.mdx.mdx_ordered_section import OrderedMdxSection
+from docs.docs_generation.content_gen_methods.mdx.mdx_priority import SectionPriority
+from docs.docs_generation.content_gen_methods.mdx.mdx_section import ASection, MdxSection
+
+
+class TestOrderedMdxSection:
+ def test_content_returns_reordered_children(self) -> None:
+ # Arrange
+ children = [
+ "### `Alpha`\n",
+ "Alpha body\n",
+ "### `Bravo`\n",
+ "Bravo body\n",
+ "### `Charlie`\n",
+ "Charlie body",
+ ]
+ ordered = _make_ordered("Classes", 2, children, priority=SectionPriority(names=["Charlie", "Alpha"]))
+
+ # Act
+ content = ordered.content
+
+ # Assert
+ content_str = "\n".join(content)
+ names = re.findall(r"^### `([^`]+)`", content_str, re.MULTILINE)
+ assert names == ["Charlie", "Alpha", "Bravo"]
+
+ def test_lines_includes_heading_plus_ordered_content(self) -> None:
+ # Arrange
+ children = [
+ "### `A`\n",
+ "### `B`\n",
+ ]
+ ordered = _make_ordered("Classes", 2, children, priority=SectionPriority(names=["B"]))
+
+ # Act
+ lines = ordered.lines
+
+ # Assert
+ assert lines[0] == "## `Classes`"
+ names = re.findall(r"^### `([^`]+)`", "\n".join(lines), re.MULTILINE)
+ assert names == ["B", "A"]
+
+ def test_empty_priority_returns_original_content(self) -> None:
+ # Arrange
+ ordered = _make_ordered("Sec", 2, ["### `X`\n", "body"], priority=SectionPriority())
+ base = MdxSection(name="Sec", heading_level=2, _lines=["## `Sec`", "### `X`\n", "body"])
+
+ # Act
+ content = ordered.content
+
+ # Assert
+ assert content == base.content
+
+ def test_no_children_returns_original_content(self) -> None:
+ # Arrange
+ ordered = _make_ordered("Sec", 2, ["Just some text"], priority=SectionPriority(names=["Anything"]))
+ base = MdxSection(name="Sec", heading_level=2, _lines=["## `Sec`", "Just some text"])
+
+ # Act
+ content = ordered.content
+
+ # Assert
+ assert content == base.content
+
+ def test_is_asection_subclass(self) -> None:
+ # Arrange
+ section = MdxSection(name="MySection", heading_level=2, _lines=["## `MySection`"])
+ ordered = OrderedMdxSection(section=section, priority=SectionPriority(), child_heading_level=3)
+
+ # Assert
+ assert isinstance(ordered, ASection)
+ assert isinstance(section, ASection)
+ assert ordered.heading == "## `MySection`"
+
+
+def _make_ordered(
+ name: str,
+ heading_level: int,
+ children_lines: list[str],
+ priority: SectionPriority,
+ child_heading_level: int = 3,
+) -> OrderedMdxSection:
+ heading = "#" * heading_level + f" `{name}`"
+ section = MdxSection(name=name, heading_level=heading_level, _lines=[heading, *children_lines])
+ return OrderedMdxSection(
+ section=section,
+ priority=priority,
+ child_heading_level=child_heading_level,
+ )
diff --git a/tests/unit/doc_generation/mdx/test_reorder_classes.py b/tests/unit/doc_generation/mdx/test_reorder_classes.py
new file mode 100644
index 00000000..0c8ebf8f
--- /dev/null
+++ b/tests/unit/doc_generation/mdx/test_reorder_classes.py
@@ -0,0 +1,93 @@
+"""Tests for class reordering in OrderedMdxCodeDocumentation."""
+
+from __future__ import annotations
+
+import pytest
+
+from docs.docs_generation.content_gen_methods.mdx.mdx_priority import PagePriority
+
+from .conftest import FILE_KEY, MOCK_CONTEXT, MODULES, build_ordered_doc, class_order
+
+
+class TestReorderClasses:
+ def test_single_priority_class_moves_to_top(self, sample_mdx: str) -> None:
+ # Arrange
+ priority = PagePriority(classes=["InfrahubClient"])
+ doc = build_ordered_doc(sample_mdx, priority)
+
+ # Act
+ result = doc.generate(MOCK_CONTEXT, MODULES)[FILE_KEY].content
+
+ # Assert
+ order = class_order(result)
+ assert order[0] == "InfrahubClient"
+
+ def test_multiple_priority_classes_in_specified_order(self, sample_mdx: str) -> None:
+ # Arrange
+ priority = PagePriority(classes=["InfrahubClientSync", "InfrahubClient"])
+ doc = build_ordered_doc(sample_mdx, priority)
+
+ # Act
+ result = doc.generate(MOCK_CONTEXT, MODULES)[FILE_KEY].content
+
+ # Assert
+ order = class_order(result)
+ assert order[0] == "InfrahubClientSync"
+ assert order[1] == "InfrahubClient"
+
+ def test_non_priority_classes_retain_original_order(self, sample_mdx: str) -> None:
+ # Arrange
+ priority = PagePriority(classes=["InfrahubClient"])
+ doc = build_ordered_doc(sample_mdx, priority)
+
+ # Act
+ result = doc.generate(MOCK_CONTEXT, MODULES)[FILE_KEY].content
+
+ # Assert
+ order = class_order(result)
+ remaining = order[1:]
+ assert remaining == ["ProcessRelationsNode", "BaseClient", "InfrahubClientSync"]
+
+ def test_no_priority_config_returns_unchanged(self, sample_mdx: str) -> None:
+ # Arrange
+ priority = PagePriority()
+ doc = build_ordered_doc(sample_mdx, priority)
+
+ # Act
+ result = doc.generate(MOCK_CONTEXT, MODULES)[FILE_KEY].content
+
+ # Assert
+ assert result == sample_mdx
+
+ def test_empty_classes_list_returns_unchanged(self, sample_mdx: str) -> None:
+ # Arrange
+ priority = PagePriority(classes=[])
+ doc = build_ordered_doc(sample_mdx, priority)
+
+ # Act
+ result = doc.generate(MOCK_CONTEXT, MODULES)[FILE_KEY].content
+
+ # Assert
+ assert result == sample_mdx
+
+ def test_nonexistent_class_name_raises(self, sample_mdx: str) -> None:
+ # Arrange
+ fake_class_name = "DoesNotExist"
+ priority = PagePriority(classes=[fake_class_name])
+ doc = build_ordered_doc(sample_mdx, priority)
+
+ # Act / Assert
+ with pytest.raises(ValueError, match=fake_class_name):
+ doc.generate(MOCK_CONTEXT, MODULES)
+
+ def test_reorder_page_without_methods(self, sample_mdx_no_methods: str) -> None:
+ # Arrange
+ priority = PagePriority(classes=["InfrahubNodeMode"])
+ doc = build_ordered_doc(sample_mdx_no_methods, priority)
+
+ # Act
+ result = doc.generate(MOCK_CONTEXT, MODULES)[FILE_KEY].content
+
+ # Assert
+ order = class_order(result)
+ assert order == ["InfrahubNodeMode", "RelatedNodeState"]
diff --git a/tests/unit/doc_generation/mdx/test_reorder_methods.py b/tests/unit/doc_generation/mdx/test_reorder_methods.py
new file mode 100644
index 00000000..3e588a54
--- /dev/null
+++ b/tests/unit/doc_generation/mdx/test_reorder_methods.py
@@ -0,0 +1,108 @@
+"""Tests for method reordering in OrderedMdxCodeDocumentation."""
+
+from __future__ import annotations
+
+import pytest
+
+from docs.docs_generation.content_gen_methods.mdx.mdx_priority import PagePriority
+
+from .conftest import FILE_KEY, MOCK_CONTEXT, MODULES, build_ordered_doc, class_order, method_order
+
+
+class TestReorderMethods:
+ def test_single_priority_method_moves_to_top(self, sample_mdx: str) -> None:
+ # Arrange
+ priority = PagePriority(methods={"InfrahubClient": ["save"]})
+ doc = build_ordered_doc(sample_mdx, priority)
+
+ # Act
+ result = doc.generate(MOCK_CONTEXT, MODULES)[FILE_KEY].content
+
+ # Assert
+ assert method_order(result, "InfrahubClient")[0] == "save"
+
+ def test_multiple_priority_methods_in_specified_order(self, sample_mdx: str) -> None:
+ # Arrange
+ priority = PagePriority(methods={"InfrahubClient": ["delete", "save"]})
+ doc = build_ordered_doc(sample_mdx, priority)
+
+ # Act
+ result = doc.generate(MOCK_CONTEXT, MODULES)[FILE_KEY].content
+
+ # Assert
+ order = method_order(result, "InfrahubClient")
+ assert order[0] == "delete"
+ assert order[1] == "save"
+
+ def test_non_priority_methods_retain_original_order(self, sample_mdx: str) -> None:
+ # Arrange
+ priority = PagePriority(methods={"InfrahubClient": ["save"]})
+ doc = build_ordered_doc(sample_mdx, priority)
+
+ # Act
+ result = doc.generate(MOCK_CONTEXT, MODULES)[FILE_KEY].content
+
+ # Assert
+ order = method_order(result, "InfrahubClient")
+ assert order[0] == "save"
+ assert order[1:] == ["get_version", "create", "get", "get", "delete"]
+
+ def test_method_only_priority_no_class_reordering(self, sample_mdx: str) -> None:
+ # Arrange
+ priority = PagePriority(methods={"InfrahubClient": ["save"]})
+ doc = build_ordered_doc(sample_mdx, priority)
+
+ # Act
+ result = doc.generate(MOCK_CONTEXT, MODULES)[FILE_KEY].content
+
+ # Assert
+ assert class_order(result) == ["ProcessRelationsNode", "BaseClient", "InfrahubClient", "InfrahubClientSync"]
+
+ def test_combined_class_and_method_reordering(self, sample_mdx: str) -> None:
+ # Arrange
+ priority = PagePriority(
+ classes=["InfrahubClient"],
+ methods={"InfrahubClient": ["save"]},
+ )
+ doc = build_ordered_doc(sample_mdx, priority)
+
+ # Act
+ result = doc.generate(MOCK_CONTEXT, MODULES)[FILE_KEY].content
+
+ # Assert
+ assert class_order(result)[0] == "InfrahubClient"
+ assert method_order(result, "InfrahubClient")[0] == "save"
+
+ def test_overloaded_methods_move_together(self, sample_mdx: str) -> None:
+ # Arrange
+ priority = PagePriority(methods={"InfrahubClient": ["get"]})
+ doc = build_ordered_doc(sample_mdx, priority)
+
+ # Act
+ result = doc.generate(MOCK_CONTEXT, MODULES)[FILE_KEY].content
+
+ # Assert
+ order = method_order(result, "InfrahubClient")
+ assert order[0] == "get"
+ assert order[1] == "get"
+ assert order[2:] == ["get_version", "create", "save", "delete"]
+
+ def test_nonexistent_method_name_raises(self, sample_mdx: str) -> None:
+ # Arrange
+ fake_method_name = "nonexistent"
+ priority = PagePriority(methods={"InfrahubClient": [fake_method_name, "save"]})
+ doc = build_ordered_doc(sample_mdx, priority)
+
+ # Act / Assert
+    with pytest.raises(ValueError, match=fake_method_name):
+ doc.generate(MOCK_CONTEXT, MODULES)
+
+ def test_method_priority_for_nonexistent_class_raises(self, sample_mdx: str) -> None:
+ # Arrange
+ fake_class_name = "DoesNotExist"
+    priority = PagePriority(methods={fake_class_name: ["get"]})
+ doc = build_ordered_doc(sample_mdx, priority)
+
+ # Act / Assert
+ with pytest.raises(ValueError, match=fake_class_name):
+ doc.generate(MOCK_CONTEXT, MODULES)
diff --git a/tests/unit/doc_generation/mdx/test_reorder_sections.py b/tests/unit/doc_generation/mdx/test_reorder_sections.py
new file mode 100644
index 00000000..12d1b087
--- /dev/null
+++ b/tests/unit/doc_generation/mdx/test_reorder_sections.py
@@ -0,0 +1,48 @@
+"""Tests for section reordering in OrderedMdxCodeDocumentation."""
+
+from __future__ import annotations
+
+from docs.docs_generation.content_gen_methods.mdx.mdx_priority import PagePriority
+
+from .conftest import FILE_KEY, MOCK_CONTEXT, MODULES, build_ordered_doc, class_order, method_order, section_order
+
+
+class TestReorderSections:
+ def test_section_priority_moves_classes_before_functions(self, sample_mdx: str) -> None:
+ # Arrange
+ priority = PagePriority(sections=["Classes"])
+ doc = build_ordered_doc(sample_mdx, priority)
+
+ # Act
+ result = doc.generate(MOCK_CONTEXT, MODULES)[FILE_KEY].content
+
+ # Assert
+ assert section_order(result) == ["Classes", "Functions"]
+
+ def test_combined_section_class_and_method_reordering(self, sample_mdx: str) -> None:
+ # Arrange
+ priority = PagePriority(
+ sections=["Classes"],
+ classes=["InfrahubClient"],
+ methods={"InfrahubClient": ["save"]},
+ )
+ doc = build_ordered_doc(sample_mdx, priority)
+
+ # Act
+ result = doc.generate(MOCK_CONTEXT, MODULES)[FILE_KEY].content
+
+ # Assert
+ assert section_order(result) == ["Classes", "Functions"]
+ assert class_order(result)[0] == "InfrahubClient"
+ assert method_order(result, "InfrahubClient")[0] == "save"
+
+ def test_no_section_priority_retains_original_order(self, sample_mdx: str) -> None:
+ # Arrange
+ priority = PagePriority(sections=[])
+ doc = build_ordered_doc(sample_mdx, priority)
+
+ # Act
+ result = doc.generate(MOCK_CONTEXT, MODULES)[FILE_KEY].content
+
+ # Assert
+ assert result == sample_mdx
diff --git a/tests/unit/doc_generation/mdx/test_validate_priorities.py b/tests/unit/doc_generation/mdx/test_validate_priorities.py
new file mode 100644
index 00000000..be73b765
--- /dev/null
+++ b/tests/unit/doc_generation/mdx/test_validate_priorities.py
@@ -0,0 +1,91 @@
+"""Tests for priority validation in OrderedMdxCodeDocumentation."""
+
+from __future__ import annotations
+
+from pathlib import Path
+
+import pytest
+
+from docs.docs_generation.content_gen_methods.mdx.mdx_code_doc import MdxFile
+from docs.docs_generation.content_gen_methods.mdx.mdx_ordered_code_doc import OrderedMdxCodeDocumentation
+from docs.docs_generation.content_gen_methods.mdx.mdx_priority import PagePriority
+
+from .conftest import FILE_KEY, MOCK_CONTEXT, MODULES, StubDocumentation, build_ordered_doc
+
+
+class TestNonexistentPriorities:
+ def test_nonexistent_file_key_raises(self) -> None:
+ # Arrange
+ content = "# some content"
+ inner = StubDocumentation({"actual.mdx": MdxFile(name="actual.mdx", content=content, source_path=Path("a.py"))})
+ fake_file_name = "missing.mdx"
+ doc = OrderedMdxCodeDocumentation(
+ documentation=inner,
+ page_priorities={fake_file_name: PagePriority(classes=["Foo"])},
+ )
+
+ # Act / Assert
+ with pytest.raises(ValueError, match=fake_file_name):
+ doc.generate(MOCK_CONTEXT, MODULES)
+
+ def test_nonexistent_class_raises(self, sample_mdx: str) -> None:
+ # Arrange
+ fake_class_name = "NoSuchClass"
+ priority = PagePriority(classes=[fake_class_name])
+ doc = build_ordered_doc(sample_mdx, priority)
+
+ # Act / Assert
+ with pytest.raises(ValueError, match=fake_class_name):
+ doc.generate(MOCK_CONTEXT, MODULES)
+
+ def test_nonexistent_method_raises(self, sample_mdx: str) -> None:
+ # Arrange
+ fake_method_name = "no_such_method"
+ priority = PagePriority(methods={"InfrahubClient": [fake_method_name]})
+ doc = build_ordered_doc(sample_mdx, priority)
+
+ # Act / Assert
+ with pytest.raises(ValueError, match=fake_method_name):
+ doc.generate(MOCK_CONTEXT, MODULES)
+
+ def test_nonexistent_section_raises(self, sample_mdx: str) -> None:
+ # Arrange
+ priority = PagePriority(sections=["NoSuchSection"])
+ doc = build_ordered_doc(sample_mdx, priority)
+
+ # Act / Assert
+ with pytest.raises(ValueError, match="NoSuchSection"):
+ doc.generate(MOCK_CONTEXT, MODULES)
+
+
+class TestValidPriorities:
+ def test_valid_config_no_error(self, sample_mdx: str) -> None:
+ # Arrange
+ priority = PagePriority(
+ classes=["InfrahubClient"],
+ methods={"InfrahubClient": ["save"]},
+ )
+ doc = build_ordered_doc(sample_mdx, priority)
+
+ # Act
+ result = doc.generate(MOCK_CONTEXT, MODULES)
+
+ # Assert
+ assert FILE_KEY in result
+
+
+class TestDuplicatePriorities:
+ def test_duplicate_class_names_raises(self) -> None:
+ # Act / Assert
+ with pytest.raises(ValueError, match="Duplicate class 'InfrahubClient'"):
+ PagePriority(classes=["InfrahubClient", "InfrahubClient"])
+
+ def test_duplicate_method_names_raises(self) -> None:
+ # Act / Assert
+ with pytest.raises(ValueError, match="Duplicate method 'save'"):
+ PagePriority(methods={"InfrahubClient": ["save", "save"]})
+
+ def test_duplicate_section_names_raises(self) -> None:
+ # Act / Assert
+ with pytest.raises(ValueError, match="Duplicate section 'Classes'"):
+ PagePriority(sections=["Classes", "Classes"])
diff --git a/tests/unit/doc_generation/pages/__init__.py b/tests/unit/doc_generation/pages/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/unit/doc_generation/pages/test_doc_page.py b/tests/unit/doc_generation/pages/test_doc_page.py
new file mode 100644
index 00000000..51dcf190
--- /dev/null
+++ b/tests/unit/doc_generation/pages/test_doc_page.py
@@ -0,0 +1,52 @@
+from __future__ import annotations
+
+from pathlib import Path
+
+from docs.docs_generation.content_gen_methods import ADocContentGenMethod
+from docs.docs_generation.pages import DocPage, MDXDocPage
+
+
+class StubContentGenMethod(ADocContentGenMethod):
+ def __init__(self, content: str) -> None:
+ self._content = content
+
+ def apply(self) -> str:
+ return self._content
+
+
+class TestDocPage:
+ def test_content_delegates_to_method(self) -> None:
+ # Arrange
+ page = DocPage(content_gen_method=StubContentGenMethod("test content"))
+
+ # Act
+ result = page.content()
+
+ # Assert
+ assert result == "test content"
+
+
+class TestMDXDocPage:
+ def test_to_mdx_writes_file(self, tmp_path: Path) -> None:
+ # Arrange
+ page = DocPage(content_gen_method=StubContentGenMethod("# Hello MDX"))
+ output_path = tmp_path / "output.mdx"
+
+ # Act
+ MDXDocPage(page=page, output_path=output_path).to_mdx()
+
+ # Assert
+ assert output_path.exists()
+ assert output_path.read_text(encoding="utf-8") == "# Hello MDX"
+
+ def test_to_mdx_creates_parent_directories(self, tmp_path: Path) -> None:
+ # Arrange
+ page = DocPage(content_gen_method=StubContentGenMethod("content"))
+ output_path = tmp_path / "nested" / "dir" / "output.mdx"
+
+ # Act
+ MDXDocPage(page=page, output_path=output_path).to_mdx()
+
+ # Assert
+ assert output_path.exists()
+ assert output_path.read_text(encoding="utf-8") == "content"
diff --git a/tests/unit/doc_generation/test_docs_validate.py b/tests/unit/doc_generation/test_docs_validate.py
new file mode 100644
index 00000000..b65f5bbf
--- /dev/null
+++ b/tests/unit/doc_generation/test_docs_validate.py
@@ -0,0 +1,93 @@
+from __future__ import annotations
+
+import os
+import subprocess # noqa: S404
+from pathlib import Path
+
+import pytest
+from invoke import Context, Exit
+
+import tasks
+
+_GIT_ENV = {
+ "GIT_AUTHOR_NAME": "test",
+ "GIT_AUTHOR_EMAIL": "test@test.com",
+ "GIT_COMMITTER_NAME": "test",
+ "GIT_COMMITTER_EMAIL": "test@test.com",
+ "PATH": os.environ.get("PATH", ""),
+ "HOME": os.environ.get("HOME", ""),
+}
+
+
+def _git(repo: Path, *args: str) -> None:
+ """Run a git command inside *repo* with deterministic author info."""
+ subprocess.check_call(["git", *args], cwd=repo, env={**_GIT_ENV, "HOME": str(repo)}) # noqa: S603, S607
+
+
+@pytest.fixture
+def git_repo_with_docs(tmp_path: Path) -> Path:
+ """Create a temporary git repo with a committed docs/ directory."""
+ docs_dir = tmp_path / "docs"
+ docs_dir.mkdir()
+ (docs_dir / "generated.mdx").write_text("# Original content\n")
+
+ _git(tmp_path, "init")
+ _git(tmp_path, "add", ".")
+ _git(tmp_path, "commit", "-m", "initial")
+ return tmp_path
+
+
+class TestDocsValidate:
+ """Ensure docs_validate() detects drift between committed and regenerated documentation."""
+
+ def test_passes_when_generation_produces_no_changes(
+ self, git_repo_with_docs: Path, monkeypatch: pytest.MonkeyPatch
+ ) -> None:
+ # Arrange
+ monkeypatch.setattr(tasks, "docs_generate", lambda context: None) # noqa: ARG005
+ monkeypatch.setattr(tasks, "DOCUMENTATION_DIRECTORY", git_repo_with_docs)
+
+ # Act / Assert — no exception means docs are in sync
+ tasks.docs_validate(Context())
+
+ def test_fails_when_generation_modifies_existing_file(
+ self, git_repo_with_docs: Path, monkeypatch: pytest.MonkeyPatch
+ ) -> None:
+ # Arrange
+ def fake_generate(context: Context) -> None:
+ (git_repo_with_docs / "docs" / "generated.mdx").write_text("# Modified content\n")
+
+ monkeypatch.setattr(tasks, "docs_generate", fake_generate)
+ monkeypatch.setattr(tasks, "DOCUMENTATION_DIRECTORY", git_repo_with_docs)
+
+ # Act / Assert
+ with pytest.raises(Exit, match="out of sync"):
+ tasks.docs_validate(Context())
+
+ def test_fails_when_generation_deletes_tracked_file(
+ self, git_repo_with_docs: Path, monkeypatch: pytest.MonkeyPatch
+ ) -> None:
+ # Arrange
+ def fake_generate(context: Context) -> None:
+ (git_repo_with_docs / "docs" / "generated.mdx").unlink()
+
+ monkeypatch.setattr(tasks, "docs_generate", fake_generate)
+ monkeypatch.setattr(tasks, "DOCUMENTATION_DIRECTORY", git_repo_with_docs)
+
+ # Act / Assert
+ with pytest.raises(Exit, match="Modified or deleted files"):
+ tasks.docs_validate(Context())
+
+ def test_fails_when_generation_creates_new_file(
+ self, git_repo_with_docs: Path, monkeypatch: pytest.MonkeyPatch
+ ) -> None:
+ # Arrange
+ def fake_generate(context: Context) -> None:
+ (git_repo_with_docs / "docs" / "new_file.mdx").write_text("# New\n")
+
+ monkeypatch.setattr(tasks, "docs_generate", fake_generate)
+ monkeypatch.setattr(tasks, "DOCUMENTATION_DIRECTORY", git_repo_with_docs)
+
+ # Act / Assert
+ with pytest.raises(Exit, match="New untracked files"):
+ tasks.docs_validate(Context())
diff --git a/tests/unit/doc_generation/test_helpers.py b/tests/unit/doc_generation/test_helpers.py
new file mode 100644
index 00000000..359654f0
--- /dev/null
+++ b/tests/unit/doc_generation/test_helpers.py
@@ -0,0 +1,77 @@
+from __future__ import annotations
+
+from docs.docs_generation.helpers import build_config_properties, get_env_vars
+
+
+class TestGetEnvVars:
+ def test_returns_dict(self) -> None:
+ # Act
+ result = get_env_vars()
+
+ # Assert
+ assert isinstance(result, dict)
+
+ def test_values_are_lists_of_strings(self) -> None:
+ # Act
+ result = get_env_vars()
+
+ # Assert
+ for key, values in result.items():
+ assert isinstance(key, str)
+ assert isinstance(values, list)
+ for v in values:
+ assert isinstance(v, str)
+
+ def test_env_vars_are_upper_case(self) -> None:
+ # Act
+ result = get_env_vars()
+
+ # Assert
+ for values in result.values():
+ for v in values:
+ assert v == v.upper()
+
+ def test_address_field_has_env_var(self) -> None:
+ # Act
+ result = get_env_vars()
+
+ # Assert
+ assert "address" in result
+ assert len(result["address"]) > 0
+
+
+class TestBuildConfigProperties:
+ def test_returns_list(self) -> None:
+ # Act
+ result = build_config_properties()
+
+ # Assert
+ assert isinstance(result, list)
+ assert len(result) > 0
+
+ def test_each_property_has_required_keys(self) -> None:
+ # Arrange
+ required_keys = {"name", "description", "type", "choices", "default", "env_vars"}
+
+ # Act
+ result = build_config_properties()
+
+ # Assert
+ for prop in result:
+ assert required_keys.issubset(prop.keys())
+
+ def test_address_property_exists(self) -> None:
+ # Act
+ result = build_config_properties()
+
+ # Assert
+ names = [p["name"] for p in result]
+ assert "address" in names
+
+ def test_address_has_env_vars(self) -> None:
+ # Act
+ result = build_config_properties()
+
+ # Assert
+ address_prop = next(p for p in result if p["name"] == "address")
+ assert len(address_prop["env_vars"]) > 0
diff --git a/tests/unit/sdk/conftest.py b/tests/unit/sdk/conftest.py
index 8fb9ecf2..ad725532 100644
--- a/tests/unit/sdk/conftest.py
+++ b/tests/unit/sdk/conftest.py
@@ -1015,115 +1015,6 @@ async def ipam_ipprefix_data() -> dict[str, Any]:
}
-@pytest.fixture
-async def ipaddress_pool_schema() -> NodeSchemaAPI:
- data = {
- "name": "IPAddressPool",
- "namespace": "Core",
- "description": "A pool of IP address resources",
- "label": "IP Address Pool",
- "default_filter": "name__value",
- "order_by": ["name__value"],
- "display_labels": ["name__value"],
- "include_in_menu": False,
- "branch": BranchSupportType.AGNOSTIC.value,
- "inherit_from": ["CoreResourcePool"],
- "attributes": [
- {
- "name": "default_address_type",
- "kind": "Text",
- "optional": False,
- "description": "The object type to create when reserving a resource in the pool",
- },
- {
- "name": "default_prefix_length",
- "kind": "Number",
- "optional": True,
- },
- ],
- "relationships": [
- {
- "name": "resources",
- "peer": "BuiltinIPPrefix",
- "kind": "Attribute",
- "identifier": "ipaddresspool__resource",
- "cardinality": "many",
- "optional": False,
- "order_weight": 4000,
- },
- {
- "name": "ip_namespace",
- "peer": "BuiltinIPNamespace",
- "kind": "Attribute",
- "identifier": "ipaddresspool__ipnamespace",
- "cardinality": "one",
- "optional": False,
- "order_weight": 5000,
- },
- ],
- }
- return NodeSchema(**data).convert_api()
-
-
-@pytest.fixture
-async def ipprefix_pool_schema() -> NodeSchemaAPI:
- data = {
- "name": "IPPrefixPool",
- "namespace": "Core",
- "description": "A pool of IP prefix resources",
- "label": "IP Prefix Pool",
- "include_in_menu": False,
- "branch": BranchSupportType.AGNOSTIC.value,
- "inherit_from": ["CoreResourcePool"],
- "attributes": [
- {
- "name": "default_prefix_length",
- "kind": "Number",
- "description": "The default prefix length as an integer for prefixes allocated from this pool.",
- "optional": True,
- "order_weight": 5000,
- },
- {
- "name": "default_member_type",
- "kind": "Text",
- "enum": ["prefix", "address"],
- "default_value": "prefix",
- "optional": True,
- "order_weight": 3000,
- },
- {
- "name": "default_prefix_type",
- "kind": "Text",
- "optional": True,
- "order_weight": 4000,
- },
- ],
- "relationships": [
- {
- "name": "resources",
- "peer": "BuiltinIPPrefix",
- "kind": "Attribute",
- "identifier": "prefixpool__resource",
- "cardinality": "many",
- "branch": BranchSupportType.AGNOSTIC.value,
- "optional": False,
- "order_weight": 6000,
- },
- {
- "name": "ip_namespace",
- "peer": "BuiltinIPNamespace",
- "kind": "Attribute",
- "identifier": "prefixpool__ipnamespace",
- "cardinality": "one",
- "branch": BranchSupportType.AGNOSTIC.value,
- "optional": False,
- "order_weight": 7000,
- },
- ],
- }
- return NodeSchema(**data).convert_api()
-
-
@pytest.fixture
async def address_schema() -> NodeSchemaAPI:
data = {
@@ -2645,3 +2536,69 @@ async def nested_device_with_interfaces_schema() -> NodeSchemaAPI:
],
}
return NodeSchema(**data).convert_api()
+
+
+@pytest.fixture
+async def file_object_schema() -> NodeSchemaAPI:
+ """Schema for a node that inherits from CoreFileObject."""
+ data = {
+ "name": "CircuitContract",
+ "namespace": "Network",
+ "label": "Circuit Contract",
+ "default_filter": "file_name__value",
+ "inherit_from": ["CoreFileObject"],
+ "order_by": ["file_name__value"],
+ "display_labels": ["file_name__value"],
+ "attributes": [
+ # Simulate inherited attributes from CoreFileObject
+ {"name": "file_name", "kind": "Text", "read_only": True, "optional": False},
+ {"name": "checksum", "kind": "Text", "read_only": True, "optional": False},
+ {"name": "file_size", "kind": "Number", "read_only": True, "optional": False},
+ {"name": "file_type", "kind": "Text", "read_only": True, "optional": False},
+ {"name": "storage_id", "kind": "Text", "read_only": True, "optional": False},
+ {"name": "contract_start", "kind": "DateTime", "optional": False},
+ {"name": "contract_end", "kind": "DateTime", "optional": False},
+ ],
+ "relationships": [],
+ }
+ return NodeSchema(**data).convert_api()
+
+
+@pytest.fixture
+async def non_file_object_schema() -> NodeSchemaAPI:
+ """Schema for a regular node that does not inherit from CoreFileObject."""
+ data = {
+ "name": "Device",
+ "namespace": "Infra",
+ "label": "Device",
+ "default_filter": "name__value",
+ "inherit_from": [],
+ "order_by": ["name__value"],
+ "display_labels": ["name__value"],
+ "attributes": [
+ {"name": "name", "kind": "Text", "unique": True},
+ {"name": "description", "kind": "Text", "optional": True},
+ ],
+ "relationships": [],
+ }
+ return NodeSchema(**data).convert_api()
+
+
+@pytest.fixture
+async def vlan_schema() -> NodeSchemaAPI:
+ data = {
+ "name": "VLAN",
+ "namespace": "Infra",
+ "label": "VLAN",
+ "default_filter": "name__value",
+ "order_by": ["name__value"],
+ "display_labels": ["name__value"],
+ "attributes": [
+ {"name": "name", "kind": "Text", "unique": True},
+ {"name": "vlan_id", "kind": "Number"},
+ {"name": "role", "kind": "Text", "optional": True},
+ {"name": "status", "kind": "Text", "optional": True},
+ ],
+ "relationships": [],
+ }
+ return NodeSchema(**data).convert_api()
diff --git a/tests/unit/sdk/graphql/test_multipart.py b/tests/unit/sdk/graphql/test_multipart.py
new file mode 100644
index 00000000..35a0e58a
--- /dev/null
+++ b/tests/unit/sdk/graphql/test_multipart.py
@@ -0,0 +1,177 @@
+"""Unit tests for MultipartBuilder class."""
+
+from __future__ import annotations
+
+from io import BytesIO
+
+import ujson
+
+from infrahub_sdk.graphql import MultipartBuilder
+
+
+def test_build_operations_simple() -> None:
+ """Test building operations with simple query and variables."""
+ query = "mutation($file: Upload!) { upload(file: $file) { id } }"
+ variables = {"other": "value"}
+
+ result = MultipartBuilder.build_operations(query=query, variables=variables)
+
+ parsed = ujson.loads(result)
+ assert parsed["query"] == query
+ assert parsed["variables"] == variables
+
+
+def test_build_operations_empty_variables() -> None:
+ """Test building operations with empty variables."""
+ query = "mutation { doSomething { id } }"
+ variables: dict[str, str] = {}
+
+ result = MultipartBuilder.build_operations(query=query, variables=variables)
+
+ parsed = ujson.loads(result)
+ assert parsed["query"] == query
+ assert parsed["variables"] == {}
+
+
+def test_build_operations_complex_variables() -> None:
+ """Test building operations with nested variables."""
+ query = "mutation($input: CreateInput!) { create(input: $input) { id } }"
+ variables = {"input": {"name": "test", "nested": {"value": 123}, "list": [1, 2, 3]}}
+
+ result = MultipartBuilder.build_operations(query=query, variables=variables)
+
+ parsed = ujson.loads(result)
+ assert parsed["variables"]["input"]["name"] == "test"
+ assert parsed["variables"]["input"]["nested"]["value"] == 123
+ assert parsed["variables"]["input"]["list"] == [1, 2, 3]
+
+
+def test_build_file_map_defaults() -> None:
+ """Test building file map with default values."""
+ result = MultipartBuilder.build_file_map()
+
+ parsed = ujson.loads(result)
+ assert parsed == {"0": ["variables.file"]}
+
+
+def test_build_file_map_custom_key() -> None:
+ """Test building file map with custom file key."""
+ result = MultipartBuilder.build_file_map(file_key="1")
+
+ parsed = ujson.loads(result)
+ assert parsed == {"1": ["variables.file"]}
+
+
+def test_build_file_map_custom_path() -> None:
+ """Test building file map with custom variable path."""
+ result = MultipartBuilder.build_file_map(variable_path="variables.input.document")
+
+ parsed = ujson.loads(result)
+ assert parsed == {"0": ["variables.input.document"]}
+
+
+def test_build_file_map_both_custom() -> None:
+ """Test building file map with both custom values."""
+ result = MultipartBuilder.build_file_map(file_key="attachment", variable_path="variables.attachment")
+
+ parsed = ujson.loads(result)
+ assert parsed == {"attachment": ["variables.attachment"]}
+
+
+def test_build_payload_with_file() -> None:
+ """Test building complete payload with file content."""
+ query = "mutation($file: Upload!) { upload(file: $file) { id } }"
+ variables = {"other": "value"}
+ file_content = BytesIO(b"test file content")
+ file_name = "document.pdf"
+
+ result = MultipartBuilder.build_payload(
+ query=query, variables=variables, file_content=file_content, file_name=file_name
+ )
+
+ # Check operations
+ assert "operations" in result
+ assert result["operations"][0] is None # No filename for operations
+ operations_json = ujson.loads(result["operations"][1])
+ assert operations_json["query"] == query
+ assert operations_json["variables"]["other"] == "value"
+ assert operations_json["variables"]["file"] is None # File var should be null
+
+ # Check map
+ assert "map" in result
+ assert result["map"][0] is None
+ map_json = ujson.loads(result["map"][1])
+ assert map_json == {"0": ["variables.file"]}
+
+ # Check file
+ assert "0" in result
+ assert result["0"][0] == file_name
+ assert result["0"][1] is file_content
+
+
+def test_build_payload_without_file() -> None:
+ """Test building payload without file content."""
+ query = "mutation($file: Upload!) { upload(file: $file) { id } }"
+ variables = {"other": "value"}
+
+ result = MultipartBuilder.build_payload(query=query, variables=variables, file_content=None, file_name="unused.txt")
+
+ # Should have operations and map
+ assert "operations" in result
+ assert "map" in result
+
+ # Should NOT have file key
+ assert "0" not in result
+
+
+def test_build_payload_sets_file_var_to_null() -> None:
+ """Test that build_payload sets file variable to null per spec."""
+ query = "mutation($file: Upload!) { upload(file: $file) { id } }"
+ variables = {"file": "should_be_overwritten", "other": "value"}
+ file_content = BytesIO(b"content")
+
+ result = MultipartBuilder.build_payload(
+ query=query, variables=variables, file_content=file_content, file_name="test.txt"
+ )
+
+ operations_json = ujson.loads(result["operations"][1])
+ assert operations_json["variables"]["file"] is None
+ assert operations_json["variables"]["other"] == "value"
+
+
+def test_build_payload_default_filename() -> None:
+ """Test that default filename is used when not specified."""
+ query = "mutation($file: Upload!) { upload(file: $file) { id } }"
+ file_content = BytesIO(b"content")
+
+ result = MultipartBuilder.build_payload(
+ query=query,
+ variables={},
+ file_content=file_content,
+ )
+
+ assert result["0"][0] == "upload"
+
+
+def test_build_payload_preserves_existing_variables() -> None:
+ """Test that existing variables are preserved in the payload."""
+ query = "mutation($file: Upload!, $nodeId: ID!) { upload(file: $file, node: $nodeId) { id } }"
+ variables = {
+ "nodeId": "node-123",
+ "description": "A test file",
+ "nested": {"key": "value"},
+ }
+ file_content = BytesIO(b"content")
+
+ result = MultipartBuilder.build_payload(
+ query=query,
+ variables=variables,
+ file_content=file_content,
+ file_name="test.txt",
+ )
+
+ operations_json = ujson.loads(result["operations"][1])
+ assert operations_json["variables"]["nodeId"] == "node-123"
+ assert operations_json["variables"]["description"] == "A test file"
+ assert operations_json["variables"]["nested"] == {"key": "value"}
+ assert operations_json["variables"]["file"] is None # file is null per spec
diff --git a/tests/unit/sdk/pool/__init__.py b/tests/unit/sdk/pool/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/unit/sdk/pool/conftest.py b/tests/unit/sdk/pool/conftest.py
new file mode 100644
index 00000000..9d0fd245
--- /dev/null
+++ b/tests/unit/sdk/pool/conftest.py
@@ -0,0 +1,116 @@
+from __future__ import annotations
+
+from typing import Any
+
+import pytest
+
+from infrahub_sdk.schema import BranchSupportType, NodeSchema, NodeSchemaAPI
+
+
+@pytest.fixture
+async def ipaddress_pool_schema() -> NodeSchemaAPI:
+ data: dict[str, Any] = {
+ "name": "IPAddressPool",
+ "namespace": "Core",
+ "description": "A pool of IP address resources",
+ "label": "IP Address Pool",
+ "default_filter": "name__value",
+ "order_by": ["name__value"],
+ "display_labels": ["name__value"],
+ "include_in_menu": False,
+ "branch": BranchSupportType.AGNOSTIC.value,
+ "inherit_from": ["CoreResourcePool"],
+ "attributes": [
+ {
+ "name": "default_address_type",
+ "kind": "Text",
+ "optional": False,
+ "description": "The object type to create when reserving a resource in the pool",
+ },
+ {
+ "name": "default_prefix_length",
+ "kind": "Number",
+ "optional": True,
+ },
+ ],
+ "relationships": [
+ {
+ "name": "resources",
+ "peer": "BuiltinIPPrefix",
+ "kind": "Attribute",
+ "identifier": "ipaddresspool__resource",
+ "cardinality": "many",
+ "optional": False,
+ "order_weight": 4000,
+ },
+ {
+ "name": "ip_namespace",
+ "peer": "BuiltinIPNamespace",
+ "kind": "Attribute",
+ "identifier": "ipaddresspool__ipnamespace",
+ "cardinality": "one",
+ "optional": False,
+ "order_weight": 5000,
+ },
+ ],
+ }
+ return NodeSchema(**data).convert_api()
+
+
+@pytest.fixture
+async def ipprefix_pool_schema() -> NodeSchemaAPI:
+ data: dict[str, Any] = {
+ "name": "IPPrefixPool",
+ "namespace": "Core",
+ "description": "A pool of IP prefix resources",
+ "label": "IP Prefix Pool",
+ "include_in_menu": False,
+ "branch": BranchSupportType.AGNOSTIC.value,
+ "inherit_from": ["CoreResourcePool"],
+ "attributes": [
+ {
+ "name": "default_prefix_length",
+ "kind": "Number",
+ "description": "The default prefix length as an integer for prefixes allocated from this pool.",
+ "optional": True,
+ "order_weight": 5000,
+ },
+ {
+ "name": "default_member_type",
+ "kind": "Text",
+ "enum": ["prefix", "address"],
+ "default_value": "prefix",
+ "optional": True,
+ "order_weight": 3000,
+ },
+ {
+ "name": "default_prefix_type",
+ "kind": "Text",
+ "optional": True,
+ "order_weight": 4000,
+ },
+ ],
+ "relationships": [
+ {
+ "name": "resources",
+ "peer": "BuiltinIPPrefix",
+ "kind": "Attribute",
+ "identifier": "prefixpool__resource",
+ "cardinality": "many",
+ "branch": BranchSupportType.AGNOSTIC.value,
+ "optional": False,
+ "order_weight": 6000,
+ },
+ {
+ "name": "ip_namespace",
+ "peer": "BuiltinIPNamespace",
+ "kind": "Attribute",
+ "identifier": "prefixpool__ipnamespace",
+ "cardinality": "one",
+ "branch": BranchSupportType.AGNOSTIC.value,
+ "optional": False,
+ "order_weight": 7000,
+ },
+ ],
+ }
+ return NodeSchema(**data).convert_api()
diff --git a/tests/unit/sdk/pool/test_allocate.py b/tests/unit/sdk/pool/test_allocate.py
new file mode 100644
index 00000000..eacc1a7b
--- /dev/null
+++ b/tests/unit/sdk/pool/test_allocate.py
@@ -0,0 +1,220 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, cast
+
+import pytest
+
+from infrahub_sdk.node import InfrahubNode, InfrahubNodeSync
+
+if TYPE_CHECKING:
+ from typing import Any
+
+ from pytest_httpx import HTTPXMock
+
+ from infrahub_sdk.protocols_base import CoreNode, CoreNodeSync
+ from infrahub_sdk.schema import NodeSchemaAPI
+ from tests.unit.sdk.conftest import BothClients
+
+client_types = ["standard", "sync"]
+
+
+@pytest.mark.parametrize("client_type", client_types)
+async def test_allocate_next_ip_address(
+ httpx_mock: HTTPXMock,
+ mock_schema_query_ipam: HTTPXMock,
+ clients: BothClients,
+ ipaddress_pool_schema: NodeSchemaAPI,
+ ipam_ipprefix_schema: NodeSchemaAPI,
+ ipam_ipprefix_data: dict[str, Any],
+ client_type: str,
+) -> None:
+ httpx_mock.add_response(
+ method="POST",
+ json={
+ "data": {
+ "InfrahubIPAddressPoolGetResource": {
+ "ok": True,
+ "node": {
+ "id": "17da1246-54f1-a9c0-2784-179f0ec5b128",
+ "kind": "IpamIPAddress",
+ "identifier": "test",
+ "display_label": "192.0.2.0/32",
+ },
+ }
+ }
+ },
+ match_headers={"X-Infrahub-Tracker": "allocate-ip-loopback"},
+ is_reusable=True,
+ )
+ httpx_mock.add_response(
+ method="POST",
+ json={
+ "data": {
+ "IpamIPAddress": {
+ "count": 1,
+ "edges": [
+ {
+ "node": {
+ "id": "17d9bd8d-8fc2-70b0-278a-179f425e25cb",
+ "__typename": "IpamIPAddress",
+ "address": {"value": "192.0.2.0/32"},
+ "description": {"value": "test"},
+ }
+ }
+ ],
+ }
+ }
+ },
+ match_headers={"X-Infrahub-Tracker": "query-ipamipaddress-page1"},
+ is_reusable=True,
+ )
+
+ if client_type == "standard":
+ ip_prefix = InfrahubNode(client=clients.standard, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data)
+ ip_pool = InfrahubNode(
+ client=clients.standard,
+ schema=ipaddress_pool_schema,
+ data={
+ "id": "pppppppp-pppp-pppp-pppp-pppppppppppp",
+ "name": "Core loopbacks",
+ "default_address_type": "IpamIPAddress",
+ "default_prefix_length": 32,
+ "ip_namespace": "ip_namespace",
+ "resources": [ip_prefix],
+ },
+ )
+ ip_address = await clients.standard.allocate_next_ip_address(
+ resource_pool=cast("CoreNode", ip_pool),
+ identifier="test",
+ prefix_length=32,
+ address_type="IpamIPAddress",
+ data={"description": "test"},
+ tracker="allocate-ip-loopback",
+ )
+ else:
+ ip_prefix = InfrahubNodeSync(client=clients.sync, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data)
+ ip_pool = InfrahubNodeSync(
+ client=clients.sync,
+ schema=ipaddress_pool_schema,
+ data={
+ "id": "pppppppp-pppp-pppp-pppp-pppppppppppp",
+ "name": "Core loopbacks",
+ "default_address_type": "IpamIPAddress",
+ "default_prefix_length": 32,
+ "ip_namespace": "ip_namespace",
+ "resources": [ip_prefix],
+ },
+ )
+ ip_address = clients.sync.allocate_next_ip_address(
+ resource_pool=cast("CoreNodeSync", ip_pool),
+ identifier="test",
+ prefix_length=32,
+ address_type="IpamIPAddress",
+ data={"description": "test"},
+ tracker="allocate-ip-loopback",
+ )
+
+ assert ip_address
+ assert str(cast("InfrahubNodeSync", ip_address).address.value) == "192.0.2.0/32"
+ assert cast("InfrahubNodeSync", ip_address).description.value == "test"
+
+
+@pytest.mark.parametrize("client_type", client_types)
+async def test_allocate_next_ip_prefix(
+ httpx_mock: HTTPXMock,
+ mock_schema_query_ipam: HTTPXMock,
+ clients: BothClients,
+ ipprefix_pool_schema: NodeSchemaAPI,
+ ipam_ipprefix_schema: NodeSchemaAPI,
+ ipam_ipprefix_data: dict[str, Any],
+ client_type: str,
+) -> None:
+ httpx_mock.add_response(
+ method="POST",
+ json={
+ "data": {
+ "InfrahubIPPrefixPoolGetResource": {
+ "ok": True,
+ "node": {
+ "id": "17d9bd8d-8fc2-70b0-278a-179f425e25cb",
+ "kind": "IpamIPPrefix",
+ "identifier": "test",
+ "display_label": "192.0.2.0/31",
+ },
+ }
+ }
+ },
+ match_headers={"X-Infrahub-Tracker": "allocate-ip-interco"},
+ is_reusable=True,
+ )
+ httpx_mock.add_response(
+ method="POST",
+ json={
+ "data": {
+ "IpamIPPrefix": {
+ "count": 1,
+ "edges": [
+ {
+ "node": {
+ "id": "17d9bd8d-8fc2-70b0-278a-179f425e25cb",
+ "__typename": "IpamIPPrefix",
+ "prefix": {"value": "192.0.2.0/31"},
+ "description": {"value": "test"},
+ }
+ }
+ ],
+ }
+ }
+ },
+ match_headers={"X-Infrahub-Tracker": "query-ipamipprefix-page1"},
+ is_reusable=True,
+ )
+
+ if client_type == "standard":
+ ip_prefix = InfrahubNode(client=clients.standard, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data)
+ ip_pool = InfrahubNode(
+ client=clients.standard,
+ schema=ipprefix_pool_schema,
+ data={
+ "id": "pppppppp-pppp-pppp-pppp-pppppppppppp",
+ "name": "Core intercos",
+ "default_prefix_type": "IpamIPPrefix",
+ "default_prefix_length": 31,
+ "ip_namespace": "ip_namespace",
+ "resources": [ip_prefix],
+ },
+ )
+ ip_prefix = await clients.standard.allocate_next_ip_prefix(
+ resource_pool=cast("CoreNode", ip_pool),
+ identifier="test",
+ prefix_length=31,
+ prefix_type="IpamIPPrefix",
+ data={"description": "test"},
+ tracker="allocate-ip-interco",
+ )
+ else:
+ ip_prefix = InfrahubNodeSync(client=clients.sync, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data)
+ ip_pool = InfrahubNodeSync(
+ client=clients.sync,
+ schema=ipprefix_pool_schema,
+ data={
+ "id": "pppppppp-pppp-pppp-pppp-pppppppppppp",
+ "name": "Core intercos",
+ "default_prefix_type": "IpamIPPrefix",
+ "default_prefix_length": 31,
+ "ip_namespace": "ip_namespace",
+ "resources": [ip_prefix],
+ },
+ )
+ ip_prefix = clients.sync.allocate_next_ip_prefix(
+ resource_pool=cast("CoreNodeSync", ip_pool),
+ identifier="test",
+ prefix_length=31,
+ prefix_type="IpamIPPrefix",
+ data={"description": "test"},
+ tracker="allocate-ip-interco",
+ )
+
+ assert ip_prefix
+ assert str(cast("InfrahubNodeSync", ip_prefix).prefix.value) == "192.0.2.0/31" # type: ignore[unresolved-attribute]
+ assert cast("InfrahubNodeSync", ip_prefix).description.value == "test" # type: ignore[unresolved-attribute]
diff --git a/tests/unit/sdk/pool/test_attribute_from_pool.py b/tests/unit/sdk/pool/test_attribute_from_pool.py
new file mode 100644
index 00000000..75d63f6d
--- /dev/null
+++ b/tests/unit/sdk/pool/test_attribute_from_pool.py
@@ -0,0 +1,204 @@
+"""
+When using from_pool on a number attribute (e.g. vlan_id), the SDK should generate:
+ vlan_id: { from_pool: { id: "...", identifier: "..." } }
+
+There are two ways to request a pool allocation:
+1. Dict-based: {"from_pool": {"id": "...", "identifier": "..."}}
+2. Node-based: pass an InfrahubNode pool object as the attribute value
+"""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+
+from infrahub_sdk.node import InfrahubNode, InfrahubNodeSync
+
+if TYPE_CHECKING:
+ from infrahub_sdk import InfrahubClient, InfrahubClientSync
+ from infrahub_sdk.schema import NodeSchemaAPI
+
+
+POOL_ID = "185b9728-1b76-dda7-d13d-106529b1bcd9"
+
+
+# ──────────────────────────────────────────────
+# Dict-based from_pool - async client
+# ──────────────────────────────────────────────
+
+
+async def test_number_attribute_from_pool_with_identifier(
+ client: InfrahubClient,
+ vlan_schema: NodeSchemaAPI,
+) -> None:
+ """A number attribute with from_pool and identifier should NOT be wrapped in value."""
+ data: dict[str, Any] = {
+ "name": "Example VLAN",
+ "vlan_id": {"from_pool": {"id": POOL_ID, "identifier": "test"}},
+ "role": "user",
+ "status": "active",
+ }
+ node = InfrahubNode(client=client, schema=vlan_schema, data=data)
+
+ # Act
+ input_data = node._generate_input_data()["data"]["data"]
+
+ assert input_data["name"] == {"value": "Example VLAN"}
+ assert input_data["role"] == {"value": "user"}
+ assert input_data["status"] == {"value": "active"}
+ assert input_data["vlan_id"] == {"from_pool": {"id": POOL_ID, "identifier": "test"}}
+ assert "value" not in input_data["vlan_id"]
+
+
+async def test_number_attribute_regular_value(
+ client: InfrahubClient,
+ vlan_schema: NodeSchemaAPI,
+) -> None:
+ """Regular number values should still be wrapped in value as before."""
+ data: dict[str, Any] = {
+ "name": "Example VLAN",
+ "vlan_id": 100,
+ }
+ node = InfrahubNode(client=client, schema=vlan_schema, data=data)
+
+ # Act
+ input_data = node._generate_input_data()["data"]["data"]
+
+ assert input_data["name"] == {"value": "Example VLAN"}
+ assert input_data["vlan_id"] == {"value": 100}
+
+
+async def test_number_attribute_from_pool_mutation_query(
+ client: InfrahubClient,
+ vlan_schema: NodeSchemaAPI,
+) -> None:
+ """A from_pool dict attribute should request value back in the mutation query."""
+ data: dict[str, Any] = {
+ "name": "Example VLAN",
+ "vlan_id": {"from_pool": {"id": POOL_ID, "identifier": "test"}},
+ }
+ node = InfrahubNode(client=client, schema=vlan_schema, data=data)
+
+ # Act
+ mutation_query = node._generate_mutation_query()
+
+ assert mutation_query["object"]["vlan_id"] == {"value": None}
+
+
+# ──────────────────────────────────────────────
+# Dict-based from_pool - sync client
+# ──────────────────────────────────────────────
+
+
+async def test_sync_number_attribute_from_pool_with_identifier(
+ client_sync: InfrahubClientSync,
+ vlan_schema: NodeSchemaAPI,
+) -> None:
+ """A number attribute with from_pool and identifier should NOT be wrapped in value (sync client)."""
+ data: dict[str, Any] = {
+ "name": "Example VLAN",
+ "vlan_id": {"from_pool": {"id": POOL_ID, "identifier": "test"}},
+ "role": "user",
+ "status": "active",
+ }
+ node = InfrahubNodeSync(client=client_sync, schema=vlan_schema, data=data)
+
+ # Act
+ input_data = node._generate_input_data()["data"]["data"]
+
+ assert input_data["name"] == {"value": "Example VLAN"}
+ assert input_data["role"] == {"value": "user"}
+ assert input_data["status"] == {"value": "active"}
+ assert input_data["vlan_id"] == {"from_pool": {"id": POOL_ID, "identifier": "test"}}
+ assert "value" not in input_data["vlan_id"]
+
+
+async def test_sync_number_attribute_regular_value(
+ client_sync: InfrahubClientSync,
+ vlan_schema: NodeSchemaAPI,
+) -> None:
+ """Regular number values should still be wrapped in value as before (sync client)."""
+ data: dict[str, Any] = {
+ "name": "Example VLAN",
+ "vlan_id": 100,
+ }
+ node = InfrahubNodeSync(client=client_sync, schema=vlan_schema, data=data)
+
+ # Act
+ input_data = node._generate_input_data()["data"]["data"]
+
+ assert input_data["name"] == {"value": "Example VLAN"}
+ assert input_data["vlan_id"] == {"value": 100}
+
+
+# ──────────────────────────────────────────────
+# Node-based from_pool - async client
+# ──────────────────────────────────────────────
+
+NODE_POOL_ID = "185b9728-1b56-dda7-d13d-106535b1bcd9"
+
+
+async def test_attribute_with_pool_node_generates_from_pool(
+ client: InfrahubClient,
+ vlan_schema: NodeSchemaAPI,
+ ipaddress_pool_schema: NodeSchemaAPI,
+ ipam_ipprefix_schema: NodeSchemaAPI,
+ ipam_ipprefix_data: dict[str, Any],
+) -> None:
+ """When an attribute value is a CoreNodeBase pool node, _generate_input_data should produce from_pool."""
+ ip_prefix = InfrahubNode(client=client, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data)
+ ip_pool = InfrahubNode(
+ client=client,
+ schema=ipaddress_pool_schema,
+ data={
+ "id": NODE_POOL_ID,
+ "name": "Core loopbacks",
+ "default_address_type": "IpamIPAddress",
+ "default_prefix_length": 32,
+ "ip_namespace": "ip_namespace",
+ "resources": [ip_prefix],
+ },
+ )
+ vlan = InfrahubNode(
+ client=client,
+ schema=vlan_schema,
+ data={"name": "Example VLAN", "vlan_id": ip_pool},
+ )
+
+ # Act
+ input_data = vlan._generate_input_data()["data"]["data"]
+
+ assert input_data["vlan_id"] == {"from_pool": {"id": NODE_POOL_ID}}
+ assert "value" not in input_data["vlan_id"]
+
+
+async def test_attribute_with_pool_node_generates_mutation_query(
+ client: InfrahubClient,
+ vlan_schema: NodeSchemaAPI,
+ ipaddress_pool_schema: NodeSchemaAPI,
+ ipam_ipprefix_schema: NodeSchemaAPI,
+ ipam_ipprefix_data: dict[str, Any],
+) -> None:
+ """When an attribute value is a CoreNodeBase pool node, _generate_mutation_query should request value back."""
+ ip_prefix = InfrahubNode(client=client, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data)
+ ip_pool = InfrahubNode(
+ client=client,
+ schema=ipaddress_pool_schema,
+ data={
+ "id": NODE_POOL_ID,
+ "name": "Core loopbacks",
+ "default_address_type": "IpamIPAddress",
+ "default_prefix_length": 32,
+ "ip_namespace": "ip_namespace",
+ "resources": [ip_prefix],
+ },
+ )
+ vlan = InfrahubNode(
+ client=client,
+ schema=vlan_schema,
+ data={"name": "Example VLAN", "vlan_id": ip_pool},
+ )
+
+ # Act
+ mutation_query = vlan._generate_mutation_query()
+
+ assert mutation_query["object"]["vlan_id"] == {"value": None}
diff --git a/tests/unit/sdk/pool/test_pool_queries.py b/tests/unit/sdk/pool/test_pool_queries.py
new file mode 100644
index 00000000..4f27cba7
--- /dev/null
+++ b/tests/unit/sdk/pool/test_pool_queries.py
@@ -0,0 +1,185 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import pytest
+
+from infrahub_sdk.node import InfrahubNode, InfrahubNodeSync
+
+if TYPE_CHECKING:
+ from typing import Any
+
+ from pytest_httpx import HTTPXMock
+
+ from infrahub_sdk.schema import NodeSchemaAPI
+ from tests.unit.sdk.conftest import BothClients
+
+client_types = ["standard", "sync"]
+
+
+@pytest.mark.parametrize("client_type", client_types)
+async def test_get_pool_allocated_resources(
+ httpx_mock: HTTPXMock,
+ mock_schema_query_ipam: HTTPXMock,
+ clients: BothClients,
+ ipaddress_pool_schema: NodeSchemaAPI,
+ ipam_ipprefix_schema: NodeSchemaAPI,
+ ipam_ipprefix_data: dict[str, Any],
+ client_type: str,
+) -> None:
+ httpx_mock.add_response(
+ method="POST",
+ json={
+ "data": {
+ "InfrahubResourcePoolAllocated": {
+ "count": 2,
+ "edges": [
+ {
+ "node": {
+ "id": "17d9bd8d-8fc2-70b0-278a-179f425e25cb",
+ "kind": "IpamIPAddress",
+ "branch": "main",
+ "identifier": "ip-1",
+ }
+ },
+ {
+ "node": {
+ "id": "17d9bd8e-31ee-acf0-2786-179fb76f2f67",
+ "kind": "IpamIPAddress",
+ "branch": "main",
+ "identifier": "ip-2",
+ }
+ },
+ ],
+ }
+ }
+ },
+ match_headers={"X-Infrahub-Tracker": "get-allocated-resources-page1"},
+ )
+ httpx_mock.add_response(
+ method="POST",
+ json={
+ "data": {
+ "IpamIPAddress": {
+ "count": 2,
+ "edges": [
+ {"node": {"id": "17d9bd8d-8fc2-70b0-278a-179f425e25cb", "__typename": "IpamIPAddress"}},
+ {"node": {"id": "17d9bd8e-31ee-acf0-2786-179fb76f2f67", "__typename": "IpamIPAddress"}},
+ ],
+ }
+ }
+ },
+ match_headers={"X-Infrahub-Tracker": "query-ipamipaddress-page1"},
+ )
+
+ if client_type == "standard":
+ ip_prefix = InfrahubNode(client=clients.standard, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data)
+ ip_pool = InfrahubNode(
+ client=clients.standard,
+ schema=ipaddress_pool_schema,
+ data={
+ "id": "pppppppp-pppp-pppp-pppp-pppppppppppp",
+ "name": "Core loopbacks",
+ "default_address_type": "IpamIPAddress",
+ "default_prefix_length": 32,
+ "ip_namespace": "ip_namespace",
+ "resources": [ip_prefix],
+ },
+ )
+
+ resources = await ip_pool.get_pool_allocated_resources(resource=ip_prefix)
+ assert len(resources) == 2
+ assert [resource.id for resource in resources] == [
+ "17d9bd8d-8fc2-70b0-278a-179f425e25cb",
+ "17d9bd8e-31ee-acf0-2786-179fb76f2f67",
+ ]
+ else:
+ ip_prefix = InfrahubNodeSync(client=clients.sync, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data)
+ ip_pool = InfrahubNodeSync(
+ client=clients.sync,
+ schema=ipaddress_pool_schema,
+ data={
+ "id": "pppppppp-pppp-pppp-pppp-pppppppppppp",
+ "name": "Core loopbacks",
+ "default_address_type": "IpamIPAddress",
+ "default_prefix_length": 32,
+ "ip_namespace": "ip_namespace",
+ "resources": [ip_prefix],
+ },
+ )
+
+ resources = ip_pool.get_pool_allocated_resources(resource=ip_prefix)
+ assert len(resources) == 2
+ assert [resource.id for resource in resources] == [
+ "17d9bd8d-8fc2-70b0-278a-179f425e25cb",
+ "17d9bd8e-31ee-acf0-2786-179fb76f2f67",
+ ]
+
+
+@pytest.mark.parametrize("client_type", client_types)
+async def test_get_pool_resources_utilization(
+ httpx_mock: HTTPXMock,
+ clients: BothClients,
+ ipaddress_pool_schema: NodeSchemaAPI,
+ ipam_ipprefix_schema: NodeSchemaAPI,
+ ipam_ipprefix_data: dict[str, Any],
+ client_type: str,
+) -> None:
+ httpx_mock.add_response(
+ method="POST",
+ json={
+ "data": {
+ "InfrahubResourcePoolUtilization": {
+ "count": 1,
+ "edges": [
+ {
+ "node": {
+ "id": "17d9bd86-3471-a020-2782-179ff078e58f",
+ "utilization": 93.75,
+ "utilization_branches": 0,
+ "utilization_default_branch": 93.75,
+ }
+ }
+ ],
+ }
+ }
+ },
+ match_headers={"X-Infrahub-Tracker": "get-pool-utilization"},
+ )
+
+ if client_type == "standard":
+ ip_prefix = InfrahubNode(client=clients.standard, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data)
+ ip_pool = InfrahubNode(
+ client=clients.standard,
+ schema=ipaddress_pool_schema,
+ data={
+ "id": "pppppppp-pppp-pppp-pppp-pppppppppppp",
+ "name": "Core loopbacks",
+ "default_address_type": "IpamIPAddress",
+ "default_prefix_length": 32,
+ "ip_namespace": "ip_namespace",
+ "resources": [ip_prefix],
+ },
+ )
+
+ utilizations = await ip_pool.get_pool_resources_utilization()
+ assert len(utilizations) == 1
+ assert utilizations[0]["utilization"] == 93.75
+ else:
+ ip_prefix = InfrahubNodeSync(client=clients.sync, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data)
+ ip_pool = InfrahubNodeSync(
+ client=clients.sync,
+ schema=ipaddress_pool_schema,
+ data={
+ "id": "pppppppp-pppp-pppp-pppp-pppppppppppp",
+ "name": "Core loopbacks",
+ "default_address_type": "IpamIPAddress",
+ "default_prefix_length": 32,
+ "ip_namespace": "ip_namespace",
+ "resources": [ip_prefix],
+ },
+ )
+
+ utilizations = ip_pool.get_pool_resources_utilization()
+ assert len(utilizations) == 1
+ assert utilizations[0]["utilization"] == 93.75
diff --git a/tests/unit/sdk/pool/test_relationship_from_pool.py b/tests/unit/sdk/pool/test_relationship_from_pool.py
new file mode 100644
index 00000000..f8c44c6b
--- /dev/null
+++ b/tests/unit/sdk/pool/test_relationship_from_pool.py
@@ -0,0 +1,130 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import pytest
+
+from infrahub_sdk.node import InfrahubNode, InfrahubNodeSync
+
+if TYPE_CHECKING:
+ from typing import Any
+
+ from infrahub_sdk.schema import NodeSchemaAPI
+ from tests.unit.sdk.conftest import BothClients
+
+client_types = ["standard", "sync"]
+
+
+@pytest.mark.parametrize("client_type", client_types)
+async def test_create_input_data_with_resource_pool_relationship(
+ clients: BothClients,
+ ipaddress_pool_schema: NodeSchemaAPI,
+ ipam_ipprefix_schema: NodeSchemaAPI,
+ simple_device_schema: NodeSchemaAPI,
+ ipam_ipprefix_data: dict[str, Any],
+ client_type: str,
+) -> None:
+ if client_type == "standard":
+ ip_prefix = InfrahubNode(client=clients.standard, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data)
+ ip_pool = InfrahubNode(
+ client=clients.standard,
+ schema=ipaddress_pool_schema,
+ data={
+ "id": "pppppppp-pppp-pppp-pppp-pppppppppppp",
+ "name": "Core loopbacks",
+ "default_address_type": "IpamIPAddress",
+ "default_prefix_length": 32,
+ "ip_namespace": "ip_namespace",
+ "resources": [ip_prefix],
+ },
+ )
+ device = InfrahubNode(
+ client=clients.standard,
+ schema=simple_device_schema,
+ data={"name": "device-01", "primary_address": ip_pool, "ip_address_pool": ip_pool},
+ )
+ else:
+ ip_prefix = InfrahubNodeSync(client=clients.sync, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data)
+ ip_pool = InfrahubNodeSync(
+ client=clients.sync,
+ schema=ipaddress_pool_schema,
+ data={
+ "id": "pppppppp-pppp-pppp-pppp-pppppppppppp",
+ "name": "Core loopbacks",
+ "default_address_type": "IpamIPAddress",
+ "default_prefix_length": 32,
+ "ip_namespace": "ip_namespace",
+ "resources": [ip_prefix],
+ },
+ )
+ device = InfrahubNodeSync(
+ client=clients.sync,
+ schema=simple_device_schema,
+ data={"name": "device-01", "primary_address": ip_pool, "ip_address_pool": ip_pool},
+ )
+
+ assert device._generate_input_data()["data"] == {
+ "data": {
+ "name": {"value": "device-01"},
+ "primary_address": {"from_pool": {"id": "pppppppp-pppp-pppp-pppp-pppppppppppp"}},
+ "ip_address_pool": {"id": "pppppppp-pppp-pppp-pppp-pppppppppppp"},
+ },
+ }
+
+
+@pytest.mark.parametrize("client_type", client_types)
+async def test_create_mutation_query_with_resource_pool_relationship(
+ clients: BothClients,
+ ipaddress_pool_schema: NodeSchemaAPI,
+ ipam_ipprefix_schema: NodeSchemaAPI,
+ simple_device_schema: NodeSchemaAPI,
+ ipam_ipprefix_data: dict[str, Any],
+ client_type: str,
+) -> None:
+ if client_type == "standard":
+ ip_prefix = InfrahubNode(client=clients.standard, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data)
+ ip_pool = InfrahubNode(
+ client=clients.standard,
+ schema=ipaddress_pool_schema,
+ data={
+ "id": "pppppppp-pppp-pppp-pppp-pppppppppppp",
+ "name": "Core loopbacks",
+ "default_address_type": "IpamIPAddress",
+ "default_prefix_length": 32,
+ "ip_namespace": "ip_namespace",
+ "resources": [ip_prefix],
+ },
+ )
+ device = InfrahubNode(
+ client=clients.standard,
+ schema=simple_device_schema,
+ data={"name": "device-01", "primary_address": ip_pool, "ip_address_pool": ip_pool},
+ )
+ else:
+ ip_prefix = InfrahubNodeSync(client=clients.sync, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data)
+ ip_pool = InfrahubNodeSync(
+ client=clients.sync,
+ schema=ipaddress_pool_schema,
+ data={
+ "id": "pppppppp-pppp-pppp-pppp-pppppppppppp",
+ "name": "Core loopbacks",
+ "default_address_type": "IpamIPAddress",
+ "default_prefix_length": 32,
+ "ip_namespace": "ip_namespace",
+ "resources": [ip_prefix],
+ },
+ )
+ device = InfrahubNodeSync(
+ client=clients.sync,
+ schema=simple_device_schema,
+ data={"name": "device-01", "primary_address": ip_pool, "ip_address_pool": ip_pool},
+ )
+
+ assert device._generate_mutation_query() == {
+ "object": {
+ "id": None,
+ "primary_address": {"node": {"__typename": None, "display_label": None, "id": None}},
+ "ip_address_pool": {"node": {"__typename": None, "display_label": None, "id": None}},
+ },
+ "ok": None,
+ }
diff --git a/tests/unit/sdk/spec/test_object.py b/tests/unit/sdk/spec/test_object.py
index 90b248b1..edb06891 100644
--- a/tests/unit/sdk/spec/test_object.py
+++ b/tests/unit/sdk/spec/test_object.py
@@ -236,7 +236,7 @@ async def test_validate_object_expansion_multiple_ranges_bad_syntax(
]
-@pytest.mark.parametrize("data,is_valid,format", get_relationship_info_testdata)
+@pytest.mark.parametrize(("data", "is_valid", "format"), get_relationship_info_testdata)
async def test_get_relationship_info_tags(
client_with_schema_01: InfrahubClient,
data: dict | list,
diff --git a/tests/unit/sdk/test_attribute_generate_input_data.py b/tests/unit/sdk/test_attribute_generate_input_data.py
new file mode 100644
index 00000000..394623fc
--- /dev/null
+++ b/tests/unit/sdk/test_attribute_generate_input_data.py
@@ -0,0 +1,404 @@
+"""Unit tests for Attribute._generate_input_data covering all code paths."""
+
+from __future__ import annotations
+
+from typing import Any
+
+import pytest
+
+from infrahub_sdk.node.attribute import Attribute
+from infrahub_sdk.protocols_base import CoreNodeBase
+from infrahub_sdk.schema import AttributeSchemaAPI
+from infrahub_sdk.schema.main import AttributeKind
+
+# ──────────────────────────────────────────────
+# Value resolution: from_pool (dict-based)
+# ──────────────────────────────────────────────
+
+
+class TestFromPoolDict:
+ def test_from_pool_with_id(self) -> None:
+ pool_data = {"id": "pool-uuid-1"}
+ attr = Attribute(name="vlan_id", schema=_make_schema(AttributeKind.NUMBER), data={"from_pool": pool_data})
+
+ result = attr._generate_input_data()
+
+ assert result.payload == {"from_pool": {"id": "pool-uuid-1"}}
+ assert result.variables == {}
+
+ def test_from_pool_with_id_and_identifier(self) -> None:
+ pool_data = {"id": "pool-uuid-1", "identifier": "test"}
+ attr = Attribute(name="vlan_id", schema=_make_schema(AttributeKind.NUMBER), data={"from_pool": pool_data})
+
+ result = attr._generate_input_data()
+
+ assert result.payload == {"from_pool": {"id": "pool-uuid-1", "identifier": "test"}}
+ assert result.variables == {}
+
+ def test_from_pool_with_pool_name(self) -> None:
+ """from_pool can be a plain string (pool name), e.g. from_pool: 'VLAN ID Pool'."""
+ attr = Attribute(
+ name="vlan_id", schema=_make_schema(AttributeKind.NUMBER, optional=True), data={"from_pool": "VLAN ID Pool"}
+ )
+
+ result = attr._generate_input_data()
+
+ assert result.payload == {"from_pool": "VLAN ID Pool"}
+ assert result.variables == {}
+ assert "value" not in result.payload
+
+ def test_from_pool_value_is_none(self) -> None:
+ """from_pool pops 'from_pool' and sets Attribute.value to None; value should NOT appear in payload."""
+ attr = Attribute(
+ name="vlan_id", schema=_make_schema(AttributeKind.NUMBER), data={"from_pool": {"id": "pool-uuid-1"}}
+ )
+
+ assert attr.value is None
+ result = attr._generate_input_data()
+ assert "value" not in result.payload
+
+
+# ──────────────────────────────────────────────
+# Value resolution: from_pool (node-based)
+# ──────────────────────────────────────────────
+
+
+class TestFromPoolNode:
+ def test_pool_node_generates_from_pool(self) -> None:
+ pool_node = _FakeNode(node_id="node-pool-uuid", is_pool=True)
+
+ attr = Attribute(name="vlan_id", schema=_make_schema(AttributeKind.NUMBER), data=pool_node)
+
+ result = attr._generate_input_data()
+
+ assert result.payload == {"from_pool": {"id": "node-pool-uuid"}}
+ assert result.variables == {}
+
+ def test_non_pool_node_treated_as_regular_value(self) -> None:
+ """A CoreNodeBase that is NOT a resource pool should go through the normal value path."""
+ node = _FakeNode(node_id="regular-node-uuid", is_pool=False)
+ attr = Attribute(name="vlan_id", schema=_make_schema(AttributeKind.NUMBER), data=node)
+
+ result = attr._generate_input_data()
+
+ assert result.payload == {"value": node}
+
+
+# ──────────────────────────────────────────────
+# Value resolution: null values
+# ──────────────────────────────────────────────
+
+
+class TestNullValue:
+ def test_null_value_not_mutated(self) -> None:
+ """None value that was never mutated → empty payload, no properties."""
+ attr = Attribute(name="test_attr", schema=_make_schema(AttributeKind.TEXT), data={"value": None})
+
+ result = attr._generate_input_data()
+
+ assert result.payload == {}
+ assert result.variables == {}
+ assert result.needs_metadata is False
+
+ def test_null_value_mutated_optional(self) -> None:
+ """None value on an optional attr that was mutated → explicit null."""
+ attr = Attribute(
+ name="test_attr", schema=_make_schema(AttributeKind.TEXT, optional=True), data={"value": "initial"}
+ )
+ attr.value = None # triggers value_has_been_mutated
+
+ result = attr._generate_input_data()
+
+ assert result.payload == {"value": None}
+ assert result.needs_metadata is False
+
+ def test_null_value_mutated_non_optional(self) -> None:
+ """None value on a non-optional attr that was mutated → empty payload (same as not mutated)."""
+ attr = Attribute(
+ name="test_attr", schema=_make_schema(AttributeKind.TEXT, optional=False), data={"value": "initial"}
+ )
+ attr.value = None
+
+ result = attr._generate_input_data()
+
+ assert result.payload == {}
+ assert result.needs_metadata is False
+
+
+# ──────────────────────────────────────────────
+# Value resolution: strings (safe vs unsafe)
+# ──────────────────────────────────────────────
+
+
+class TestStringValues:
+ @pytest.mark.parametrize(
+ "value",
+ [
+ pytest.param("simple", id="alphanumeric"),
+ pytest.param("user.name", id="dots"),
+ pytest.param("/opt/repos/infrahub", id="filepath"),
+ pytest.param("https://github.com/opsmill", id="url"),
+ pytest.param("", id="empty-string"),
+ ],
+ )
+ def test_safe_string(self, value: str) -> None:
+ attr = Attribute(name="test_attr", schema=_make_schema(AttributeKind.TEXT), data=value)
+
+ result = attr._generate_input_data()
+
+ assert result.payload == {"value": value}
+ assert result.variables == {}
+
+ @pytest.mark.parametrize(
+ "value",
+ [
+ pytest.param('has "quotes"', id="quotes"),
+ pytest.param("has\nnewline", id="newline"),
+ pytest.param("special{chars}", id="braces"),
+ ],
+ )
+ def test_unsafe_string_uses_variable_binding(self, value: str) -> None:
+ attr = Attribute(name="test_attr", schema=_make_schema(AttributeKind.TEXT), data=value)
+
+ result = attr._generate_input_data()
+
+ # payload["value"] should be a variable reference like "$value_"
+ assert "value" in result.payload
+ assert result.payload["value"].startswith("$value_")
+ # The actual string should be in variables
+ assert len(result.variables) == 1
+ var_name = next(iter(result.variables))
+ assert result.variables[var_name] == value
+
+
+# ──────────────────────────────────────────────
+# Value resolution: IP types
+# ──────────────────────────────────────────────
+
+
+class TestIPValues:
+ def test_ipv4_interface(self) -> None:
+ attr = Attribute(name="address", schema=_make_schema(AttributeKind.IPHOST), data={"value": "10.0.0.1/24"})
+
+ result = attr._generate_input_data()
+
+ assert result.payload["value"] == "10.0.0.1/24"
+ assert result.variables == {}
+
+ def test_ipv6_interface(self) -> None:
+ attr = Attribute(name="address", schema=_make_schema(AttributeKind.IPHOST), data={"value": "2001:db8::1/64"})
+
+ result = attr._generate_input_data()
+
+ assert result.payload["value"] == "2001:db8::1/64"
+
+ def test_ipv4_network(self) -> None:
+ attr = Attribute(name="network", schema=_make_schema(AttributeKind.IPNETWORK), data={"value": "10.0.0.0/24"})
+
+ result = attr._generate_input_data()
+
+ assert result.payload["value"] == "10.0.0.0/24"
+
+ def test_ipv6_network(self) -> None:
+ attr = Attribute(name="network", schema=_make_schema(AttributeKind.IPNETWORK), data={"value": "2001:db8::/32"})
+
+ result = attr._generate_input_data()
+
+ assert result.payload["value"] == "2001:db8::/32"
+
+
+# ──────────────────────────────────────────────
+# Value resolution: other scalars
+# ──────────────────────────────────────────────
+
+
+class TestScalarValues:
+ def test_number_value(self) -> None:
+ attr = Attribute(name="vlan_id", schema=_make_schema(AttributeKind.NUMBER), data=42)
+
+ result = attr._generate_input_data()
+
+ assert result.payload == {"value": 42}
+ assert result.variables == {}
+
+ def test_boolean_value(self) -> None:
+ attr = Attribute(name="enabled", schema=_make_schema(AttributeKind.BOOLEAN), data=True)
+
+ result = attr._generate_input_data()
+
+ assert result.payload == {"value": True}
+
+
+# ──────────────────────────────────────────────
+# Property handling
+# ──────────────────────────────────────────────
+
+
+class TestProperties:
+ def test_no_properties_set(self) -> None:
+ """When no properties are set, payload only has the value."""
+ attr = Attribute(name="test_attr", schema=_make_schema(AttributeKind.TEXT), data="hello")
+
+ result = attr._generate_input_data()
+
+ assert result.payload == {"value": "hello"}
+
+ def test_flag_property_is_protected(self) -> None:
+ attr = Attribute(
+ name="test_attr", schema=_make_schema(AttributeKind.TEXT), data={"value": "hello", "is_protected": True}
+ )
+
+ result = attr._generate_input_data()
+
+ assert result.payload["value"] == "hello"
+ assert result.payload["is_protected"] is True
+
+ def test_object_property_source(self) -> None:
+ attr = Attribute(
+ name="test_attr",
+ schema=_make_schema(AttributeKind.TEXT),
+ data={"value": "hello", "source": {"id": "source-uuid", "display_label": "Git", "__typename": "CoreGit"}},
+ )
+
+ result = attr._generate_input_data()
+
+ assert result.payload["value"] == "hello"
+ assert result.payload["source"] == "source-uuid"
+
+ def test_object_property_owner(self) -> None:
+ attr = Attribute(
+ name="test_attr",
+ schema=_make_schema(AttributeKind.TEXT),
+ data={
+ "value": "hello",
+ "owner": {"id": "owner-uuid", "display_label": "Admin", "__typename": "CoreAccount"},
+ },
+ )
+
+ result = attr._generate_input_data()
+
+ assert result.payload["owner"] == "owner-uuid"
+
+ def test_both_flag_and_object_properties(self) -> None:
+ attr = Attribute(
+ name="test_attr",
+ schema=_make_schema(AttributeKind.TEXT),
+ data={
+ "value": "hello",
+ "is_protected": True,
+ "source": {"id": "src-uuid", "display_label": "Git", "__typename": "CoreGit"},
+ },
+ )
+
+ result = attr._generate_input_data()
+
+ assert result.payload["value"] == "hello"
+ assert result.payload["is_protected"] is True
+ assert result.payload["source"] == "src-uuid"
+
+ def test_properties_not_appended_for_null_value(self) -> None:
+ """When need_additional_properties is False (null non-mutated), properties are ignored."""
+ attr = Attribute(
+ name="test_attr",
+ schema=_make_schema(AttributeKind.TEXT),
+ data={
+ "value": None,
+ "is_protected": True,
+ "source": {"id": "src-uuid", "display_label": "Git", "__typename": "CoreGit"},
+ },
+ )
+
+ result = attr._generate_input_data()
+
+ # Null value, not mutated → empty payload, properties NOT appended
+ assert result.payload == {}
+
+ def test_properties_appended_for_from_pool(self) -> None:
+ """from_pool payloads have need_additional_properties=True, so properties are included."""
+ attr = Attribute(
+ name="vlan_id",
+ schema=_make_schema(AttributeKind.NUMBER),
+ data={"from_pool": {"id": "pool-uuid"}, "is_protected": True},
+ )
+
+ result = attr._generate_input_data()
+
+ assert result.payload["from_pool"] == {"id": "pool-uuid"}
+ assert result.payload["is_protected"] is True
+
+
+# ──────────────────────────────────────────────
+# Return type: to_dict() integration
+# ──────────────────────────────────────────────
+
+
+class TestToDictIntegration:
+ def test_to_dict_simple_value(self) -> None:
+ attr = Attribute(name="test_attr", schema=_make_schema(AttributeKind.TEXT), data="hello")
+
+ result = attr._generate_input_data().to_dict()
+
+ assert result == {"data": {"value": "hello"}, "variables": {}}
+
+ def test_to_dict_with_variables(self) -> None:
+ attr = Attribute(name="test_attr", schema=_make_schema(AttributeKind.TEXT), data='has "quotes"')
+
+ result = attr._generate_input_data().to_dict()
+
+ assert "data" in result
+ assert "variables" in result
+ assert len(result["variables"]) == 1
+ var_name = next(iter(result["variables"]))
+ assert result["variables"][var_name] == 'has "quotes"'
+ assert result["data"]["value"] == f"${var_name}"
+
+
+def _make_schema(kind: AttributeKind = AttributeKind.TEXT, optional: bool = False) -> AttributeSchemaAPI:
+ return AttributeSchemaAPI(name="test_attr", kind=kind, optional=optional)
+
+
+class _FakeNode(CoreNodeBase):
+ """Minimal CoreNodeBase implementation for testing."""
+
+ def __init__(self, node_id: str, is_pool: bool) -> None:
+ self.id = node_id
+ self._is_pool = is_pool
+ self._schema: Any = None
+ self._internal_id = ""
+ self.display_label = None
+ self.typename = None
+
+ @property
+ def hfid(self) -> list[str] | None:
+ return None
+
+ @property
+ def hfid_str(self) -> str | None:
+ return None
+
+ def get_human_friendly_id(self) -> list[str] | None:
+ return None
+
+ def get_human_friendly_id_as_string(self, include_kind: bool = False) -> str | None:
+ return None
+
+ def get_kind(self) -> str:
+ return ""
+
+ def get_all_kinds(self) -> list[str]:
+ return []
+
+ def get_branch(self) -> str:
+ return ""
+
+ def is_ip_prefix(self) -> bool:
+ return False
+
+ def is_ip_address(self) -> bool:
+ return False
+
+ def is_resource_pool(self) -> bool:
+ return self._is_pool
+
+ def get_raw_graphql_data(self) -> dict | None:
+ return None
diff --git a/tests/unit/sdk/test_client.py b/tests/unit/sdk/test_client.py
index e9cce23e..13346c02 100644
--- a/tests/unit/sdk/test_client.py
+++ b/tests/unit/sdk/test_client.py
@@ -14,11 +14,9 @@
if TYPE_CHECKING:
from collections.abc import Callable, Mapping
from inspect import Parameter
- from typing import Any
from pytest_httpx import HTTPXMock
- from infrahub_sdk.schema import NodeSchemaAPI
from tests.unit.sdk.conftest import BothClients
pytestmark = pytest.mark.httpx_mock(can_send_already_matched_responses=True)
@@ -280,7 +278,7 @@ async def test_method_all_multiple_pages(
assert len(repos) == 5
-@pytest.mark.parametrize("client_type, use_parallel", batch_client_types)
+@pytest.mark.parametrize(("client_type", "use_parallel"), batch_client_types)
async def test_method_all_batching(
clients: BothClients,
mock_query_location_batch_count: HTTPXMock,
@@ -636,208 +634,6 @@ async def test_method_filters_empty(
assert len(repos) == 0
-@pytest.mark.parametrize("client_type", client_types)
-async def test_allocate_next_ip_address(
- httpx_mock: HTTPXMock,
- mock_schema_query_ipam: HTTPXMock,
- clients: BothClients,
- ipaddress_pool_schema: NodeSchemaAPI,
- ipam_ipprefix_schema: NodeSchemaAPI,
- ipam_ipprefix_data: dict[str, Any],
- client_type: str,
-) -> None:
- httpx_mock.add_response(
- method="POST",
- json={
- "data": {
- "InfrahubIPAddressPoolGetResource": {
- "ok": True,
- "node": {
- "id": "17da1246-54f1-a9c0-2784-179f0ec5b128",
- "kind": "IpamIPAddress",
- "identifier": "test",
- "display_label": "192.0.2.0/32",
- },
- }
- }
- },
- match_headers={"X-Infrahub-Tracker": "allocate-ip-loopback"},
- is_reusable=True,
- )
- httpx_mock.add_response(
- method="POST",
- json={
- "data": {
- "IpamIPAddress": {
- "count": 1,
- "edges": [
- {
- "node": {
- "id": "17d9bd8d-8fc2-70b0-278a-179f425e25cb",
- "__typename": "IpamIPAddress",
- "address": {"value": "192.0.2.0/32"},
- "description": {"value": "test"},
- }
- }
- ],
- }
- }
- },
- match_headers={"X-Infrahub-Tracker": "query-ipamipaddress-page1"},
- is_reusable=True,
- )
-
- if client_type == "standard":
- ip_prefix = InfrahubNode(client=clients.standard, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data)
- ip_pool = InfrahubNode(
- client=clients.standard,
- schema=ipaddress_pool_schema,
- data={
- "id": "pppppppp-pppp-pppp-pppp-pppppppppppp",
- "name": "Core loopbacks",
- "default_address_type": "IpamIPAddress",
- "default_prefix_length": 32,
- "ip_namespace": "ip_namespace",
- "resources": [ip_prefix],
- },
- )
- ip_address = await clients.standard.allocate_next_ip_address(
- resource_pool=ip_pool,
- identifier="test",
- prefix_length=32,
- address_type="IpamIPAddress",
- data={"description": "test"},
- tracker="allocate-ip-loopback",
- )
- else:
- ip_prefix = InfrahubNodeSync(client=clients.sync, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data)
- ip_pool = InfrahubNodeSync(
- client=clients.sync,
- schema=ipaddress_pool_schema,
- data={
- "id": "pppppppp-pppp-pppp-pppp-pppppppppppp",
- "name": "Core loopbacks",
- "default_address_type": "IpamIPAddress",
- "default_prefix_length": 32,
- "ip_namespace": "ip_namespace",
- "resources": [ip_prefix],
- },
- )
- ip_address = clients.sync.allocate_next_ip_address(
- resource_pool=ip_pool,
- identifier="test",
- prefix_length=32,
- address_type="IpamIPAddress",
- data={"description": "test"},
- tracker="allocate-ip-loopback",
- )
-
- assert ip_address
- assert str(ip_address.address.value) == "192.0.2.0/32"
- assert ip_address.description.value == "test"
-
-
-@pytest.mark.parametrize("client_type", client_types)
-async def test_allocate_next_ip_prefix(
- httpx_mock: HTTPXMock,
- mock_schema_query_ipam: HTTPXMock,
- clients: BothClients,
- ipprefix_pool_schema: NodeSchemaAPI,
- ipam_ipprefix_schema: NodeSchemaAPI,
- ipam_ipprefix_data: dict[str, Any],
- client_type: str,
-) -> None:
- httpx_mock.add_response(
- method="POST",
- json={
- "data": {
- "InfrahubIPPrefixPoolGetResource": {
- "ok": True,
- "node": {
- "id": "7d9bd8d-8fc2-70b0-278a-179f425e25cb",
- "kind": "IpamIPPrefix",
- "identifier": "test",
- "display_label": "192.0.2.0/31",
- },
- }
- }
- },
- match_headers={"X-Infrahub-Tracker": "allocate-ip-interco"},
- is_reusable=True,
- )
- httpx_mock.add_response(
- method="POST",
- json={
- "data": {
- "IpamIPPrefix": {
- "count": 1,
- "edges": [
- {
- "node": {
- "id": "17d9bd8d-8fc2-70b0-278a-179f425e25cb",
- "__typename": "IpamIPPrefix",
- "prefix": {"value": "192.0.2.0/31"},
- "description": {"value": "test"},
- }
- }
- ],
- }
- }
- },
- match_headers={"X-Infrahub-Tracker": "query-ipamipprefix-page1"},
- is_reusable=True,
- )
-
- if client_type == "standard":
- ip_prefix = InfrahubNode(client=clients.standard, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data)
- ip_pool = InfrahubNode(
- client=clients.standard,
- schema=ipprefix_pool_schema,
- data={
- "id": "pppppppp-pppp-pppp-pppp-pppppppppppp",
- "name": "Core intercos",
- "default_prefix_type": "IpamIPPrefix",
- "default_prefix_length": 31,
- "ip_namespace": "ip_namespace",
- "resources": [ip_prefix],
- },
- )
- ip_prefix = await clients.standard.allocate_next_ip_prefix(
- resource_pool=ip_pool,
- identifier="test",
- prefix_length=31,
- prefix_type="IpamIPPrefix",
- data={"description": "test"},
- tracker="allocate-ip-interco",
- )
- else:
- ip_prefix = InfrahubNodeSync(client=clients.sync, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data)
- ip_pool = InfrahubNodeSync(
- client=clients.sync,
- schema=ipprefix_pool_schema,
- data={
- "id": "pppppppp-pppp-pppp-pppp-pppppppppppp",
- "name": "Core intercos",
- "default_prefix_type": "IpamIPPrefix",
- "default_prefix_length": 31,
- "ip_namespace": "ip_namespace",
- "resources": [ip_prefix],
- },
- )
- ip_prefix = clients.sync.allocate_next_ip_prefix(
- resource_pool=ip_pool,
- identifier="test",
- prefix_length=31,
- prefix_type="IpamIPPrefix",
- data={"description": "test"},
- tracker="allocate-ip-interco",
- )
-
- assert ip_prefix
- assert str(ip_prefix.prefix.value) == "192.0.2.0/31"
- assert ip_prefix.description.value == "test"
-
-
EXPECTED_ECHO = """URL: http://mock/graphql/main
QUERY:
diff --git a/tests/unit/sdk/test_file_handler.py b/tests/unit/sdk/test_file_handler.py
new file mode 100644
index 00000000..ae59c842
--- /dev/null
+++ b/tests/unit/sdk/test_file_handler.py
@@ -0,0 +1,305 @@
+from __future__ import annotations
+
+import tempfile
+from io import BytesIO
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+import anyio
+import httpx
+import pytest
+
+from infrahub_sdk.exceptions import AuthenticationError, NodeNotFoundError
+from infrahub_sdk.file_handler import FileHandler, FileHandlerBase, FileHandlerSync, PreparedFile
+
+if TYPE_CHECKING:
+ from pytest_httpx import HTTPXMock
+
+ from tests.unit.sdk.conftest import BothClients
+
+
+FILE_CONTENT_BYTES = b"\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR..."
+NODE_ID = "test-node-123"
+
+
+async def test_prepare_upload_with_bytes() -> None:
+ """Test preparing upload with bytes content (async)."""
+ content = b"test file content"
+ prepared = await FileHandlerBase.prepare_upload(content=content, name="test.txt")
+
+ assert isinstance(prepared, PreparedFile)
+ assert prepared.file_object is not None
+ assert isinstance(prepared.file_object, BytesIO)
+ assert prepared.filename == "test.txt"
+ assert prepared.should_close is False
+ assert prepared.file_object.read() == content
+
+
+async def test_prepare_upload_with_bytes_default_name() -> None:
+ """Test preparing upload with bytes content and no name (async)."""
+ content = b"test file content"
+ prepared = await FileHandlerBase.prepare_upload(content=content)
+
+ assert prepared.file_object is not None
+ assert prepared.filename == "uploaded_file"
+ assert prepared.should_close is False
+
+
+async def test_prepare_upload_with_path() -> None:
+ """Test preparing upload with Path content (async, opens in thread pool)."""
+ with tempfile.NamedTemporaryFile(suffix=".txt") as tmp:
+ tmp.write(b"test content from file")
+ tmp.flush()
+ tmp_path = Path(tmp.name)
+
+ prepared = await FileHandlerBase.prepare_upload(content=tmp_path)
+
+ assert prepared.file_object is not None
+ assert prepared.filename == tmp_path.name
+ assert prepared.should_close is True
+ assert prepared.file_object.read() == b"test content from file"
+ prepared.file_object.close()
+
+
+async def test_prepare_upload_with_path_custom_name() -> None:
+ """Test preparing upload with Path content and custom name (async)."""
+ with tempfile.NamedTemporaryFile(suffix=".txt") as tmp:
+ tmp.write(b"test content")
+ tmp.flush()
+ tmp_path = Path(tmp.name)
+
+ prepared = await FileHandlerBase.prepare_upload(content=tmp_path, name="custom_name.txt")
+
+ assert prepared.filename == "custom_name.txt"
+ assert prepared.file_object
+ prepared.file_object.close()
+
+
+async def test_prepare_upload_with_binary_io() -> None:
+ """Test preparing upload with BinaryIO content (async)."""
+ content = BytesIO(b"binary io content")
+ prepared = await FileHandlerBase.prepare_upload(content=content, name="binary.bin")
+
+ assert prepared.file_object is content
+ assert prepared.filename == "binary.bin"
+ assert prepared.should_close is False
+
+
+async def test_prepare_upload_with_none() -> None:
+ """Test preparing upload with None content (async)."""
+ prepared = await FileHandlerBase.prepare_upload(content=None)
+
+ assert prepared.file_object is None
+ assert prepared.filename is None
+ assert prepared.should_close is False
+
+
+def test_prepare_upload_sync_with_bytes() -> None:
+ """Test preparing upload with bytes content (sync)."""
+ content = b"test file content"
+ prepared = FileHandlerBase.prepare_upload_sync(content=content, name="test.txt")
+
+ assert isinstance(prepared, PreparedFile)
+ assert prepared.file_object is not None
+ assert isinstance(prepared.file_object, BytesIO)
+ assert prepared.filename == "test.txt"
+ assert prepared.should_close is False
+ assert prepared.file_object.read() == content
+
+
+def test_prepare_upload_sync_with_bytes_default_name() -> None:
+ """Test preparing upload with bytes content and no name (sync)."""
+ content = b"test file content"
+ prepared = FileHandlerBase.prepare_upload_sync(content=content)
+
+ assert prepared.file_object is not None
+ assert prepared.filename == "uploaded_file"
+ assert prepared.should_close is False
+
+
+def test_prepare_upload_sync_with_path() -> None:
+ """Test preparing upload with Path content (sync)."""
+ with tempfile.NamedTemporaryFile(suffix=".txt") as tmp:
+ tmp.write(b"test content from file")
+ tmp.flush()
+ tmp_path = Path(tmp.name)
+
+ prepared = FileHandlerBase.prepare_upload_sync(content=tmp_path)
+
+ assert prepared.file_object is not None
+ assert prepared.filename == tmp_path.name
+ assert prepared.should_close is True
+ assert prepared.file_object.read() == b"test content from file"
+ prepared.file_object.close()
+
+
+def test_prepare_upload_sync_with_path_custom_name() -> None:
+ """Test preparing upload with Path content and custom name (sync)."""
+ with tempfile.NamedTemporaryFile(suffix=".txt") as tmp:
+ tmp.write(b"test content")
+ tmp.flush()
+ tmp_path = Path(tmp.name)
+
+ prepared = FileHandlerBase.prepare_upload_sync(content=tmp_path, name="custom_name.txt")
+
+ assert prepared.filename == "custom_name.txt"
+ assert prepared.file_object
+ prepared.file_object.close()
+
+
+def test_prepare_upload_sync_with_binary_io() -> None:
+ """Test preparing upload with BinaryIO content (sync)."""
+ content = BytesIO(b"binary io content")
+ prepared = FileHandlerBase.prepare_upload_sync(content=content, name="binary.bin")
+
+ assert prepared.file_object is content
+ assert prepared.filename == "binary.bin"
+ assert prepared.should_close is False
+
+
+def test_prepare_upload_sync_with_none() -> None:
+ """Test preparing upload with None content (sync)."""
+ prepared = FileHandlerBase.prepare_upload_sync(content=None)
+
+ assert prepared.file_object is None
+ assert prepared.filename is None
+ assert prepared.should_close is False
+
+
+def test_handle_error_response_401() -> None:
+ """Test handling 401 authentication error."""
+ response = httpx.Response(status_code=401, json={"errors": [{"message": "Invalid token"}]})
+ exc = httpx.HTTPStatusError(message="Unauthorized", request=httpx.Request("GET", "http://test"), response=response)
+
+ with pytest.raises(AuthenticationError) as excinfo:
+ FileHandlerBase.handle_error_response(exc=exc)
+
+ assert "Invalid token" in str(excinfo.value)
+
+
+def test_handle_error_response_403() -> None:
+ """Test handling 403 forbidden error."""
+ response = httpx.Response(status_code=403, json={"errors": [{"message": "Access denied"}]})
+ exc = httpx.HTTPStatusError(message="Forbidden", request=httpx.Request("GET", "http://test"), response=response)
+
+ with pytest.raises(AuthenticationError) as excinfo:
+ FileHandlerBase.handle_error_response(exc=exc)
+
+ assert "Access denied" in str(excinfo.value)
+
+
+def test_handle_error_response_404() -> None:
+ """Test handling 404 not found error."""
+ response = httpx.Response(status_code=404, json={"detail": "File not found with ID abc123"})
+ exc = httpx.HTTPStatusError(message="Not Found", request=httpx.Request("GET", "http://test"), response=response)
+
+ with pytest.raises(NodeNotFoundError) as excinfo:
+ FileHandlerBase.handle_error_response(exc=exc)
+
+ assert "File not found with ID abc123" in str(excinfo.value)
+
+
+def test_handle_error_response_500() -> None:
+ """Test handling 500 server error (re-raises)."""
+ response = httpx.Response(status_code=500, json={"error": "Internal server error"})
+ exc = httpx.HTTPStatusError(message="Server Error", request=httpx.Request("GET", "http://test"), response=response)
+
+ with pytest.raises(httpx.HTTPStatusError):
+ FileHandlerBase.handle_error_response(exc=exc)
+
+
+def test_handle_response_success() -> None:
+ """Test handling successful response."""
+ request = httpx.Request("GET", "http://test")
+ response = httpx.Response(status_code=200, content=FILE_CONTENT_BYTES, request=request)
+
+ result = FileHandlerBase.handle_response(resp=response)
+
+ assert result == FILE_CONTENT_BYTES
+
+
+@pytest.fixture
+def mock_download_success(httpx_mock: HTTPXMock) -> HTTPXMock:
+ """Mock successful file download."""
+ httpx_mock.add_response(
+ method="GET",
+ url="http://mock/api/storage/files/test-node-123?branch=main",
+ content=FILE_CONTENT_BYTES,
+ headers={"Content-Type": "application/octet-stream"},
+ )
+ return httpx_mock
+
+
+@pytest.fixture
+def mock_download_stream(httpx_mock: HTTPXMock) -> HTTPXMock:
+ """Mock successful streaming file download."""
+ httpx_mock.add_response(
+ method="GET",
+ url="http://mock/api/storage/files/stream-node?branch=main",
+ content=FILE_CONTENT_BYTES,
+ headers={"Content-Type": "application/octet-stream"},
+ )
+ return httpx_mock
+
+
+client_types = ["standard", "sync"]
+
+
+@pytest.mark.parametrize("client_type", client_types)
+async def test_file_handler_download_to_memory(
+ client_type: str, clients: BothClients, mock_download_success: HTTPXMock
+) -> None:
+    """Test downloading a file into memory via FileHandler or FileHandlerSync."""
+ client = getattr(clients, client_type)
+
+ if client_type == "standard":
+ handler = FileHandler(client=client)
+ content = await handler.download(node_id=NODE_ID, branch="main")
+ else:
+ handler = FileHandlerSync(client=client)
+ content = handler.download(node_id=NODE_ID, branch="main")
+
+ assert content == FILE_CONTENT_BYTES
+
+
+@pytest.mark.parametrize("client_type", client_types)
+async def test_file_handler_download_to_disk(
+ client_type: str, clients: BothClients, mock_download_stream: HTTPXMock
+) -> None:
+    """Test streaming a file download to disk via FileHandler or FileHandlerSync."""
+ client = getattr(clients, client_type)
+
+ with tempfile.TemporaryDirectory() as tmpdir:
+ dest_path = Path(tmpdir) / "downloaded.bin"
+
+ if client_type == "standard":
+ handler = FileHandler(client=client)
+ bytes_written = await handler.download(node_id="stream-node", branch="main", dest=dest_path)
+ else:
+ handler = FileHandlerSync(client=client)
+ bytes_written = handler.download(node_id="stream-node", branch="main", dest=dest_path)
+
+ assert bytes_written == len(FILE_CONTENT_BYTES)
+ assert await anyio.Path(dest_path).read_bytes() == FILE_CONTENT_BYTES
+
+
+@pytest.mark.parametrize("client_type", client_types)
+async def test_file_handler_build_url_with_branch(client_type: str, clients: BothClients) -> None:
+ """Test URL building with branch parameter."""
+ client = getattr(clients, client_type)
+
+ handler = FileHandler(client=client) if client_type == "standard" else FileHandlerSync(client=client)
+
+ url = handler._build_url(node_id="node-123", branch="feature-branch")
+ assert url == "http://mock/api/storage/files/node-123?branch=feature-branch"
+
+
+@pytest.mark.parametrize("client_type", client_types)
+async def test_file_handler_build_url_without_branch(client_type: str, clients: BothClients) -> None:
+ """Test URL building without branch parameter."""
+ client = getattr(clients, client_type)
+
+ handler = FileHandler(client=client) if client_type == "standard" else FileHandlerSync(client=client)
+
+ url = handler._build_url(node_id="node-456", branch=None)
+ assert url == "http://mock/api/storage/files/node-456"
diff --git a/tests/unit/sdk/test_file_object.py b/tests/unit/sdk/test_file_object.py
new file mode 100644
index 00000000..f6267003
--- /dev/null
+++ b/tests/unit/sdk/test_file_object.py
@@ -0,0 +1,295 @@
+import tempfile
+from pathlib import Path
+
+import anyio
+import pytest
+from pytest_httpx import HTTPXMock
+
+from infrahub_sdk.exceptions import FeatureNotSupportedError
+from infrahub_sdk.node import InfrahubNode, InfrahubNodeSync
+from infrahub_sdk.schema import NodeSchemaAPI
+from tests.unit.sdk.conftest import BothClients
+
+pytestmark = pytest.mark.httpx_mock(can_send_already_matched_responses=True)
+
+client_types = ["standard", "sync"]
+
+FILE_CONTENT = b"Test file content"
+FILE_NAME = "contract.pdf"
+FILE_MIME_TYPE = "application/pdf"
+
+
+@pytest.fixture
+def mock_node_create_with_file(httpx_mock: HTTPXMock) -> HTTPXMock:
+ """Mock the HTTP response for node create with file upload."""
+ httpx_mock.add_response(
+ method="POST",
+ json={
+ "data": {
+ "NetworkCircuitContractCreate": {
+ "ok": True,
+ "object": {
+ "id": "new-file-node-123",
+ "display_label": FILE_NAME,
+ "file_name": {"value": FILE_NAME},
+ "checksum": {"value": "abc123checksum"},
+ "file_size": {"value": len(FILE_CONTENT)},
+ "file_type": {"value": FILE_MIME_TYPE},
+ "storage_id": {"value": "storage-xyz-789"},
+ "contract_start": {"value": "2024-01-01T00:00:00Z"},
+ "contract_end": {"value": "2024-12-31T23:59:59Z"},
+ },
+ }
+ }
+ },
+ is_reusable=True,
+ )
+ return httpx_mock
+
+
+@pytest.fixture
+def mock_node_update_with_file(httpx_mock: HTTPXMock) -> HTTPXMock:
+ """Mock the HTTP response for node update with file upload."""
+ httpx_mock.add_response(
+ method="POST",
+ json={
+ "data": {
+ "NetworkCircuitContractUpdate": {
+ "ok": True,
+ "object": {
+ "id": "existing-file-node-456",
+ "display_label": FILE_NAME,
+ "file_name": {"value": FILE_NAME},
+ "checksum": {"value": "updated123checksum"},
+ "file_size": {"value": len(FILE_CONTENT)},
+ "file_type": {"value": FILE_MIME_TYPE},
+ "storage_id": {"value": "storage-updated-789"},
+ "contract_start": {"value": "2024-01-01T00:00:00Z"},
+ "contract_end": {"value": "2024-12-31T23:59:59Z"},
+ },
+ }
+ }
+ },
+ is_reusable=True,
+ )
+ return httpx_mock
+
+
+@pytest.mark.parametrize("client_type", client_types)
+async def test_node_create_with_file_uses_multipart(
+ client_type: str, clients: BothClients, file_object_schema: NodeSchemaAPI, mock_node_create_with_file: HTTPXMock
+) -> None:
+ """Test that node.save() for create with file content sends a multipart request."""
+ client = getattr(clients, client_type)
+
+ if client_type == "standard":
+ node = InfrahubNode(client=client, schema=file_object_schema, branch="main")
+ else:
+ node = InfrahubNodeSync(client=client, schema=file_object_schema, branch="main")
+
+ node.contract_start.value = "2024-01-01T00:00:00Z" # type: ignore[union-attr]
+ node.contract_end.value = "2024-12-31T23:59:59Z" # type: ignore[union-attr]
+ node.upload_from_bytes(content=FILE_CONTENT, name=FILE_NAME)
+
+ if isinstance(node, InfrahubNode):
+ await node.save()
+ else:
+ node.save()
+
+ requests = mock_node_create_with_file.get_requests()
+ assert len(requests) == 1
+ assert requests[0].headers.get("x-infrahub-tracker") == "mutation-networkcircuitcontract-create"
+ assert requests[0].headers.get("content-type").startswith("multipart/form-data;")
+ assert b"Content-Disposition: form-data" in requests[0].content
+ assert f'filename="{FILE_NAME}"'.encode() in requests[0].content
+
+
+@pytest.mark.parametrize("client_type", client_types)
+async def test_node_update_with_file_uses_multipart(
+ client_type: str, clients: BothClients, file_object_schema: NodeSchemaAPI, mock_node_update_with_file: HTTPXMock
+) -> None:
+ """Test that node.save() for update with file content sends a multipart request."""
+ client = getattr(clients, client_type)
+
+ if client_type == "standard":
+ node = InfrahubNode(client=client, schema=file_object_schema, branch="main")
+ else:
+ node = InfrahubNodeSync(client=client, schema=file_object_schema, branch="main")
+
+ # Simulate an existing node
+ node.id = "existing-file-node-456"
+ node._existing = True
+ node.contract_start.value = "2024-01-01T00:00:00Z" # type: ignore[union-attr]
+ node.contract_end.value = "2024-12-31T23:59:59Z" # type: ignore[union-attr]
+ node.upload_from_bytes(content=FILE_CONTENT, name=FILE_NAME)
+
+ if isinstance(node, InfrahubNode):
+ await node.save()
+ else:
+ node.save()
+
+ requests = mock_node_update_with_file.get_requests()
+ assert len(requests) == 1
+ assert requests[0].headers.get("x-infrahub-tracker") == "mutation-networkcircuitcontract-update"
+ assert requests[0].headers.get("content-type").startswith("multipart/form-data;")
+ assert b"Content-Disposition: form-data" in requests[0].content
+ assert f'filename="{FILE_NAME}"'.encode() in requests[0].content
+
+
+@pytest.mark.parametrize("client_type", client_types)
+async def test_node_create_file_object_without_file_raises(
+ client_type: str, clients: BothClients, file_object_schema: NodeSchemaAPI
+) -> None:
+ """Test that creating a FileObject node without file content raises an error."""
+ client = getattr(clients, client_type)
+
+ if client_type == "standard":
+ node = InfrahubNode(client=client, schema=file_object_schema, branch="main")
+ else:
+ node = InfrahubNodeSync(client=client, schema=file_object_schema, branch="main")
+
+ node.contract_start.value = "2024-01-01T00:00:00Z" # type: ignore[union-attr]
+ node.contract_end.value = "2024-12-31T23:59:59Z" # type: ignore[union-attr]
+
+ with pytest.raises(ValueError, match=r"Cannot create .* without file content"):
+ if isinstance(node, InfrahubNode):
+ await node.save()
+ else:
+ node.save()
+
+
+@pytest.mark.parametrize("client_type", client_types)
+async def test_node_save_clears_file_after_upload(
+ client_type: str, clients: BothClients, file_object_schema: NodeSchemaAPI, mock_node_create_with_file: HTTPXMock
+) -> None:
+ """Test that file content is cleared after successful save."""
+ client = getattr(clients, client_type)
+
+ if client_type == "standard":
+ node = InfrahubNode(client=client, schema=file_object_schema, branch="main")
+ else:
+ node = InfrahubNodeSync(client=client, schema=file_object_schema, branch="main")
+
+ node.contract_start.value = "2024-01-01T00:00:00Z" # type: ignore[union-attr]
+ node.contract_end.value = "2024-12-31T23:59:59Z" # type: ignore[union-attr]
+
+ node.upload_from_bytes(content=FILE_CONTENT, name=FILE_NAME)
+ assert node._file_content is not None
+ assert node._file_name is not None
+
+ if isinstance(node, InfrahubNode):
+ await node.save()
+ else:
+ node.save()
+
+ # File content should be cleared after save
+ assert node._file_content is None
+ assert node._file_name is None
+
+
+@pytest.fixture
+def mock_download_file(httpx_mock: HTTPXMock) -> HTTPXMock:
+ httpx_mock.add_response(
+ method="GET",
+ url="http://mock/api/storage/files/file-node-123?branch=main",
+ content=FILE_CONTENT,
+ headers={"Content-Type": FILE_MIME_TYPE, "Content-Disposition": f'attachment; filename="{FILE_NAME}"'},
+ )
+ return httpx_mock
+
+
+@pytest.mark.parametrize("client_type", client_types)
+async def test_node_download_file(
+ client_type: str, clients: BothClients, file_object_schema: NodeSchemaAPI, mock_download_file: HTTPXMock
+) -> None:
+ """Test downloading a file from a FileObject node."""
+ client = getattr(clients, client_type)
+
+ if client_type == "standard":
+ node = InfrahubNode(client=client, schema=file_object_schema, branch="main")
+ else:
+ node = InfrahubNodeSync(client=client, schema=file_object_schema, branch="main")
+
+ node.id = "file-node-123"
+ if isinstance(node, InfrahubNode):
+ content = await node.download_file()
+ else:
+ content = node.download_file()
+
+ assert content == FILE_CONTENT
+
+
+@pytest.fixture
+def mock_download_file_to_disk(httpx_mock: HTTPXMock) -> HTTPXMock:
+ httpx_mock.add_response(
+ method="GET",
+ url="http://mock/api/storage/files/file-node-stream?branch=main",
+ content=FILE_CONTENT,
+ headers={"Content-Type": FILE_MIME_TYPE, "Content-Disposition": f'attachment; filename="{FILE_NAME}"'},
+ )
+ return httpx_mock
+
+
+@pytest.mark.parametrize("client_type", client_types)
+async def test_node_download_file_to_disk(
+ client_type: str, clients: BothClients, file_object_schema: NodeSchemaAPI, mock_download_file_to_disk: HTTPXMock
+) -> None:
+ """Test downloading a file from a FileObject node directly to disk."""
+ client = getattr(clients, client_type)
+
+ if client_type == "standard":
+ node = InfrahubNode(client=client, schema=file_object_schema, branch="main")
+ else:
+ node = InfrahubNodeSync(client=client, schema=file_object_schema, branch="main")
+
+ node.id = "file-node-stream"
+ with tempfile.TemporaryDirectory() as tmpdir:
+ dest_path = Path(tmpdir) / "downloaded.bin"
+
+ if isinstance(node, InfrahubNode):
+ bytes_written = await node.download_file(dest=dest_path)
+ else:
+ bytes_written = node.download_file(dest=dest_path)
+
+ assert bytes_written == len(FILE_CONTENT)
+ assert await anyio.Path(dest_path).read_bytes() == FILE_CONTENT
+
+
+@pytest.mark.parametrize("client_type", client_types)
+async def test_node_download_file_not_file_object_raises(
+ client_type: str, clients: BothClients, non_file_object_schema: NodeSchemaAPI
+) -> None:
+    """Test that download_file raises an error on non-FileObject nodes."""
+ client = getattr(clients, client_type)
+
+ if client_type == "standard":
+ node = InfrahubNode(client=client, schema=non_file_object_schema, branch="main")
+ with pytest.raises(
+ FeatureNotSupportedError,
+ match=r"calling download_file is only supported for nodes that inherit from CoreFileObject",
+ ):
+ await node.download_file()
+ else:
+ node = InfrahubNodeSync(client=client, schema=non_file_object_schema, branch="main")
+ with pytest.raises(
+ FeatureNotSupportedError,
+ match=r"calling download_file is only supported for nodes that inherit from CoreFileObject",
+ ):
+ node.download_file()
+
+
+@pytest.mark.parametrize("client_type", client_types)
+async def test_node_download_file_unsaved_node_raises(
+ client_type: str, clients: BothClients, file_object_schema: NodeSchemaAPI
+) -> None:
+    """Test that download_file raises an error on unsaved nodes."""
+ client = getattr(clients, client_type)
+
+ if client_type == "standard":
+ node = InfrahubNode(client=client, schema=file_object_schema, branch="main")
+ with pytest.raises(ValueError, match=r"Cannot download file for a node that hasn't been saved yet"):
+ await node.download_file()
+ else:
+ node = InfrahubNodeSync(client=client, schema=file_object_schema, branch="main")
+ with pytest.raises(ValueError, match=r"Cannot download file for a node that hasn't been saved yet"):
+ node.download_file()
diff --git a/tests/unit/sdk/test_node.py b/tests/unit/sdk/test_node.py
index 8dc18c9b..ad3d77eb 100644
--- a/tests/unit/sdk/test_node.py
+++ b/tests/unit/sdk/test_node.py
@@ -2,11 +2,14 @@
import inspect
import ipaddress
+import tempfile
+from io import BytesIO
+from pathlib import Path
from typing import TYPE_CHECKING
import pytest
-from infrahub_sdk.exceptions import NodeNotFoundError
+from infrahub_sdk.exceptions import FeatureNotSupportedError, NodeNotFoundError
from infrahub_sdk.node import (
InfrahubNode,
InfrahubNodeBase,
@@ -129,7 +132,9 @@ async def test_validate_method_signature(
)
-@pytest.mark.parametrize("hfid,expected_kind,expected_hfid", [("BuiltinLocation__JFK1", "BuiltinLocation", ["JFK1"])])
+@pytest.mark.parametrize(
+ ("hfid", "expected_kind", "expected_hfid"), [("BuiltinLocation__JFK1", "BuiltinLocation", ["JFK1"])]
+)
def test_parse_human_friendly_id(hfid: str, expected_kind: str, expected_hfid: list[str]) -> None:
kind, hfid = parse_human_friendly_id(hfid)
assert kind == expected_kind
@@ -2211,289 +2216,6 @@ async def test_relationships_excluded_input_data(
assert node.tags.has_update is False
-@pytest.mark.parametrize("client_type", client_types)
-async def test_create_input_data_with_resource_pool_relationship(
- client: InfrahubClient,
- ipaddress_pool_schema: NodeSchemaAPI,
- ipam_ipprefix_schema: NodeSchemaAPI,
- simple_device_schema: NodeSchemaAPI,
- ipam_ipprefix_data: dict[str, Any],
- client_type: str,
-) -> None:
- if client_type == "standard":
- ip_prefix = InfrahubNode(client=client, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data)
- ip_pool = InfrahubNode(
- client=client,
- schema=ipaddress_pool_schema,
- data={
- "id": "pppppppp-pppp-pppp-pppp-pppppppppppp",
- "name": "Core loopbacks",
- "default_address_type": "IpamIPAddress",
- "default_prefix_length": 32,
- "ip_namespace": "ip_namespace",
- "resources": [ip_prefix],
- },
- )
- device = InfrahubNode(
- client=client,
- schema=simple_device_schema,
- data={"name": "device-01", "primary_address": ip_pool, "ip_address_pool": ip_pool},
- )
- else:
- ip_prefix = InfrahubNodeSync(client=client, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data)
- ip_pool = InfrahubNodeSync(
- client=client,
- schema=ipaddress_pool_schema,
- data={
- "id": "pppppppp-pppp-pppp-pppp-pppppppppppp",
- "name": "Core loopbacks",
- "default_address_type": "IpamIPAddress",
- "default_prefix_length": 32,
- "ip_namespace": "ip_namespace",
- "resources": [ip_prefix],
- },
- )
- device = InfrahubNode(
- client=client,
- schema=simple_device_schema,
- data={"name": "device-01", "primary_address": ip_pool, "ip_address_pool": ip_pool},
- )
-
- assert device._generate_input_data()["data"] == {
- "data": {
- "name": {"value": "device-01"},
- "primary_address": {"from_pool": {"id": "pppppppp-pppp-pppp-pppp-pppppppppppp"}},
- "ip_address_pool": {"id": "pppppppp-pppp-pppp-pppp-pppppppppppp"},
- },
- }
-
-
-@pytest.mark.parametrize("client_type", client_types)
-async def test_create_mutation_query_with_resource_pool_relationship(
- client: InfrahubClient,
- ipaddress_pool_schema: NodeSchemaAPI,
- ipam_ipprefix_schema: NodeSchemaAPI,
- simple_device_schema: NodeSchemaAPI,
- ipam_ipprefix_data: dict[str, Any],
- client_type: str,
-) -> None:
- if client_type == "standard":
- ip_prefix = InfrahubNode(client=client, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data)
- ip_pool = InfrahubNode(
- client=client,
- schema=ipaddress_pool_schema,
- data={
- "id": "pppppppp-pppp-pppp-pppp-pppppppppppp",
- "name": "Core loopbacks",
- "default_address_type": "IpamIPAddress",
- "default_prefix_length": 32,
- "ip_namespace": "ip_namespace",
- "resources": [ip_prefix],
- },
- )
- device = InfrahubNode(
- client=client,
- schema=simple_device_schema,
- data={"name": "device-01", "primary_address": ip_pool, "ip_address_pool": ip_pool},
- )
- else:
- ip_prefix = InfrahubNodeSync(client=client, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data)
- ip_pool = InfrahubNodeSync(
- client=client,
- schema=ipaddress_pool_schema,
- data={
- "id": "pppppppp-pppp-pppp-pppp-pppppppppppp",
- "name": "Core loopbacks",
- "default_address_type": "IpamIPAddress",
- "default_prefix_length": 32,
- "ip_namespace": "ip_namespace",
- "resources": [ip_prefix],
- },
- )
- device = InfrahubNode(
- client=client,
- schema=simple_device_schema,
- data={"name": "device-01", "primary_address": ip_pool, "ip_address_pool": ip_pool},
- )
-
- assert device._generate_mutation_query() == {
- "object": {
- "id": None,
- "primary_address": {"node": {"__typename": None, "display_label": None, "id": None}},
- "ip_address_pool": {"node": {"__typename": None, "display_label": None, "id": None}},
- },
- "ok": None,
- }
-
-
-@pytest.mark.parametrize("client_type", client_types)
-async def test_get_pool_allocated_resources(
- httpx_mock: HTTPXMock,
- mock_schema_query_ipam: HTTPXMock,
- clients: BothClients,
- ipaddress_pool_schema: NodeSchemaAPI,
- ipam_ipprefix_schema: NodeSchemaAPI,
- ipam_ipprefix_data: dict[str, Any],
- client_type: str,
-) -> None:
- httpx_mock.add_response(
- method="POST",
- json={
- "data": {
- "InfrahubResourcePoolAllocated": {
- "count": 2,
- "edges": [
- {
- "node": {
- "id": "17d9bd8d-8fc2-70b0-278a-179f425e25cb",
- "kind": "IpamIPAddress",
- "branch": "main",
- "identifier": "ip-1",
- }
- },
- {
- "node": {
- "id": "17d9bd8e-31ee-acf0-2786-179fb76f2f67",
- "kind": "IpamIPAddress",
- "branch": "main",
- "identifier": "ip-2",
- }
- },
- ],
- }
- }
- },
- match_headers={"X-Infrahub-Tracker": "get-allocated-resources-page1"},
- )
- httpx_mock.add_response(
- method="POST",
- json={
- "data": {
- "IpamIPAddress": {
- "count": 2,
- "edges": [
- {"node": {"id": "17d9bd8d-8fc2-70b0-278a-179f425e25cb", "__typename": "IpamIPAddress"}},
- {"node": {"id": "17d9bd8e-31ee-acf0-2786-179fb76f2f67", "__typename": "IpamIPAddress"}},
- ],
- }
- }
- },
- match_headers={"X-Infrahub-Tracker": "query-ipamipaddress-page1"},
- )
-
- if client_type == "standard":
- ip_prefix = InfrahubNode(client=clients.standard, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data)
- ip_pool = InfrahubNode(
- client=clients.standard,
- schema=ipaddress_pool_schema,
- data={
- "id": "pppppppp-pppp-pppp-pppp-pppppppppppp",
- "name": "Core loopbacks",
- "default_address_type": "IpamIPAddress",
- "default_prefix_length": 32,
- "ip_namespace": "ip_namespace",
- "resources": [ip_prefix],
- },
- )
-
- resources = await ip_pool.get_pool_allocated_resources(resource=ip_prefix)
- assert len(resources) == 2
- assert [resource.id for resource in resources] == [
- "17d9bd8d-8fc2-70b0-278a-179f425e25cb",
- "17d9bd8e-31ee-acf0-2786-179fb76f2f67",
- ]
- else:
- ip_prefix = InfrahubNodeSync(client=clients.sync, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data)
- ip_pool = InfrahubNodeSync(
- client=clients.sync,
- schema=ipaddress_pool_schema,
- data={
- "id": "pppppppp-pppp-pppp-pppp-pppppppppppp",
- "name": "Core loopbacks",
- "default_address_type": "IpamIPAddress",
- "default_prefix_length": 32,
- "ip_namespace": "ip_namespace",
- "resources": [ip_prefix],
- },
- )
-
- resources = ip_pool.get_pool_allocated_resources(resource=ip_prefix)
- assert len(resources) == 2
- assert [resource.id for resource in resources] == [
- "17d9bd8d-8fc2-70b0-278a-179f425e25cb",
- "17d9bd8e-31ee-acf0-2786-179fb76f2f67",
- ]
-
-
-@pytest.mark.parametrize("client_type", client_types)
-async def test_get_pool_resources_utilization(
- httpx_mock: HTTPXMock,
- clients: BothClients,
- ipaddress_pool_schema: NodeSchemaAPI,
- ipam_ipprefix_schema: NodeSchemaAPI,
- ipam_ipprefix_data: dict[str, Any],
- client_type: str,
-) -> None:
- httpx_mock.add_response(
- method="POST",
- json={
- "data": {
- "InfrahubResourcePoolUtilization": {
- "count": 1,
- "edges": [
- {
- "node": {
- "id": "17d9bd86-3471-a020-2782-179ff078e58f",
- "utilization": 93.75,
- "utilization_branches": 0,
- "utilization_default_branch": 93.75,
- }
- }
- ],
- }
- }
- },
- match_headers={"X-Infrahub-Tracker": "get-pool-utilization"},
- )
-
- if client_type == "standard":
- ip_prefix = InfrahubNode(client=clients.standard, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data)
- ip_pool = InfrahubNode(
- client=clients.standard,
- schema=ipaddress_pool_schema,
- data={
- "id": "pppppppp-pppp-pppp-pppp-pppppppppppp",
- "name": "Core loopbacks",
- "default_address_type": "IpamIPAddress",
- "default_prefix_length": 32,
- "ip_namespace": "ip_namespace",
- "resources": [ip_prefix],
- },
- )
-
- utilizations = await ip_pool.get_pool_resources_utilization()
- assert len(utilizations) == 1
- assert utilizations[0]["utilization"] == 93.75
- else:
- ip_prefix = InfrahubNodeSync(client=clients.sync, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data)
- ip_pool = InfrahubNodeSync(
- client=clients.sync,
- schema=ipaddress_pool_schema,
- data={
- "id": "pppppppp-pppp-pppp-pppp-pppppppppppp",
- "name": "Core loopbacks",
- "default_address_type": "IpamIPAddress",
- "default_prefix_length": 32,
- "ip_namespace": "ip_namespace",
- "resources": [ip_prefix],
- },
- )
-
- utilizations = ip_pool.get_pool_resources_utilization()
- assert len(utilizations) == 1
- assert utilizations[0]["utilization"] == 93.75
-
-
@pytest.mark.parametrize("client_type", client_types)
async def test_from_graphql(
clients: BothClients, mock_schema_query_01: HTTPXMock, location_data01: dict[str, Any], client_type: str
@@ -3271,3 +2993,253 @@ def test_relationship_manager_generate_query_data_without_include_metadata() ->
assert "count" in data
assert "edges" in data
assert "node" in data["edges"]
+
+
+@pytest.mark.parametrize("client_type", client_types)
+async def test_node_is_file_object_true(
+ client_type: str, clients: BothClients, file_object_schema: NodeSchemaAPI
+) -> None:
+ """Test that is_file_object returns True for nodes inheriting from CoreFileObject."""
+ if client_type == "standard":
+ node = InfrahubNode(client=clients.standard, schema=file_object_schema, branch="main")
+ else:
+ node = InfrahubNodeSync(client=clients.sync, schema=file_object_schema, branch="main")
+
+ assert node.is_file_object()
+
+
+@pytest.mark.parametrize("client_type", client_types)
+async def test_node_is_file_object_false(
+ client_type: str, clients: BothClients, non_file_object_schema: NodeSchemaAPI
+) -> None:
+ """Test that is_file_object returns False for regular nodes."""
+ if client_type == "standard":
+ node = InfrahubNode(client=clients.standard, schema=non_file_object_schema, branch="main")
+ else:
+ node = InfrahubNodeSync(client=clients.sync, schema=non_file_object_schema, branch="main")
+
+ assert not node.is_file_object()
+
+
+@pytest.mark.parametrize("client_type", client_types)
+async def test_node_upload_from_bytes_with_bytes(
+ client_type: str, clients: BothClients, file_object_schema: NodeSchemaAPI
+) -> None:
+ """Test that upload_from_bytes works with bytes on FileObject nodes."""
+ if client_type == "standard":
+ node = InfrahubNode(client=clients.standard, schema=file_object_schema, branch="main")
+ else:
+ node = InfrahubNodeSync(client=clients.sync, schema=file_object_schema, branch="main")
+
+ file_content = b"PDF content here"
+ node.upload_from_bytes(content=file_content, name="contract.pdf")
+
+ assert node._file_content == file_content
+ assert node._file_name == "contract.pdf"
+
+
+@pytest.mark.parametrize("client_type", client_types)
+async def test_node_upload_from_path(client_type: str, clients: BothClients, file_object_schema: NodeSchemaAPI) -> None:
+ """Test that upload_from_path works with a Path object."""
+ if client_type == "standard":
+ node = InfrahubNode(client=clients.standard, schema=file_object_schema, branch="main")
+ else:
+ node = InfrahubNodeSync(client=clients.sync, schema=file_object_schema, branch="main")
+
+ file_content = b"Content from file path"
+ with tempfile.NamedTemporaryFile(suffix=".pdf") as tmp:
+ tmp.write(file_content)
+ tmp.flush()
+ tmp_path = Path(tmp.name)
+
+ node.upload_from_path(path=tmp_path)
+ assert node._file_content == tmp_path
+ assert node._file_name == tmp_path.name
+
+
+@pytest.mark.parametrize("client_type", client_types)
+async def test_node_upload_from_bytes_with_binary_io(
+ client_type: str, clients: BothClients, file_object_schema: NodeSchemaAPI
+) -> None:
+ """Test that upload_from_bytes works with a BinaryIO object."""
+ if client_type == "standard":
+ node = InfrahubNode(client=clients.standard, schema=file_object_schema, branch="main")
+ else:
+ node = InfrahubNodeSync(client=clients.sync, schema=file_object_schema, branch="main")
+
+ file_content = b"Content from BinaryIO"
+ file_obj = BytesIO(file_content)
+
+ node.upload_from_bytes(content=file_obj, name="uploaded.pdf")
+
+ assert node._file_content == file_obj
+ assert node._file_name == "uploaded.pdf"
+
+
+@pytest.mark.parametrize("client_type", client_types)
+async def test_node_upload_from_bytes_on_non_file_object_raises(
+ client_type: str, clients: BothClients, non_file_object_schema: NodeSchemaAPI
+) -> None:
+ """Test that upload_from_bytes raises FeatureNotSupportedError on non-FileObject nodes."""
+ if client_type == "standard":
+ node = InfrahubNode(client=clients.standard, schema=non_file_object_schema, branch="main")
+ else:
+ node = InfrahubNodeSync(client=clients.sync, schema=non_file_object_schema, branch="main")
+
+ with pytest.raises(FeatureNotSupportedError, match=r"File upload is not supported"):
+ node.upload_from_bytes(content=b"some content", name="file.txt")
+
+
+@pytest.mark.parametrize("client_type", client_types)
+async def test_node_upload_from_path_on_non_file_object_raises(
+ client_type: str, clients: BothClients, non_file_object_schema: NodeSchemaAPI
+) -> None:
+ """Test that upload_from_path raises FeatureNotSupportedError on non-FileObject nodes."""
+ if client_type == "standard":
+ node = InfrahubNode(client=clients.standard, schema=non_file_object_schema, branch="main")
+ else:
+ node = InfrahubNodeSync(client=clients.sync, schema=non_file_object_schema, branch="main")
+
+ with pytest.raises(FeatureNotSupportedError, match=r"File upload is not supported"):
+ node.upload_from_path(path=Path("/some/file.txt"))
+
+
+@pytest.mark.parametrize("client_type", client_types)
+async def test_node_clear_file(client_type: str, clients: BothClients, file_object_schema: NodeSchemaAPI) -> None:
+ """Test that clear_file removes pending file content."""
+ if client_type == "standard":
+ node = InfrahubNode(client=clients.standard, schema=file_object_schema, branch="main")
+ else:
+ node = InfrahubNodeSync(client=clients.sync, schema=file_object_schema, branch="main")
+
+ file_content = b"Test content"
+ file_name = "file.txt"
+
+ node.upload_from_bytes(content=file_content, name=file_name)
+ assert node._file_content == file_content
+ assert node._file_name == file_name
+
+ node.clear_file()
+ assert node._file_content is None
+ assert node._file_name is None
+
+
+@pytest.mark.parametrize("client_type", client_types)
+async def test_node_get_file_for_upload_bytes(
+ client_type: str, clients: BothClients, file_object_schema: NodeSchemaAPI
+) -> None:
+ """Test _get_file_for_upload with bytes returns PreparedFile with BytesIO."""
+ if client_type == "standard":
+ node = InfrahubNode(client=clients.standard, schema=file_object_schema, branch="main")
+ else:
+ node = InfrahubNodeSync(client=clients.sync, schema=file_object_schema, branch="main")
+
+ file_content = b"Test content"
+ file_name = "test.txt"
+ node.upload_from_bytes(content=file_content, name=file_name)
+
+ if isinstance(node, InfrahubNode):
+ prepared = await node._get_file_for_upload()
+ else:
+ prepared = node._get_file_for_upload_sync()
+
+ assert prepared.file_object
+ assert prepared.filename == file_name
+ assert not prepared.should_close
+ assert prepared.file_object.read() == file_content
+
+
+@pytest.mark.parametrize("client_type", client_types)
+async def test_node_get_file_for_upload_path(
+ client_type: str, clients: BothClients, file_object_schema: NodeSchemaAPI
+) -> None:
+ """Test _get_file_for_upload with Path returns PreparedFile with opened file handle."""
+ if client_type == "standard":
+ node = InfrahubNode(client=clients.standard, schema=file_object_schema, branch="main")
+ else:
+ node = InfrahubNodeSync(client=clients.sync, schema=file_object_schema, branch="main")
+
+ file_content = b"Content from path"
+ with tempfile.NamedTemporaryFile(suffix=".pdf") as tmp:
+ tmp.write(file_content)
+ tmp.flush()
+ tmp_path = Path(tmp.name)
+
+ node.upload_from_path(path=tmp_path)
+
+ if isinstance(node, InfrahubNode):
+ prepared = await node._get_file_for_upload()
+ else:
+ prepared = node._get_file_for_upload_sync()
+
+ assert prepared.file_object
+ assert prepared.filename == tmp_path.name
+ assert prepared.should_close # Path files should be closed after upload
+ assert prepared.file_object.read() == file_content
+ prepared.file_object.close()
+
+
+@pytest.mark.parametrize("client_type", client_types)
+async def test_node_get_file_for_upload_binary_io(
+ client_type: str, clients: BothClients, file_object_schema: NodeSchemaAPI
+) -> None:
+ """Test _get_file_for_upload with BinaryIO returns PreparedFile with the same object."""
+ if client_type == "standard":
+ node = InfrahubNode(client=clients.standard, schema=file_object_schema, branch="main")
+ else:
+ node = InfrahubNodeSync(client=clients.sync, schema=file_object_schema, branch="main")
+
+ file_content = b"Content from BinaryIO"
+ file_name = "test.bin"
+ file_obj_input = BytesIO(file_content)
+ node.upload_from_bytes(content=file_obj_input, name=file_name)
+
+ if isinstance(node, InfrahubNode):
+ prepared = await node._get_file_for_upload()
+ else:
+ prepared = node._get_file_for_upload_sync()
+
+ assert prepared.file_object is file_obj_input # Should be the same object
+ assert prepared.filename == file_name
+ assert not prepared.should_close # BinaryIO provided by user shouldn't be closed
+
+
+@pytest.mark.parametrize("client_type", client_types)
+async def test_node_get_file_for_upload_none(
+ client_type: str, clients: BothClients, file_object_schema: NodeSchemaAPI
+) -> None:
+ """Test _get_file_for_upload with no file set returns PreparedFile with None values."""
+ if client_type == "standard":
+ node = InfrahubNode(client=clients.standard, schema=file_object_schema, branch="main")
+ else:
+ node = InfrahubNodeSync(client=clients.sync, schema=file_object_schema, branch="main")
+
+ if isinstance(node, InfrahubNode):
+ prepared = await node._get_file_for_upload()
+ else:
+ prepared = node._get_file_for_upload_sync()
+
+ assert prepared.file_object is None
+ assert prepared.filename is None
+ assert not prepared.should_close
+
+
+@pytest.mark.parametrize("client_type", client_types)
+async def test_node_generate_input_data_with_file(
+ client_type: str, clients: BothClients, file_object_schema: NodeSchemaAPI
+) -> None:
+ """Test _generate_input_data places file at mutation level, not inside data."""
+ if client_type == "standard":
+ node = InfrahubNode(client=clients.standard, schema=file_object_schema, branch="main")
+ else:
+ node = InfrahubNodeSync(client=clients.sync, schema=file_object_schema, branch="main")
+
+ node.upload_from_bytes(content=b"test content", name="test.txt")
+
+ input_data = node._generate_input_data()
+
+ assert "file" in input_data["data"], "file should be at mutation payload level"
+ assert input_data["data"]["file"] == "$file"
+ assert "file" not in input_data["data"]["data"], "file should not be inside nested data dict"
+ assert "file" in input_data["mutation_variables"]
+ assert input_data["mutation_variables"]["file"] is bytes
diff --git a/tests/unit/sdk/test_query_analyzer.py b/tests/unit/sdk/test_query_analyzer.py
index a4deefb2..ffa810ad 100644
--- a/tests/unit/sdk/test_query_analyzer.py
+++ b/tests/unit/sdk/test_query_analyzer.py
@@ -148,7 +148,7 @@ async def test_get_variables(query_01: str, query_04: str, query_05: str, query_
@pytest.mark.parametrize(
- "var_type,var_required",
+ ("var_type", "var_required"),
[("[ID]", False), ("[ID]!", True), ("[ID!]", False), ("[ID!]!", True)],
)
async def test_get_nested_variables(var_type: str, var_required: bool) -> None:
diff --git a/tests/unit/sdk/test_schema.py b/tests/unit/sdk/test_schema.py
index 05302b11..149b584c 100644
--- a/tests/unit/sdk/test_schema.py
+++ b/tests/unit/sdk/test_schema.py
@@ -243,10 +243,7 @@ async def test_schema_wait_happy_path(clients: BothClients, client_type: list[st
@pytest.mark.parametrize("client_type", client_types)
async def test_schema_set_cache_dict(clients: BothClients, client_type: list[str], schema_query_01_data: dict) -> None:
- if client_type == "standard":
- client = clients.standard
- else:
- client = clients.sync
+ client = clients.standard if client_type == "standard" else clients.sync
client.schema.set_cache(schema_query_01_data, branch="branch1")
assert "branch1" in client.schema.cache
@@ -257,10 +254,7 @@ async def test_schema_set_cache_dict(clients: BothClients, client_type: list[str
async def test_schema_set_cache_branch_schema(
clients: BothClients, client_type: list[str], schema_query_01_data: dict
) -> None:
- if client_type == "standard":
- client = clients.standard
- else:
- client = clients.sync
+ client = clients.standard if client_type == "standard" else clients.sync
schema = BranchSchema.from_api_response(schema_query_01_data)
diff --git a/tests/unit/sdk/test_timestamp.py b/tests/unit/sdk/test_timestamp.py
index 713f2f72..fd189f4b 100644
--- a/tests/unit/sdk/test_timestamp.py
+++ b/tests/unit/sdk/test_timestamp.py
@@ -43,7 +43,7 @@ def test_parse_string() -> None:
@pytest.mark.parametrize(
- "input_str,expected_datetime",
+ ("input_str", "expected_datetime"),
[
pytest.param(
"2022-01-01T10:01:01.123000Z", datetime(2022, 1, 1, 10, 1, 1, 123000, tzinfo=UTC), id="milliseconds"
@@ -69,7 +69,7 @@ def test_to_datetime(input_str: str, expected_datetime: datetime) -> None:
@pytest.mark.parametrize(
- "input_str,expected_str,expected_str_no_z",
+ ("input_str", "expected_str", "expected_str_no_z"),
[
pytest.param(
"2022-01-01T10:01:01.123000Z",
diff --git a/tests/unit/test_tasks.py b/tests/unit/test_tasks.py
new file mode 100644
index 00000000..70a4064c
--- /dev/null
+++ b/tests/unit/test_tasks.py
@@ -0,0 +1,33 @@
+from __future__ import annotations
+
+import pytest
+from invoke import Exit
+
+import tasks
+
+
+class TestRequireTool:
+ """Verify that require_tool() raises with a helpful message when an external tool is missing."""
+
+ def test_raises_when_tool_missing(self, monkeypatch: pytest.MonkeyPatch) -> None:
+ # Arrange
+ monkeypatch.setattr(tasks, "which", lambda _name: None)
+
+ # Act / Assert
+ with pytest.raises(Exit, match="mytool is not installed"):
+ tasks.require_tool("mytool", "Install it with: pip install mytool")
+
+ def test_passes_when_tool_installed(self, monkeypatch: pytest.MonkeyPatch) -> None:
+ # Arrange
+ monkeypatch.setattr(tasks, "which", lambda _name: "/usr/bin/mytool")
+
+ # Act / Assert — no exception means tool is found
+ tasks.require_tool("mytool", "Install it with: pip install mytool")
+
+ def test_error_message_contains_install_hint(self, monkeypatch: pytest.MonkeyPatch) -> None:
+ # Arrange
+ monkeypatch.setattr(tasks, "which", lambda _name: None)
+
+ # Act / Assert
+ with pytest.raises(Exit, match="Install it with: npm install"):
+ tasks.require_tool("sometool", "Install it with: npm install")
diff --git a/uv.lock b/uv.lock
index eed6d509..b832ef39 100644
--- a/uv.lock
+++ b/uv.lock
@@ -177,15 +177,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/e4/37/af0d2ef3967ac0d6113837b44a4f0bfe1328c2b9763bd5b1744520e5cfed/certifi-2025.10.5-py3-none-any.whl", hash = "sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de", size = 163286, upload-time = "2025-10-05T04:12:14.03Z" },
]
-[[package]]
-name = "cfgv"
-version = "3.4.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114, upload-time = "2023-08-12T20:38:17.776Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249, upload-time = "2023-08-12T20:38:16.269Z" },
-]
-
[[package]]
name = "charset-normalizer"
version = "3.4.4"
@@ -455,15 +446,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190, upload-time = "2025-02-24T04:41:32.565Z" },
]
-[[package]]
-name = "distlib"
-version = "0.4.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/96/8e/709914eb2b5749865801041647dc7f4e6d00b549cfe88b65ca192995f07c/distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d", size = 614605, upload-time = "2025-07-17T16:52:00.465Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" },
-]
-
[[package]]
name = "docker"
version = "7.1.0"
@@ -560,15 +542,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/eb/23/dfb161e91db7c92727db505dc72a384ee79681fe0603f706f9f9f52c2901/fastapi-0.121.2-py3-none-any.whl", hash = "sha256:f2d80b49a86a846b70cc3a03eb5ea6ad2939298bf6a7fe377aa9cd3dd079d358", size = 109201, upload-time = "2025-11-13T17:05:52.718Z" },
]
-[[package]]
-name = "filelock"
-version = "3.20.3"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/1d/65/ce7f1b70157833bf3cb851b556a37d4547ceafc158aa9b34b36782f23696/filelock-3.20.3.tar.gz", hash = "sha256:18c57ee915c7ec61cff0ecf7f0f869936c7c30191bb0cf406f1341778d0834e1", size = 19485, upload-time = "2026-01-09T17:55:05.421Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/b5/36/7fb70f04bf00bc646cd5bb45aa9eddb15e19437a28b8fb2b4a5249fac770/filelock-3.20.3-py3-none-any.whl", hash = "sha256:4b0dda527ee31078689fc205ec4f1c1bf7d56cf88b6dc9426c4f230e46c2dce1", size = 16701, upload-time = "2026-01-09T17:55:04.334Z" },
-]
-
[[package]]
name = "fsspec"
version = "2025.10.0"
@@ -690,15 +663,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/48/30/47d0bf6072f7252e6521f3447ccfa40b421b6824517f82854703d0f5a98b/hyperframe-6.1.0-py3-none-any.whl", hash = "sha256:b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5", size = 13007, upload-time = "2025-01-22T21:41:47.295Z" },
]
-[[package]]
-name = "identify"
-version = "2.6.15"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/ff/e7/685de97986c916a6d93b3876139e00eef26ad5bbbd61925d670ae8013449/identify-2.6.15.tar.gz", hash = "sha256:e4f4864b96c6557ef2a1e1c951771838f4edc9df3a72ec7118b338801b11c7bf", size = 99311, upload-time = "2025-10-02T17:43:40.631Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/0f/1c/e5fd8f973d4f375adb21565739498e2e9a1e54c858a97b9a8ccfdc81da9b/identify-2.6.15-py2.py3-none-any.whl", hash = "sha256:1181ef7608e00704db228516541eb83a88a9f94433a8c80bb9b5bd54b1d81757", size = 99183, upload-time = "2025-10-02T17:43:39.137Z" },
-]
-
[[package]]
name = "idna"
version = "3.11"
@@ -722,7 +686,7 @@ wheels = [
[[package]]
name = "infrahub-sdk"
-version = "1.18.1"
+version = "1.19.0"
source = { editable = "." }
dependencies = [
{ name = "dulwich" },
@@ -753,6 +717,7 @@ ctl = [
{ name = "ariadne-codegen" },
{ name = "click" },
{ name = "jinja2" },
+ { name = "mdxify" },
{ name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" },
{ name = "numpy", version = "2.3.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" },
{ name = "pyarrow" },
@@ -770,7 +735,7 @@ dev = [
{ name = "ipython", version = "8.37.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" },
{ name = "ipython", version = "9.6.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" },
{ name = "mypy" },
- { name = "pre-commit" },
+ { name = "prek" },
{ name = "pytest" },
{ name = "pytest-asyncio" },
{ name = "pytest-clarity" },
@@ -819,6 +784,7 @@ requires-dist = [
{ name = "httpx", specifier = ">=0.20" },
{ name = "jinja2", marker = "extra == 'all'", specifier = ">=3" },
{ name = "jinja2", marker = "extra == 'ctl'", specifier = ">=3" },
+ { name = "mdxify", marker = "python_full_version >= '3.10' and extra == 'ctl'", specifier = ">=0.2.23" },
{ name = "netutils", specifier = ">=1.0.0" },
{ name = "numpy", marker = "python_full_version >= '3.12' and extra == 'all'", specifier = ">=1.26.2" },
{ name = "numpy", marker = "python_full_version >= '3.12' and extra == 'ctl'", specifier = ">=1.26.2" },
@@ -845,11 +811,11 @@ provides-extras = ["ctl", "all"]
dev = [
{ name = "astroid", specifier = ">=3.1,<4.0" },
{ name = "codecov" },
- { name = "infrahub-testcontainers", specifier = ">=1.5.1" },
+ { name = "infrahub-testcontainers", specifier = ">=1.7.3" },
{ name = "invoke", specifier = ">=2.2.0" },
{ name = "ipython" },
{ name = "mypy", specifier = "==1.11.2" },
- { name = "pre-commit", specifier = ">=2.20.0" },
+ { name = "prek", specifier = ">=0.3.0" },
{ name = "pytest", specifier = ">=9.0,<9.1" },
{ name = "pytest-asyncio", specifier = ">=1.3,<1.4" },
{ name = "pytest-clarity", specifier = ">=1.0.1" },
@@ -873,7 +839,7 @@ lint = [
{ name = "yamllint" },
]
tests = [
- { name = "infrahub-testcontainers", specifier = ">=1.5.1" },
+ { name = "infrahub-testcontainers", specifier = ">=1.7.3" },
{ name = "pytest", specifier = ">=9.0,<9.1" },
{ name = "pytest-asyncio", specifier = ">=1.3,<1.4" },
{ name = "pytest-clarity", specifier = ">=1.0.1" },
@@ -1185,6 +1151,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" },
]
+[[package]]
+name = "mdxify"
+version = "0.2.36"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "griffe" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/57/8b/eec3cc2f5b9e15a1d5d1a7399cf68b420bbd7ab8c363c789cfb14f783a09/mdxify-0.2.36.tar.gz", hash = "sha256:bd8afc3036b8258b13cd6d44413f1805088a9959b1b2d63eae9160cc037ee8e4", size = 1250127, upload-time = "2026-02-06T17:58:19.542Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/aa/b4/3ad6aac18dbd5913201cd3bbf19a896a59fd418c7e87a5abf18575fb339a/mdxify-0.2.36-py3-none-any.whl", hash = "sha256:9dbe9b3e608ad1b9d5d95f95fcc66788d0d737a52eadd8bdb1244e628dc6d98c", size = 24552, upload-time = "2026-02-06T17:58:18.542Z" },
+]
+
[[package]]
name = "mypy"
version = "1.11.2"
@@ -1232,15 +1210,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/da/a2/20e3cc569b3a41cc36181212c99f3e3c0aa9201174b2bf99313328824a2b/netutils-1.15.1-py3-none-any.whl", hash = "sha256:c42886d456f9b21bee395628b100dc2cd4b68fcc223f33c669672c3468d6b4dc", size = 532245, upload-time = "2025-10-21T00:41:06.141Z" },
]
-[[package]]
-name = "nodeenv"
-version = "1.9.1"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload-time = "2024-06-04T18:44:11.171Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" },
-]
-
[[package]]
name = "numpy"
version = "2.2.6"
@@ -1611,22 +1580,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/4e/d1/e4ed95fdd3ef13b78630280d9e9e240aeb65cc7c544ec57106149c3942fb/pprintpp-0.4.0-py2.py3-none-any.whl", hash = "sha256:b6b4dcdd0c0c0d75e4d7b2f21a9e933e5b2ce62b26e1a54537f9651ae5a5c01d", size = 16952, upload-time = "2018-07-01T01:42:36.496Z" },
]
-[[package]]
-name = "pre-commit"
-version = "4.3.0"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "cfgv" },
- { name = "identify" },
- { name = "nodeenv" },
- { name = "pyyaml" },
- { name = "virtualenv" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/ff/29/7cf5bbc236333876e4b41f56e06857a87937ce4bf91e117a6991a2dbb02a/pre_commit-4.3.0.tar.gz", hash = "sha256:499fe450cc9d42e9d58e606262795ecb64dd05438943c62b66f6a8673da30b16", size = 193792, upload-time = "2025-08-09T18:56:14.651Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/5b/a5/987a405322d78a73b66e39e4a90e4ef156fd7141bf71df987e50717c321b/pre_commit-4.3.0-py2.py3-none-any.whl", hash = "sha256:2b0747ad7e6e967169136edffee14c16e148a778a54e4f967921aa1ebf2308d8", size = 220965, upload-time = "2025-08-09T18:56:13.192Z" },
-]
-
[[package]]
name = "prefect-client"
version = "3.6.13"
@@ -1678,6 +1631,30 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/c8/51/0216ef9c7ca6002e7b5ae92cdb2858e4f8c5d69c7f2a4a9050afc1086934/prefect_client-3.6.13-py3-none-any.whl", hash = "sha256:3076194ec12b3770e53b1cb8f1d68a7628b8658912e183431a398d7e1617570d", size = 899733, upload-time = "2026-01-23T04:17:47.825Z" },
]
+[[package]]
+name = "prek"
+version = "0.3.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f2/1e/6c23d3470145be1d6ff29d93f2a521864788827d22e509e2b978eb5bb4cb/prek-0.3.0.tar.gz", hash = "sha256:e70f16bbaf2803e490b866cfa997ea5cc46e7ada55d61f0cdd84bc90b8d5ca7f", size = 316063, upload-time = "2026-01-22T04:00:01.648Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/84/49/469219c19bb00db678806f79fc084ac1ce9952004a183a798db26f6df22b/prek-0.3.0-py3-none-linux_armv6l.whl", hash = "sha256:7e5d40b22deff23e36f7ad91e24b8e62edf32f30f6dad420459f7ec7188233c3", size = 4317493, upload-time = "2026-01-22T03:59:51.769Z" },
+ { url = "https://files.pythonhosted.org/packages/87/9f/f7afc49cc0fd92d1ba492929dc1573cb7004d09b61341aa6ee32a5288657/prek-0.3.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6712b58cbb5a7db0aaef180c489ce9f3462e0293d54e54baeedd75fc0d9d8c28", size = 4323961, upload-time = "2026-01-22T03:59:56.92Z" },
+ { url = "https://files.pythonhosted.org/packages/42/94/ba36dc29e71d476bf71c3bac2b0c89cfcfc4b8973a0a6b20728f429f4560/prek-0.3.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:5f2c446fd9012a98c5690b4badf3f7dfb8d424cf0c6798a2d08ee56511f0a670", size = 3970121, upload-time = "2026-01-22T03:59:55.722Z" },
+ { url = "https://files.pythonhosted.org/packages/b5/93/6131dd9f6cde3d72815b978b766de21b2ac9cc15fc38f5c22267cc7e574d/prek-0.3.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:10f3da7cda2397f7d2f3ff7f2be0d7486c15d4941f7568095b7168e57a9c88c5", size = 4307430, upload-time = "2026-01-22T03:59:47.484Z" },
+ { url = "https://files.pythonhosted.org/packages/6f/08/7c55a765d96028d38dc984e66a096a969d80e56f66a47801acc86dede856/prek-0.3.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f747bb4a4322fea35d548cd2c1bd24477f56ed009f3d62a2b97ecbfc88096ac", size = 4238032, upload-time = "2026-01-22T04:00:02.606Z" },
+ { url = "https://files.pythonhosted.org/packages/dc/a7/59d9bf902b749c8a0cef9e8ac073cc5c886634cd09404c00af4a76470b3b/prek-0.3.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40bd61f11d8caabc0e2a5d4c326639d6ff558b580ef4388aabec293ddb5afd35", size = 4493295, upload-time = "2026-01-22T03:59:45.964Z" },
+ { url = "https://files.pythonhosted.org/packages/08/dc/902b2e4ddff59ad001ddc2cda3b47e457ab1ee811698a4002b3e4f84faf1/prek-0.3.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1d096b5e273d17a1300b20a7101a9e5a624a8104825eb59659776177f7fccea1", size = 5033370, upload-time = "2026-01-22T03:59:44.806Z" },
+ { url = "https://files.pythonhosted.org/packages/15/cd/277a3d2768b80bb1ff3c2ea8378687bb4c527d88a8b543bf6f364f8a0dc9/prek-0.3.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df39face5f1298851fbae495267ddf60f1694ea594ed5c6cdb88bdd6de14f6a4", size = 4549792, upload-time = "2026-01-22T03:59:41.518Z" },
+ { url = "https://files.pythonhosted.org/packages/26/21/53aeabd3822ef7fa350aac66d099d4d97b05e8383a2df35499229389a642/prek-0.3.0-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:9462f80a576d661490aa058d4493a991a34c7532dea76b7b004a17c8bc6b80f2", size = 4323158, upload-time = "2026-01-22T03:59:54.284Z" },
+ { url = "https://files.pythonhosted.org/packages/27/c2/3a7392b0e7fd07e339d89701b49b12a89d85256a57279877195028215957/prek-0.3.0-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:33d3fa40eecf996ed14bab2d006c39d21ae344677d62599963efd9b27936558e", size = 4344632, upload-time = "2026-01-22T04:00:03.71Z" },
+ { url = "https://files.pythonhosted.org/packages/71/89/8254ac981d75d0ce2826bcac74fed901540d629cb2d9f4d73ce62f8ce843/prek-0.3.0-py3-none-musllinux_1_1_armv7l.whl", hash = "sha256:d8c6abfd53a23718afdf4e6107418db1d74c5d904e9b7ec7900e285f8da90723", size = 4216608, upload-time = "2026-01-22T03:59:58.527Z" },
+ { url = "https://files.pythonhosted.org/packages/20/f5/854d57d89376fac577ee647a1dba1b87e27b2baeca7edc3d40295adeb7c8/prek-0.3.0-py3-none-musllinux_1_1_i686.whl", hash = "sha256:eb4c80c3e7c0e16bf307947809112bfef3715a1b83c2b03f5937707934635617", size = 4371174, upload-time = "2026-01-22T03:59:53.088Z" },
+ { url = "https://files.pythonhosted.org/packages/03/38/8927619411da8d3f189415c452ec7a463f09dea69e272888723f37b4b18f/prek-0.3.0-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:602bcce070c50900167acd89dcdf95d27894412f8a7b549c8eb66de612a99653", size = 4659113, upload-time = "2026-01-22T03:59:43.166Z" },
+ { url = "https://files.pythonhosted.org/packages/8c/4d/16baeef633b8b230dde878b858c0e955149c860feef518b5eb5aac640eec/prek-0.3.0-py3-none-win32.whl", hash = "sha256:a69229365ce33c68c05db7ae73ad1ef8bc7f0914ab3bc484ab7781256bcdfb7a", size = 3937103, upload-time = "2026-01-22T03:59:48.719Z" },
+ { url = "https://files.pythonhosted.org/packages/f4/f2/c7395b4afd1bba32cad2b24c30fd7781e94c1e41137348cd150bbef001d6/prek-0.3.0-py3-none-win_amd64.whl", hash = "sha256:a0379afd8d31bd5da6ee8977820fdb3c30601bed836b39761e6f605451dbccaa", size = 4290763, upload-time = "2026-01-22T03:59:59.938Z" },
+ { url = "https://files.pythonhosted.org/packages/df/83/97ed76ab5470025992cd50cb1ebdeb21fcf6c25459f9ffc49ac7bf040cf4/prek-0.3.0-py3-none-win_arm64.whl", hash = "sha256:82e2c64f75dc1ea6f2023f4322500eb8da5d0557baf06c88677bddf163e1542a", size = 4041580, upload-time = "2026-01-22T03:59:50.082Z" },
+]
+
[[package]]
name = "prometheus-client"
version = "0.23.1"
@@ -2939,21 +2916,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/ee/d9/d88e73ca598f4f6ff671fb5fde8a32925c2e08a637303a1d12883c7305fa/uvicorn-0.38.0-py3-none-any.whl", hash = "sha256:48c0afd214ceb59340075b4a052ea1ee91c16fbc2a9b1469cca0e54566977b02", size = 68109, upload-time = "2025-10-18T13:46:42.958Z" },
]
-[[package]]
-name = "virtualenv"
-version = "20.36.1"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "distlib" },
- { name = "filelock" },
- { name = "platformdirs" },
- { name = "typing-extensions", marker = "python_full_version < '3.11'" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/aa/a3/4d310fa5f00863544e1d0f4de93bddec248499ccf97d4791bc3122c9d4f3/virtualenv-20.36.1.tar.gz", hash = "sha256:8befb5c81842c641f8ee658481e42641c68b5eab3521d8e092d18320902466ba", size = 6032239, upload-time = "2026-01-09T18:21:01.296Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/6a/2a/dc2228b2888f51192c7dc766106cd475f1b768c10caaf9727659726f7391/virtualenv-20.36.1-py3-none-any.whl", hash = "sha256:575a8d6b124ef88f6f51d56d656132389f961062a9177016a50e4f507bbcc19f", size = 6008258, upload-time = "2026-01-09T18:20:59.425Z" },
-]
-
[[package]]
name = "wcwidth"
version = "0.2.14"