diff --git a/.github/workflows/check.yml b/.github/workflows/check.yml index 67d57978..5e7c572a 100644 --- a/.github/workflows/check.yml +++ b/.github/workflows/check.yml @@ -19,9 +19,6 @@ jobs: run: >- docker run --rm -v "$PWD:/mnt" --workdir "/mnt" "koalaman/shellcheck:v0.8.0" --color=always \ $(find . -type f -exec grep -m1 -l -E '^#!.*sh.*' {} \; | grep -v '/.git/') - - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 - with: - python-version: '3.14' - name: Install uv uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v7.2.0 with: @@ -35,3 +32,17 @@ jobs: uv run ruff format --check . uv run pyright working-directory: xtest + - name: Lint otdf-local + run: | + uv sync + uv run ruff check . + uv run ruff format --check . + uv run pyright + working-directory: otdf-local + - name: Lint otdf-sdk-mgr + run: | + uv sync + uv run ruff check . + uv run ruff format --check . + uv run pyright + working-directory: otdf-sdk-mgr diff --git a/.github/workflows/xtest.yml b/.github/workflows/xtest.yml index 9594c929..d96f8e40 100644 --- a/.github/workflows/xtest.yml +++ b/.github/workflows/xtest.yml @@ -113,13 +113,10 @@ jobs: path: otdf-sdk persist-credentials: false repository: opentdf/tests - sparse-checkout: xtest/sdk - - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b - with: - python-version: "3.14" - - run: |- - pip install -r scripts/requirements.txt - working-directory: otdf-sdk/xtest/sdk + sparse-checkout: | + xtest/sdk + otdf-sdk-mgr + - uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v7.2.0 - id: version-info uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea #v7.0.1 with: @@ -134,8 +131,7 @@ jobs: const { execSync } = require('child_process'); const path = require('path'); - const workingDir = path.join(process.env.GITHUB_WORKSPACE, 'otdf-sdk/xtest/sdk'); - const resolveVersionScript = path.join(workingDir, 'scripts/resolve-version.py'); + const sdkMgrDir = 
path.join(process.env.GITHUB_WORKSPACE, 'otdf-sdk/otdf-sdk-mgr'); const defaultTags = process.env.DEFAULT_TAGS || 'main'; core.setOutput('default-tags', defaultTags); @@ -150,7 +146,7 @@ jobs: for (const [sdkType, ref] of Object.entries(refs)) { try { - const output = execSync(`python3 ${resolveVersionScript} ${sdkType} ${ref}`, { cwd: workingDir }).toString(); + const output = execSync(`uv run --project ${sdkMgrDir} otdf-sdk-mgr versions resolve ${sdkType} ${ref}`, { cwd: sdkMgrDir }).toString(); const ojson = JSON.parse(output); if (!!ojson.err) { throw new Error(ojson.err); @@ -247,10 +243,8 @@ jobs: extra-keys: ${{ steps.load-extra-keys.outputs.EXTRA_KEYS }} log-type: json - - name: Set up Python 3.14 - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b - with: - python-version: "3.14" + - name: Install uv + uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v7.2.0 - uses: bufbuild/buf-action@8f4a1456a0ab6a1eb80ba68e53832e6fcfacc16c # v1.3.0 with: setup_only: true @@ -271,6 +265,7 @@ jobs: ######### CHECKOUT JS CLI ############# - name: Configure js-sdk + id: configure-js uses: ./otdftests/xtest/setup-cli-tool with: path: otdftests/xtest/sdk @@ -278,6 +273,7 @@ jobs: version-info: "${{ needs.resolve-versions.outputs.js }}" - name: Cache npm + if: fromJson(steps.configure-js.outputs.heads)[0] != null uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2 with: path: ~/.npm @@ -288,6 +284,7 @@ jobs: ######## SETUP THE JS CLI ############# - name: build and setup the web-sdk cli id: build-web-sdk + if: fromJson(steps.configure-js.outputs.heads)[0] != null run: | make working-directory: otdftests/xtest/sdk/js @@ -302,6 +299,7 @@ jobs: version-info: "${{ needs.resolve-versions.outputs.go }}" - name: Cache Go modules + if: fromJson(steps.configure-go.outputs.heads)[0] != null uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2 with: path: | @@ -313,13 +311,14 @@ jobs: - name: Resolve otdfctl heads id: 
resolve-otdfctl-heads + if: fromJson(steps.configure-go.outputs.heads)[0] != null run: |- echo "OTDFCTL_HEADS=$OTDFCTL_HEADS" >> "$GITHUB_ENV" env: OTDFCTL_HEADS: ${{ steps.configure-go.outputs.heads }} - name: Replace otdfctl go.mod packages, but only at head version of platform - if: env.FOCUS_SDK == 'go' && contains(fromJSON(needs.resolve-versions.outputs.heads), matrix.platform-tag) + if: fromJson(steps.configure-go.outputs.heads)[0] != null && env.FOCUS_SDK == 'go' && contains(fromJSON(needs.resolve-versions.outputs.heads), matrix.platform-tag) env: PLATFORM_WORKING_DIR: ${{ steps.run-platform.outputs.platform-working-dir }} run: |- @@ -339,6 +338,7 @@ jobs: ######## SETUP THE GO CLI ############# - name: Prepare go cli + if: fromJson(steps.configure-go.outputs.heads)[0] != null run: |- make working-directory: otdftests/xtest/sdk/go @@ -346,6 +346,7 @@ jobs: ####### CHECKOUT JAVA SDK ############## - name: Configure java-sdk + id: configure-java uses: ./otdftests/xtest/setup-cli-tool with: path: otdftests/xtest/sdk @@ -353,6 +354,7 @@ jobs: version-info: "${{ needs.resolve-versions.outputs.java }}" - name: Cache Maven repository + if: fromJson(steps.configure-java.outputs.heads)[0] != null uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2 with: path: ~/.m2/repository @@ -361,8 +363,10 @@ jobs: maven-${{ runner.os }}- - name: pre-release protocol buffers for java-sdk - if: | - (env.FOCUS_SDK == 'go' || env.FOCUS_SDK == 'java') && contains(fromJSON(needs.resolve-versions.outputs.heads), matrix.platform-tag) + if: >- + fromJson(steps.configure-java.outputs.heads)[0] != null + && (env.FOCUS_SDK == 'go' || env.FOCUS_SDK == 'java') + && contains(fromJSON(needs.resolve-versions.outputs.heads), matrix.platform-tag) run: |- echo "Replacing .env files for java-sdk..." 
echo "Platform tag: $platform_tag" @@ -384,6 +388,7 @@ jobs: ####### SETUP JAVA CLI ############## - name: Prepare java cli + if: fromJson(steps.configure-java.outputs.heads)[0] != null run: | make working-directory: otdftests/xtest/sdk/java @@ -440,8 +445,6 @@ jobs: fi working-directory: ${{ steps.run-platform.outputs.platform-working-dir }} - - name: Install uv - uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v7.2.0 - name: Install test dependencies run: uv sync working-directory: otdftests/xtest @@ -465,7 +468,7 @@ jobs: - name: Run all standard xtests if: ${{ env.FOCUS_SDK == 'all' }} run: |- - uv run pytest -n auto --dist loadscope --html=test-results/sdk-${FOCUS_SDK}-${PLATFORM_TAG}.html --self-contained-html --sdks-encrypt "${ENCRYPT_SDK}" -ra -v test_tdfs.py test_policytypes.py + uv run pytest -n auto --dist worksteal --html=test-results/sdk-${FOCUS_SDK}-${PLATFORM_TAG}.html --self-contained-html --sdks-encrypt "${ENCRYPT_SDK}" -ra -v test_tdfs.py test_policytypes.py working-directory: otdftests/xtest env: PLATFORM_DIR: "../../${{ steps.run-platform.outputs.platform-working-dir }}" @@ -475,7 +478,7 @@ jobs: - name: Run xtests focusing on a specific SDK if: ${{ env.FOCUS_SDK != 'all' }} run: |- - uv run pytest -n auto --dist loadscope --html=test-results/sdk-${FOCUS_SDK}-${PLATFORM_TAG}.html --self-contained-html --sdks-encrypt "${ENCRYPT_SDK}" -ra -v --focus "$FOCUS_SDK" test_tdfs.py test_policytypes.py + uv run pytest -n auto --dist worksteal --html=test-results/sdk-${FOCUS_SDK}-${PLATFORM_TAG}.html --self-contained-html --sdks-encrypt "${ENCRYPT_SDK}" -ra -v --focus "$FOCUS_SDK" test_tdfs.py test_policytypes.py working-directory: otdftests/xtest env: PLATFORM_DIR: "../../${{ steps.run-platform.outputs.platform-working-dir }}" @@ -575,7 +578,7 @@ jobs: -ra -v --numprocesses auto - --dist loadscope + --dist worksteal --html test-results/attributes-${FOCUS_SDK}-${PLATFORM_TAG}.html --self-contained-html --audit-log-dir 
test-results/audit-logs diff --git a/.gitignore b/.gitignore index ecb9a979..669c6668 100644 --- a/.gitignore +++ b/.gitignore @@ -29,5 +29,6 @@ xtest/sdk/java/cmdline.jar /xtest/java-sdk/ /xtest/sdk/go/otdfctl /xtest/otdfctl/ +/xtest/logs/ /tmp/ diff --git a/.shfmt b/.shfmt new file mode 100644 index 00000000..93644280 --- /dev/null +++ b/.shfmt @@ -0,0 +1,20 @@ +# Language variant +language_dialect: bash + +# Indentation (2 spaces) +indent: 2 + +# Binary operators at start of line +binary_next_line: false + +# Switch case indentation +switch_case_indent: true + +# Redirect operators with space +space_redirects: true + +# Keep column alignment +keep_padding: true + +# Function brace on same line +function_next_line: false diff --git a/AGENTS.md b/AGENTS.md index 10897d65..ff4393b6 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -1,35 +1,45 @@ -# Repository Guidelines +# Agent Guide: Working with OpenTDF Tests and Debugging -## Project Structure & Module Organization +This guide provides essential knowledge for AI agents performing updates, refactorings, and debugging of OpenTDF libraries and applications under test. -- `xtest/`: Cross-client compatibility test harness (Python + `pytest`), with fixtures in `xtest/fixtures/` and golden data in `xtest/golden/`. -- `xtest/sdk/`: Helper scripts and Makefiles for checking out/building SDKs under test (e.g., `xtest/sdk/scripts/checkout-all.sh`, `cd xtest/sdk && make`). -- `vulnerability/`: Playwright-based security regression tests (`vulnerability/tests/`). -- `.github/workflows/`: CI workflows (lint/type-check, xtest matrix runs, vulnerability runs). 
+## Test Framework Overview -## Build, Test, and Development Commands +### Structure +- **Test Directory**: `xtest/` - pytest-based integration tests +- **SDK Distributions**: `sdk/{go,java,js}/dist/` - built SDK distributions with CLI wrappers +- **SDK Configuration**: `otdf-sdk-mgr install` - installs SDK CLIs from released artifacts or delegates to source builds +- **SDK Version Lookup**: `otdf-sdk-mgr versions list` - lists released artifacts across registries (Go git tags, npm, Maven Central, GitHub Releases) +- **Platform**: `platform/` - OpenTDF platform service +- **Test Runner**: pytest with custom CLI options -- Enter the dev environment: `devbox shell` (installs Python/JDK/Node per `devbox.json`). -- Install xtest deps: `cd xtest && uv sync` (or `uv sync --extra dev` for dev tools) -- Run xtest: `cd xtest && uv run pytest` - - Focus a subset: `uv run pytest --sdks "go js" --focus go` (see `xtest/conftest.py` options) - - HTML report: `uv run pytest --html tmp/test-report.html --self-contained-html` -- Build SDK CLIs (after checkout): `cd xtest/sdk && make` -- Run vulnerability tests: `cd vulnerability && npm ci && npm test` (requires a running platform; see `README.md` and `vulnerability/README.md`). +### Configuring SDK Artifacts -## Coding Style & Naming Conventions +Use `otdf-sdk-mgr` (uv-managed CLI in `tests/otdf-sdk-mgr/`) to install SDK CLIs from released artifacts or source. See `otdf-sdk-mgr/README.md` for full command reference. -- Python code in `xtest/` uses 4-space indentation, `snake_case`, and `pytest`-style fixtures. -- CI enforces these checks in `xtest/`: - - `ruff check` and `ruff format --check` - - `pyright` - - Local equivalent: `cd xtest && uv sync --extra dev && uv run ruff check . && uv run ruff format --check . && uv run pyright` +```bash +cd tests/otdf-sdk-mgr && uv tool install --editable . 
+otdf-sdk-mgr install stable # Latest stable releases (recommended) +otdf-sdk-mgr install tip go # Build from source +``` + +### Running Tests + +```bash +# Configure environment +cd xtest && set -a && source test.env && set +a -## Testing Guidelines +# Run with specific SDK +uv run pytest --sdks go -v -- `pytest` tests live in `xtest/test_*.py`; add new fixtures under `xtest/fixtures/`. -- Tests assume a platform backend is reachable (Docker + Keycloak). Use `xtest/test.env` as a template: - - `cd xtest && set -a && source test.env && set +a` +# Run with multiple SDKs (space-separated) +uv run pytest --sdks "go java js" -v + +# Run specific test file +uv run pytest test_tdfs.py --sdks go -v + +# Run specific test +uv run pytest test_tdfs.py::test_tdf_roundtrip --sdks go -v +``` ### Custom pytest Options - `--sdks`: Specify which SDKs to test (go, java, js) @@ -78,7 +88,179 @@ export KAS_BETA_LOG_FILE=/path/to/kas-beta.log Or ensure services are running with logs in `../../platform/logs/` (auto-discovered). -- Use semantic commit/PR titles (enforced by CI): `feat(xtest): ...`, `fix(vulnerability): ...`, `docs: ...` (types: `fix|feat|chore|docs`; scopes include `xtest`, `vulnerability`, `go`, `java`, `web`, `ci`). -- DCO sign-off is required: `git commit -s -m "feat(xtest): ..."` (see `CONTRIBUTING.md`). -- PRs should include a clear description, linked issue (if any), and relevant logs/screenshots for test failures; reviewers are defined in `CODEOWNERS`. +## Environment Management + +Use `otdf-local` for all environment management (starting/stopping services, viewing logs, restart procedures, troubleshooting). 
See: +- `otdf-local/README.md` - command reference and installation +- `otdf-local/CLAUDE.md` - operational procedures (restarts, tmux navigation, golden key config, troubleshooting) + +Quick start: `cd tests/otdf-local && uv run otdf-local up` + +## Key Concepts + +### TDF Wrapping Algorithms + +**RSA Wrapping (wrapped)**: +- Traditional approach +- Uses RSA public key to wrap symmetric key +- KAO type: "wrapped" + +**EC Wrapping (ec-wrapped)**: +- Elliptic curve based wrapping +- Uses ephemeral key pair + key derivation +- KAO type: "ec-wrapped" +- Requires: `ec_tdf_enabled: true` in platform config + +**Base Key Behavior**: +- When platform advertises a `base_key` in `.well-known/opentdf-configuration` + +**Ensuring Key Consistency**: +```bash +# km instances must use platform's root_key: +PLATFORM_ROOT_KEY=$(yq e '.services.kas.root_key' "$PLATFORM_DIR/opentdf-dev.yaml") +yq e -i ".services.kas.root_key = \"$PLATFORM_ROOT_KEY\"" "$CONFIG_FILE" +``` + +## Common Test Failures and Debugging + +### EC Wrapping Test Failures: "EC wrapping not supported" + +**Fix**: +```bash +yq e -i '.services.kas.preview.ec_tdf_enabled = true' platform/opentdf.yaml +yq e -i '.services.kas.preview.ec_tdf_enabled = true' platform/opentdf-dev.yaml +# Restart the platform service +``` + +### ABAC Test Failures: Decrypt Errors + +**Symptom**: ABAC autoconfigure tests fail during decrypt + +**Root Cause**: KAS instances (alpha, beta, etc.) not registered in platform's KAS registry + +**Debug**: +```bash +curl http://localhost:8080/api/kas/v2/kas/key-access-servers | jq '.key_access_servers[].uri' +# Expected: alpha=8181, beta=8282, gamma=8383, delta=8484 +``` + +**Fix**: Ensure all KAS instances are properly registered during startup. + +### Legacy/Golden TDF Test Failures + +**Symptom**: "cipher: message authentication failed" + +**Root Cause**: Golden TDFs require specific keys loaded by the platform. Ensure the platform is configured with the correct golden keys. 
+ +```bash +cd xtest +uv run pytest test_legacy.py --sdks go -v --no-audit-logs +``` + +### Missing Environment Variables + +**Symptom**: "OT_ROOT_KEY environment variable is not set" + +**Fix**: +```bash +export OT_ROOT_KEY=$(yq e '.services.kas.root_key' platform/opentdf-dev.yaml) +export SCHEMA_FILE=/path/to/schema.json +``` + +## Debugging Workflow + +1. **Run tests**: `uv run pytest --sdks go -v 2>&1 | tee test_output.log` +2. **Analyze failures**: Read error messages, check which test category is failing, look for patterns +3. **Inspect platform state**: + ```bash + curl http://localhost:8080/.well-known/opentdf-configuration | jq + curl http://localhost:8080/api/kas/v2/kas/key-access-servers | jq + curl http://localhost:8080/healthz + ``` +4. **Check service logs**: Look at platform and KAS log files for errors +5. **Manual reproduction**: + ```bash + sdk/go/dist/main/cli.sh encrypt test.txt test.tdf --attr https://example.com/attr/foo/value/bar + sdk/go/dist/main/cli.sh decrypt test.tdf test.out.txt + ``` +6. **Fix and verify**: Make changes, restart services if needed, re-run failing test, then run full suite + +## Code Modification Best Practices + +### Before Making Changes + +1. **Always read files first** - use Read tool before Edit/Write +2. **Understand existing patterns** - check similar code in the codebase +3. **Check test coverage** - modifications may affect multiple tests +4. **Review related config** - platform config, KAS config, SDK config + +### When Modifying SDK Code + +After changes to SDK source, rebuild with `cd xtest/sdk && make`. + +### When Modifying Platform Code + +Restart the platform service after making changes. 
+ +### When Modifying Test Code + +- **Test fixtures**: Changes affect all tests using that fixture +- **Helper functions**: Used across multiple test files +- **Conftest.py**: Session-scoped fixtures, careful with modifications + +## Test File Organization + +### Key Test Files + +- `test_tdfs.py` - Core TDF roundtrip, manifest validation, tampering tests +- `test_abac.py` - ABAC policy, autoconfigure, key management tests +- `test_legacy.py` - Backward compatibility with golden TDFs (requires golden-r1 key) +- `test_policytypes.py` - Policy type tests (OR, AND, hierarchy) +- `test_self.py` - Platform API tests (namespaces, attributes, SCS) + +### Important Helper Files + +- `tdfs.py` - SDK abstraction layer, core test utilities +- `fixtures/` - pytest fixtures (attributes, keys, SDKs, etc.) +- `conftest.py` - pytest configuration and shared fixtures + +## Common Pitfalls + +1. **Forgetting to rebuild SDK** after code changes +2. **Modifying wrong config file** (opentdf.yaml vs opentdf-dev.yaml) +3. **Not restarting services** after config changes +4. **Root key mismatches** between platform and KAS instances +5. **Port conflicts** from old processes still running +6. **Assuming SDK behavior** without checking platform configuration + +## Quick Reference + +### Platform Inspection +```bash +curl localhost:8080/.well-known/opentdf-configuration | jq +curl localhost:8080/api/kas/v2/kas/key-access-servers | jq '.key_access_servers[].uri' +curl localhost:8080/healthz +yq e '.services.kas.root_key' platform/opentdf-dev.yaml +``` + +## Summary + +### Preferred Workflow + +1. **Build SDK CLIs**: `cd xtest/sdk && make` +2. **Configure environment**: `cd xtest && set -a && source test.env && set +a` +3. **Run tests**: `uv run pytest --sdks go -v` +4. **Restart after config changes**: Restart the affected platform/KAS services + +### When Debugging Test Failures + +1. Read error messages carefully - they guide you to the root cause +2. 
Check platform configuration matches expected test behavior +3. Verify all KAS instances have consistent keys +4. Ensure services are running and healthy +5. Check service logs for errors +6. Reproduce issues manually when possible +7. Always restart services after config changes +8. Read before writing - understand existing code patterns +The test failures are usually symptoms of configuration mismatches, not SDK bugs. Focus on ensuring the local environment matches what the tests expect. diff --git a/otdf-local/CLAUDE.md b/otdf-local/CLAUDE.md new file mode 100644 index 00000000..ec4a9307 --- /dev/null +++ b/otdf-local/CLAUDE.md @@ -0,0 +1,186 @@ +# otdf-local - Agent Operational Guide + +This guide covers operational procedures for managing the test environment with `otdf-local`. For command reference, see [README.md](README.md). + +## Environment Setup for pytest + +```bash +cd tests/otdf-local +eval $(uv run otdf-local env) # Sets PLATFORM_LOG_FILE, KAS_*_LOG_FILE, etc. +uv run otdf-local env --format json # Output as JSON +cd ../xtest +uv run pytest --sdks go -v +``` + +## Service Ports + +Auto-configured by otdf-local: +- Keycloak: 8888, Postgres: 5432, Platform: 8080 +- KAS: alpha=8181, beta=8282, gamma=8383, delta=8484, km1=8585, km2=8686 + +## Restart Procedures + +### Full Environment Restart +```bash +cd tests/otdf-local + +uv run otdf-local down +uv run otdf-local up + +# Or with cleanup +uv run otdf-local down --clean +uv run otdf-local up +``` + +### Service-Specific Restart +```bash +cd tests/otdf-local + +uv run otdf-local restart platform +uv run otdf-local restart kas-alpha +uv run otdf-local restart kas-km1 +uv run otdf-local restart docker +``` + +### Manual Restart (Emergency) + +Only use when otdf-local itself is broken or unresponsive: + +```bash +pkill -9 -f "go.*service.*start" +pkill -9 -f "opentdf-kas" +tmux kill-session -t xtest 2>/dev/null || true +cd platform && docker compose down 2>/dev/null || true +sleep 5 +cd tests/otdf-local && 
uv run otdf-local up +``` + +### Platform Only (Manual) +```bash +# Via tmux session +tmux attach -t xtest +# Navigate to window 1 (platform) +# Ctrl-B 1 → Ctrl-C → Up arrow + Enter + +# Or via kill + restart +pkill -9 -f "go.*service.*start" +sleep 2 +cd platform && go run ./service start +``` + +### Individual KAS (Manual) +```bash +tmux attach -t xtest +# Window numbers: kas-alpha=2, beta=3, gamma=4, delta=5, km1=6, km2=7 +# Ctrl-B → Ctrl-C → Up arrow + Enter +``` + +## Viewing Service Logs + +**Via otdf-local:** +```bash +uv run otdf-local logs -f # Follow all +uv run otdf-local logs platform -f # Follow specific service +uv run otdf-local logs --grep "error" -f # Filter +``` + +**Via tmux session:** +```bash +tmux attach -t xtest + +# Navigate windows +Ctrl-B 0-9 # Switch to window by number +Ctrl-B w # Show window list +Ctrl-B n/p # Next/previous window + +# Scroll logs +Ctrl-B [ # Enter scroll mode +q # Exit scroll mode + +# Detach +Ctrl-B d +``` + +**Via log files:** +```bash +tail -f tests/xtest/logs/platform.log +tail -f tests/xtest/logs/kas-alpha.log +tail -f tests/xtest/logs/kas-km1.log +``` + +## Golden Key Auto-Configuration + +When using `otdf-local up` or `otdf-local restart platform`, golden keys are automatically configured: +1. `otdf-local` reads `tests/xtest/extra-keys.json` containing the `golden-r1` key +2. Key files are extracted to `platform/golden-r1-private.pem` and `platform/golden-r1-cert.pem` +3. The key is added to `cryptoProvider.standard.keys` in the platform config +4. 
A legacy keyring entry is added to `services.kas.keyring` + +**Manual configuration** (if not using otdf-local): + +Add to `platform/opentdf-dev.yaml`: +```yaml +services: + kas: + keyring: + - kid: golden-r1 + alg: rsa:2048 + legacy: true +server: + cryptoProvider: + standard: + keys: + - kid: golden-r1 + alg: rsa:2048 + private: golden-r1-private.pem + cert: golden-r1-cert.pem +``` + +Extract key files: +```bash +jq -r '.[0].privateKey' tests/xtest/extra-keys.json > platform/golden-r1-private.pem +jq -r '.[0].cert' tests/xtest/extra-keys.json > platform/golden-r1-cert.pem +``` + +## Troubleshooting + +```bash +cd tests/otdf-local + +# Check service status +uv run otdf-local status +uv run otdf-local ls --all + +# View service logs +uv run otdf-local logs platform -f +uv run otdf-local logs kas-alpha -f +uv run otdf-local logs --grep error + +# Or check log files directly +tail -f tests/xtest/logs/platform.log +tail -f tests/xtest/logs/kas-alpha.log + +# Kill stuck processes +pkill -9 -f "go.*service.*start" + +# Check port availability +lsof -i :8080 # Platform +lsof -i :8181 # KAS alpha +lsof -i :8888 # Keycloak +``` + +## Extending otdf-local + +The shell scripts in `scripts/lib/` are deprecated. For new automation: +- Extend the `otdf-local` Python CLI instead of creating new shell scripts +- The codebase uses Python with Pydantic for type safety and better error handling +- See `README.md` for project structure + +To test changes: +```bash +cd tests/otdf-local +uv run otdf-local down +uv run otdf-local up +uv run otdf-local status +uv run otdf-local logs -f +``` diff --git a/otdf-local/README.md b/otdf-local/README.md new file mode 100644 index 00000000..b78e1210 --- /dev/null +++ b/otdf-local/README.md @@ -0,0 +1,272 @@ +# otdf-local - OpenTDF Test Environment Manager + +A Python CLI for managing the OpenTDF test environment, providing a cleaner alternative to the existing shell scripts. 
+ +## Installation + +```bash +cd tests/otdf-local +uv sync +``` + +## Global Installation with Tab Completion + +For convenience, you can install `otdf-local` globally and enable tab completion: + +```bash +cd tests/otdf-local +uv tool install --editable . +otdf-local --install-completion +``` + +This makes `otdf-local` available globally. You can now run `otdf-local` from any directory. + +To uninstall: +```bash +uv tool uninstall otdf-local +``` + +**Using Tab Completion:** +```bash +otdf-local # Shows available commands +otdf-local logs # Shows service names +otdf-local restart # Shows restartable services +``` + +## Quick Start + +```bash +# Start all services +uv run otdf-local up + +# Check status +uv run otdf-local status + +# View logs +uv run otdf-local logs -f + +# Stop all services +uv run otdf-local down +``` + +## Commands + +### `up` - Start Environment + +Start all or specific services. + +```bash +# Start everything (docker, platform, all KAS instances) +otdf-local up + +# Start only Docker services +otdf-local up --services docker + +# Start without running provisioning +otdf-local up --no-provision +``` + +### `down` - Stop Environment + +```bash +# Stop all services +otdf-local down + +# Stop and clean up logs/configs +otdf-local down --clean +``` + +### `ls` - List Services + +```bash +# List running services +otdf-local ls + +# List all services (including stopped) +otdf-local ls --all + +# Output as JSON +otdf-local ls --json +``` + +### `status` - Show Status + +```bash +# Show current status with health checks +otdf-local status + +# Output as JSON +otdf-local status --json + +# Watch mode (updates every second) +otdf-local status --watch +``` + +### `logs` - View Logs + +```bash +# Show recent logs from all services +otdf-local logs + +# Follow logs (like tail -f) +otdf-local logs -f + +# Show logs from specific service +otdf-local logs platform +otdf-local logs kas-alpha + +# Show more lines +otdf-local logs -n 100 + +# Filter by pattern 
+otdf-local logs --grep error +``` + +### `restart` - Restart Service + +```bash +# Restart platform +otdf-local restart platform + +# Restart a KAS instance +otdf-local restart kas-alpha + +# Restart Docker services +otdf-local restart docker +``` + +### `provision` - Run Provisioning + +```bash +# Run all provisioning +otdf-local provision + +# Provision only Keycloak +otdf-local provision keycloak + +# Provision only fixtures +otdf-local provision fixtures +``` + +### `env` - Configure Shell for pytest + +Export environment variables needed by pytest (log file paths, etc.): + +```bash +# Set up environment for running tests +eval $(otdf-local env) + +# Output as JSON +otdf-local env --format json +``` + +### `clean` - Clean Up + +```bash +# Clean generated configs and logs +otdf-local clean + +# Clean but keep logs +otdf-local clean --keep-logs +``` + +## Tmux Session + +`otdf-local up` creates a tmux session named `xtest` with each service in its own window: + +| Window | Service | +|--------|---------| +| 0 | docker | +| 1 | platform | +| 2 | kas-alpha | +| 3 | kas-beta | +| 4 | kas-gamma | +| 5 | kas-delta | +| 6 | kas-km1 | +| 7 | kas-km2 | + +```bash +# Attach to session +tmux attach -t xtest + +# Navigate windows +Ctrl-B 0-9 # Switch by number +Ctrl-B w # Window list +Ctrl-B n/p # Next/previous + +# Scroll logs in a window +Ctrl-B [ # Enter scroll mode +q # Exit scroll mode + +# Detach +Ctrl-B d +``` + +Log files are also written to `tests/xtest/logs/` (e.g., `platform.log`, `kas-alpha.log`). 
+ +## Services Managed + +| Service | Port | Type | Description | +|---------|------|------|-------------| +| keycloak | 8888 | Docker | Authentication | +| postgres | 5432 | Docker | Database | +| platform | 8080 | Subprocess | Main OpenTDF platform | +| kas-alpha | 8181 | Subprocess | Standard KAS | +| kas-beta | 8282 | Subprocess | Standard KAS | +| kas-gamma | 8383 | Subprocess | Standard KAS | +| kas-delta | 8484 | Subprocess | Standard KAS | +| kas-km1 | 8585 | Subprocess | Key management KAS | +| kas-km2 | 8686 | Subprocess | Key management KAS | + +## Configuration + +Environment variables: + +| Variable | Default | Description | +|----------|---------|-------------| +| `OTDF_LOCAL_PLATFORM_URL` | http://localhost:8080 | Platform URL | +| `OTDF_LOCAL_KEYCLOAK_URL` | http://localhost:8888 | Keycloak URL | +| `OTDF_LOCAL_HEALTH_TIMEOUT` | 60 | Health check timeout (seconds) | +| `OTDF_LOCAL_LOG_LEVEL` | info | Log level | + +## Development + +Run tests: + +```bash +# Unit tests +uv run pytest tests/test_health.py -v + +# Integration tests (requires Docker) +uv run pytest tests/test_integration.py -v -m integration +``` + +## Project Structure + +``` +otdf-local/ +├── src/otdf_local/ +│ ├── cli.py # Typer CLI commands +│ ├── config/ +│ │ ├── ports.py # Port constants +│ │ └── settings.py # Pydantic settings +│ ├── services/ +│ │ ├── base.py # Service ABC +│ │ ├── docker.py # Docker compose management +│ │ ├── platform.py # Platform service +│ │ ├── kas.py # KAS instance management +│ │ └── provisioner.py # Keycloak/fixtures provisioning +│ ├── health/ +│ │ ├── checks.py # HTTP/port health checks +│ │ └── waits.py # Wait-for-ready utilities +│ ├── process/ +│ │ ├── manager.py # Subprocess lifecycle +│ │ └── logs.py # Log aggregation +│ └── utils/ +│ ├── yaml.py # YAML manipulation +│ ├── keys.py # Key generation +│ └── console.py # Rich console helpers +└── tests/ + ├── test_health.py # Unit tests + └── test_integration.py # Integration tests +``` diff --git 
a/otdf-local/pyproject.toml b/otdf-local/pyproject.toml new file mode 100644 index 00000000..1a8c27f7 --- /dev/null +++ b/otdf-local/pyproject.toml @@ -0,0 +1,32 @@ +[project] +name = "otdf-local" +version = "0.1.0" +description = "Local management CLI for OpenTDF test environment" +readme = "README.md" +requires-python = ">=3.11" +dependencies = [ + "docker>=7.0.0", + "httpx>=0.27.0", + "pydantic-settings>=2.2.0", + "rich>=13.7.0", + "ruamel.yaml>=0.18.0", + "typer>=0.12.0", +] + +[dependency-groups] +dev = [ + "pyright>=1.1.408", + "pytest>=8.0.0", + "ruff>=0.9.0", +] + +[project.scripts] +otdf-local = "otdf_local.cli:app" + +[build-system] +requires = ["uv_build>=0.9.28"] +build-backend = "uv_build" + +[tool.pytest.ini_options] +pythonpath = "src" +testpaths = ["tests"] diff --git a/otdf-local/src/otdf_local/__init__.py b/otdf-local/src/otdf_local/__init__.py new file mode 100644 index 00000000..aa411484 --- /dev/null +++ b/otdf-local/src/otdf_local/__init__.py @@ -0,0 +1,3 @@ +"""otdf-local - Local management CLI for OpenTDF test environment.""" + +__version__ = "0.1.0" diff --git a/otdf-local/src/otdf_local/__main__.py b/otdf-local/src/otdf_local/__main__.py new file mode 100644 index 00000000..259aece4 --- /dev/null +++ b/otdf-local/src/otdf_local/__main__.py @@ -0,0 +1,6 @@ +"""Entry point for running otdf-local as a module.""" + +from otdf_local.cli import app + +if __name__ == "__main__": + app() diff --git a/otdf-local/src/otdf_local/cli.py b/otdf-local/src/otdf_local/cli.py new file mode 100644 index 00000000..ec5a9499 --- /dev/null +++ b/otdf-local/src/otdf_local/cli.py @@ -0,0 +1,630 @@ +"""Typer CLI for otdf_local - OpenTDF test environment management.""" + +import json +from typing import Annotated + +import typer +from rich.live import Live + +from otdf_local import __version__ +from otdf_local.config.ports import Ports +from otdf_local.config.settings import get_settings +from otdf_local.health.waits import WaitTimeoutError, wait_for_health, 
wait_for_port +from otdf_local.process.logs import LogAggregator +from otdf_local.services import ( + Provisioner, + ProvisionResult, + get_docker_service, + get_kas_manager, + get_platform_service, + get_provisioner, +) +from otdf_local.utils.console import ( + console, + create_service_table, + format_health, + format_status, + print_error, + print_info, + print_success, + print_warning, + status_spinner, +) +from otdf_local.utils.yaml import get_nested, load_yaml + +app = typer.Typer( + name="otdf-local", + help="Local management CLI for OpenTDF test environment", + no_args_is_help=True, +) + + +def _show_provision_error(result: ProvisionResult, target: str) -> None: + """Display provisioning error with stderr details.""" + print_error(f"{target} provisioning failed (exit code {result.return_code})") + + if result.stderr: + # Show last 15 lines of stderr + lines = result.stderr.strip().split("\n")[-15:] + console.print("\n[dim]Error output:[/dim]") + for line in lines: + console.print(f" {line}") + + +def version_callback(value: bool) -> None: + """Print version and exit.""" + if value: + console.print(f"otdf-local version {__version__}") + raise typer.Exit() + + +@app.callback() +def main( + version: Annotated[ + bool, + typer.Option( + "--version", + "-v", + help="Show version and exit", + callback=version_callback, + is_eager=True, + ), + ] = False, +) -> None: + """OpenTDF test environment management CLI.""" + pass + + +@app.command() +def up( + services: Annotated[ + str | None, + typer.Option( + "--services", + "-s", + help="Comma-separated list of services to start (docker,platform,kas)", + ), + ] = None, + no_provision: Annotated[ + bool, + typer.Option("--no-provision", help="Skip provisioning step"), + ] = False, +) -> None: + """Start the test environment. + + By default starts all services: docker (keycloak, postgres), platform, and all KAS instances. 
+ """ + settings = get_settings() + settings.ensure_directories() + + # Parse services to start + if services: + service_list = [s.strip().lower() for s in services.split(",")] + else: + service_list = ["docker", "platform", "kas"] + + start_docker = "docker" in service_list + start_platform = "platform" in service_list + start_kas = "kas" in service_list + + # Step 1: Start Docker services + if start_docker: + print_info("Starting Docker services (Keycloak, PostgreSQL)...") + docker = get_docker_service(settings) + if not docker.start(): + print_error("Failed to start Docker services") + raise typer.Exit(1) + + with status_spinner("Waiting for Keycloak..."): + try: + wait_for_health( + f"http://localhost:{Ports.KEYCLOAK}/auth/realms/master", + timeout=120, + service_name="Keycloak", + ) + except WaitTimeoutError as e: + print_error(str(e)) + raise typer.Exit(1) from e + print_success("Keycloak is ready") + + print_info("Waiting for PostgreSQL...") + try: + wait_for_port( + Ports.POSTGRES, + "localhost", + timeout=60, + service_name="PostgreSQL", + ) + except WaitTimeoutError as e: + print_error(str(e)) + raise typer.Exit(1) from e + print_success("PostgreSQL is ready") + + if not no_provision: + print_info("Provisioning Keycloak...") + provisioner: Provisioner = get_provisioner(settings) + result = provisioner.provision_keycloak() + if not result: + _show_provision_error(result, "Keycloak") + raise typer.Exit(1) + print_success("Keycloak provisioned") + + # Step 2: Start Platform + if start_platform: + print_info("Starting Platform...") + platform = get_platform_service(settings) + if not platform.start(): + print_error("Failed to start Platform") + raise typer.Exit(1) + + with status_spinner("Waiting for Platform..."): + try: + wait_for_health( + f"http://localhost:{Ports.PLATFORM}/healthz", + timeout=120, + service_name="Platform", + ) + except WaitTimeoutError as e: + print_error(str(e)) + raise typer.Exit(1) from e + print_success("Platform is ready") + + # 
Step 3: Provision + if start_platform and not no_provision: + print_info("Provisioning fixtures...") + provisioner = get_provisioner(settings) + result = provisioner.provision_fixtures() + if not result: + _show_provision_error(result, "Fixtures") + print_warning("Provisioning had issues - continuing anyway") + else: + print_success("Provisioning complete") + + # Step 4: Start KAS instances + if start_kas: + print_info("Starting KAS instances...") + kas_manager = get_kas_manager(settings) + results = kas_manager.start_all() + + failed = [name for name, ok in results.items() if not ok] + if failed: + print_error(f"Failed to start KAS instances: {', '.join(failed)}") + raise typer.Exit(1) + + with status_spinner("Waiting for KAS instances..."): + for kas_name in Ports.all_kas_names(): + port = Ports.get_kas_port(kas_name) + try: + wait_for_health( + f"http://localhost:{port}/healthz", + timeout=60, + service_name=f"KAS {kas_name}", + ) + except WaitTimeoutError as e: + print_warning(str(e)) + print_success("KAS instances are ready") + + print_success("Environment is up!") + + +@app.command() +def down( + clean: Annotated[ + bool, + typer.Option("--clean", help="Also clean logs and generated configs"), + ] = False, +) -> None: + """Stop all services.""" + settings = get_settings() + + # Stop KAS instances + print_info("Stopping KAS instances...") + kas_manager = get_kas_manager(settings) + kas_manager.stop_all() + + # Stop Platform + print_info("Stopping Platform...") + platform = get_platform_service(settings) + platform.stop() + + # Stop Docker + print_info("Stopping Docker services...") + docker = get_docker_service(settings) + docker.stop() + + if clean: + print_info("Cleaning up...") + _do_clean(settings, keep_logs=False) + + print_success("Environment stopped") + + +@app.command(name="ls") +def list_services( + json_output: Annotated[ + bool, + typer.Option("--json", "-j", help="Output as JSON"), + ] = False, + all_services: Annotated[ + bool, + 
typer.Option("--all", "-a", help="Include stopped services"), + ] = False, +) -> None: + """List all services with their status.""" + settings = get_settings() + + # Gather all service info + docker = get_docker_service(settings) + platform = get_platform_service(settings) + kas_manager = get_kas_manager(settings) + + all_info = [] + all_info.extend(docker.get_all_info()) + all_info.append(platform.get_info()) + all_info.extend(kas_manager.get_all_info()) + + # Filter if not showing all + if not all_services: + all_info = [info for info in all_info if info.running] + + if json_output: + output = [info.to_dict() for info in all_info] + console.print_json(json.dumps(output)) + return + + # Table output + table = create_service_table() + for info in all_info: + table.add_row( + info.name, + str(info.port), + info.service_type.value, + format_status(info.running), + format_health(info.healthy), + ) + + console.print(table) + + +@app.command() +def status( + json_output: Annotated[ + bool, + typer.Option("--json", "-j", help="Output as JSON"), + ] = False, + watch: Annotated[ + bool, + typer.Option("--watch", "-w", help="Continuously update status"), + ] = False, +) -> None: + """Show detailed status with health checks.""" + settings = get_settings() + + def get_status_table(): + docker = get_docker_service(settings) + platform = get_platform_service(settings) + kas_manager = get_kas_manager(settings) + + all_info = [] + all_info.extend(docker.get_all_info()) + all_info.append(platform.get_info()) + all_info.extend(kas_manager.get_all_info()) + + if json_output: + return json.dumps([info.to_dict() for info in all_info], indent=2) + + table = create_service_table() + for info in all_info: + table.add_row( + info.name, + str(info.port), + info.service_type.value, + format_status(info.running), + format_health(info.healthy), + ) + return table + + if watch: + import time + + with Live(get_status_table(), refresh_per_second=1, console=console) as live: + while True: + 
time.sleep(1) + live.update(get_status_table()) + else: + result = get_status_table() + if json_output: + console.print(result) + else: + console.print(result) + + +@app.command() +def logs( + service: Annotated[ + str | None, + typer.Argument(help="Service name (platform, kas-alpha, etc.)"), + ] = None, + follow: Annotated[ + bool, + typer.Option("--follow", "-f", help="Follow log output"), + ] = False, + lines: Annotated[ + int, + typer.Option("--lines", "-n", help="Number of lines to show"), + ] = 50, + grep: Annotated[ + str | None, + typer.Option("--grep", "-g", help="Filter by regex pattern"), + ] = None, +) -> None: + """View service logs. + + Without a service name, shows aggregated logs from all services. + """ + settings = get_settings() + aggregator = LogAggregator(settings.logs_dir) + + # Add all services to aggregator + aggregator.add_service("platform") + for kas_name in Ports.all_kas_names(): + aggregator.add_service(f"kas-{kas_name}") + + # Determine which services to show + services_filter = [service] if service else None + + if follow: + print_info("Following logs (Ctrl+C to stop)...") + try: + for entry in aggregator.follow(services=services_filter): + if grep and grep.lower() not in entry.message.lower(): + continue + _print_log_entry(entry) + except KeyboardInterrupt: + pass + else: + entries = aggregator.read_tail(n=lines, services=services_filter, pattern=grep) + for entry in entries: + _print_log_entry(entry) + + +def _print_log_entry(entry) -> None: + """Format and print a log entry.""" + timestamp = "" + if entry.timestamp: + timestamp = entry.timestamp.strftime("%H:%M:%S") + console.print( + f"[dim]{timestamp}[/dim] [cyan]{entry.service}[/cyan] {entry.message}" + ) + + +@app.command() +def clean( + keep_logs: Annotated[ + bool, + typer.Option("--keep-logs", help="Keep log files"), + ] = False, +) -> None: + """Clean up generated files and logs.""" + settings = get_settings() + _do_clean(settings, keep_logs) + print_success("Cleanup 
def _do_clean(settings, keep_logs: bool) -> None:
    """Remove generated config files (and optionally logs), then recreate the dirs.

    Args:
        settings: Settings object providing ``config_dir`` / ``logs_dir`` paths.
        keep_logs: When True, log files are preserved.
    """
    import shutil

    # Wipe and recreate the generated-config directory so stale KAS configs
    # never leak into the next run.
    if settings.config_dir.exists():
        shutil.rmtree(settings.config_dir)
    settings.config_dir.mkdir(parents=True, exist_ok=True)

    # Wipe and recreate logs unless the caller asked to keep them.
    if not keep_logs and settings.logs_dir.exists():
        shutil.rmtree(settings.logs_dir)
        settings.logs_dir.mkdir(parents=True, exist_ok=True)


@app.command()
def provision(
    target: Annotated[
        str,
        typer.Argument(help="What to provision: keycloak, fixtures, or all"),
    ] = "all",
) -> None:
    """Run provisioning steps against the running environment.

    Exits with code 1 on provisioning failure or an unknown target.
    """
    settings = get_settings()
    provisioner = get_provisioner(settings)

    if target == "all":
        print_info("Running all provisioning...")
        result = provisioner.provision_all()
        if result:
            print_success("Provisioning complete")
        else:
            _show_provision_error(result, "Provisioning")
            raise typer.Exit(1)
    elif target == "keycloak":
        print_info("Provisioning Keycloak...")
        result = provisioner.provision_keycloak()
        if result:
            print_success("Keycloak provisioning complete")
        else:
            _show_provision_error(result, "Keycloak")
            raise typer.Exit(1)
    elif target == "fixtures":
        print_info("Provisioning fixtures...")
        result = provisioner.provision_fixtures()
        if result:
            print_success("Fixtures provisioning complete")
        else:
            _show_provision_error(result, "Fixtures")
            raise typer.Exit(1)
    else:
        print_error(f"Unknown target: {target}")
        print_info("Valid targets: keycloak, fixtures, all")
        raise typer.Exit(1)


@app.command()
def restart(
    service: Annotated[
        str,
        typer.Argument(help="Service to restart (platform, kas-alpha, docker, etc.)"),
    ],
) -> None:
    """Restart a single service: the docker stack, the platform, or one KAS.

    KAS instances are accepted both as ``kas-<name>`` and as a bare ``<name>``.
    Exits with code 1 for unknown services.
    """
    settings = get_settings()

    if service == "docker":
        print_info("Restarting Docker services...")
        docker = get_docker_service(settings)
        docker.stop()
        docker.start()
        print_success("Docker services restarted")
        return

    if service == "platform":
        print_info("Restarting Platform...")
        platform = get_platform_service(settings)
        platform.restart()
        print_success("Platform restarted")
        return

    if service.startswith("kas-"):
        kas_name = service[4:]  # Remove "kas-" prefix
        if kas_name not in Ports.all_kas_names():
            print_error(f"Unknown KAS instance: {kas_name}")
            print_info(f"Valid KAS names: {', '.join(Ports.all_kas_names())}")
            raise typer.Exit(1)

        print_info(f"Restarting KAS {kas_name}...")
        kas_manager = get_kas_manager(settings)
        kas = kas_manager.get(kas_name)
        if kas:
            kas.restart()
            print_success(f"KAS {kas_name} restarted")
        else:
            print_error(f"KAS {kas_name} not found")
            raise typer.Exit(1)
        return

    # Also accept a bare KAS name without the "kas-" prefix.
    if service in Ports.all_kas_names():
        print_info(f"Restarting KAS {service}...")
        kas_manager = get_kas_manager(settings)
        kas = kas_manager.get(service)
        if kas:
            kas.restart()
            print_success(f"KAS {service} restarted")
        return

    print_error(f"Unknown service: {service}")
    print_info(
        "Valid services: docker, platform, kas-alpha, kas-beta, kas-gamma, kas-delta, kas-km1, kas-km2"
    )
    raise typer.Exit(1)


@app.command()
def env(
    format: Annotated[
        str,
        typer.Option(
            "--format",
            "-f",
            help="Output format: shell (default), json",
        ),
    ] = "shell",
) -> None:
    """Output environment variables for pytest.

    Use this command to configure your shell environment for running tests:

        eval $(otdf-local env)

    Or source it directly:

        . <(otdf-local env)

    This sets variables like PLATFORM_LOG_FILE, KAS_*_LOG_FILE, PLATFORMURL, OT_ROOT_KEY, etc.
    """
    settings = get_settings()

    # Build environment variable dict
    env_vars = {}

    # Platform configuration
    env_vars["PLATFORMURL"] = settings.platform_url
    env_vars["PLATFORM_DIR"] = str(settings.platform_dir.resolve())

    # Schema file for manifest validation
    schema_file = settings.platform_dir / "sdk" / "schema" / "manifest.schema.json"
    if schema_file.exists():
        env_vars["SCHEMA_FILE"] = str(schema_file.resolve())

    # Log file paths — only exported when the file actually exists.
    platform_log = settings.logs_dir / "platform.log"
    if platform_log.exists():
        env_vars["PLATFORM_LOG_FILE"] = str(platform_log.resolve())

    # KAS log files
    kas_env_mapping = {
        "alpha": "KAS_ALPHA_LOG_FILE",
        "beta": "KAS_BETA_LOG_FILE",
        "gamma": "KAS_GAMMA_LOG_FILE",
        "delta": "KAS_DELTA_LOG_FILE",
        "km1": "KAS_KM1_LOG_FILE",
        "km2": "KAS_KM2_LOG_FILE",
    }

    for kas_name, env_var in kas_env_mapping.items():
        log_path = settings.get_kas_log_path(kas_name)
        if log_path.exists():
            env_vars[env_var] = str(log_path.resolve())

    # Root key from the platform config (best-effort).
    try:
        platform_config = load_yaml(settings.platform_config)
        root_key = get_nested(platform_config, "services.kas.root_key")
        if root_key:
            env_vars["OT_ROOT_KEY"] = root_key
    except Exception:
        # If we can't read the config, just skip the root key
        pass

    # Platform version from the well-known endpoint (best-effort).
    try:
        # BUG FIX: this previously imported `requests`, which is not a declared
        # dependency of this package (pyproject lists httpx). Use httpx;
        # Response.is_success is the httpx equivalent of requests' Response.ok.
        import httpx

        platform = get_platform_service(settings)
        if platform.is_running():
            resp = httpx.get(
                f"{settings.platform_url}/.well-known/opentdf-configuration",
                timeout=5,
            )
            if resp.is_success:
                config = resp.json()
                if "version" in config:
                    env_vars["PLATFORM_VERSION"] = config["version"]
    except Exception:
        # If we can't get the version, that's okay
        pass

    # Output in requested format
    if format == "json":
        console.print_json(json.dumps(env_vars, indent=2))
    else:
        # Shell export format - use plain print to avoid line wrapping
        import sys

        for key, value in env_vars.items():
            # Escape single quotes in value for shell safety
            escaped_value = value.replace("'", "'\\''")
            # Use plain print to stdout to avoid rich console line wrapping
            print(f"export {key}='{escaped_value}'", file=sys.stdout)


if __name__ == "__main__":
    app()
cls(version=version, semver=semver, features=features) + + def supports(self, feature: FeatureType) -> bool: + """Check if a feature is supported.""" + return feature in self.features + + +def _get_platform_version(platform_dir: Path) -> str: + """Get the platform version by running 'go run ./service version'.""" + try: + result = subprocess.run( + ["go", "run", "./service", "version"], + cwd=platform_dir, + capture_output=True, + text=True, + timeout=60, + ) + if result.returncode == 0: + return result.stdout.strip() + except (subprocess.TimeoutExpired, subprocess.SubprocessError, FileNotFoundError): + pass + + # Default version if detection fails + return "0.9.0" + + +def _parse_semver(version: str) -> tuple[int, int, int]: + """Parse a semver string into (major, minor, patch) tuple.""" + match = _VERSION_RE.match(version) + if not match: + # Default to 0.9.0 if parsing fails + return (0, 9, 0) + + major, minor, patch, _, _ = match.groups() + return (int(major), int(minor), int(patch)) + + +def _compute_features(semver: tuple[int, int, int]) -> set[FeatureType]: + """Compute the set of supported features based on semver.""" + features: set[FeatureType] = { + "assertions", + "assertion_verification", + "autoconfigure", + "better-messages-2024", + } + + # EC wrapping (announced in 0.4.39 but correct salt in 0.4.40) + if semver >= (0, 4, 40): + features.add("ecwrap") + + # Hexless format support + if semver >= (0, 4, 39): + features.add("hexless") + features.add("hexaflexible") + + # Namespace grants + if semver >= (0, 4, 19): + features.add("ns_grants") + + # Connect RPC support + if semver >= (0, 4, 28): + features.add("connectrpc") + + # Key management + if semver >= (0, 6, 0): + features.add("key_management") + + # Obligations support (service v0.11.0) + if semver >= (0, 11, 0): + features.add("obligations") + + # Logger stderr output support (added after v0.9.0) + # v0.9.0 only supports stdout + if semver >= (0, 10, 0): + features.add("logger_stderr") + + 
return features + + +@lru_cache +def get_platform_features(platform_dir: Path) -> PlatformFeatures: + """Get cached platform features for a directory.""" + return PlatformFeatures.detect(platform_dir) diff --git a/otdf-local/src/otdf_local/config/ports.py b/otdf-local/src/otdf_local/config/ports.py new file mode 100644 index 00000000..984e0eb2 --- /dev/null +++ b/otdf-local/src/otdf_local/config/ports.py @@ -0,0 +1,58 @@ +"""Port constants for all services.""" + +from dataclasses import dataclass + + +@dataclass(frozen=True) +class Ports: + """Port configuration for all services.""" + + # Docker services + KEYCLOAK: int = 8888 + POSTGRES: int = 5432 + + # Platform + PLATFORM: int = 8080 + + # KAS instances + KAS_ALPHA: int = 8181 + KAS_BETA: int = 8282 + KAS_GAMMA: int = 8383 + KAS_DELTA: int = 8484 + KAS_KM1: int = 8585 + KAS_KM2: int = 8686 + + @classmethod + def get_kas_port(cls, name: str) -> int: + """Get port for a KAS instance by name.""" + mapping = { + "alpha": cls.KAS_ALPHA, + "beta": cls.KAS_BETA, + "gamma": cls.KAS_GAMMA, + "delta": cls.KAS_DELTA, + "km1": cls.KAS_KM1, + "km2": cls.KAS_KM2, + } + if name not in mapping: + raise ValueError(f"Unknown KAS instance: {name}") + return mapping[name] + + @classmethod + def all_kas_names(cls) -> list[str]: + """Return all KAS instance names.""" + return ["alpha", "beta", "gamma", "delta", "km1", "km2"] + + @classmethod + def standard_kas_names(cls) -> list[str]: + """Return standard (non-key-management) KAS instance names.""" + return ["alpha", "beta", "gamma", "delta"] + + @classmethod + def km_kas_names(cls) -> list[str]: + """Return key management KAS instance names.""" + return ["km1", "km2"] + + @classmethod + def is_km_kas(cls, name: str) -> bool: + """Check if a KAS instance is a key management instance.""" + return name in cls.km_kas_names() diff --git a/otdf-local/src/otdf_local/config/settings.py b/otdf-local/src/otdf_local/config/settings.py new file mode 100644 index 00000000..b9d97d83 --- /dev/null 
"""Pydantic settings for otdf_local configuration."""

import os
from functools import lru_cache
from pathlib import Path
from typing import Annotated

from pydantic import Field
from pydantic_settings import BaseSettings, SettingsConfigDict

from otdf_local.config.ports import Ports


def _find_xtest_root() -> Path:
    """Locate the xtest/tests root by inspecting this file's ancestors."""
    here = Path(__file__).resolve()
    # Inspect this path and every ancestor except the filesystem root,
    # exactly as a parent-by-parent walk would.
    for probe in (here, *list(here.parents)[:-1]):
        # A "tests" directory that contains the otdf_local package.
        if probe.name == "tests" and (probe / "otdf_local").exists():
            return probe
        # Legacy layout: an "xtest" directory with a conftest.py.
        if probe.name == "xtest" and (probe / "conftest.py").exists():
            return probe
        # The otdf_local package nested one level below the conftest dir.
        if (probe / "otdf_local").exists() and (probe.parent / "conftest.py").exists():
            return probe.parent
    # Fallback to assuming we're in tests/otdf-local.
    return here.parent.parent.parent.parent


def _find_platform_dir(xtest_root: Path) -> Path:
    """Find the platform directory as a sibling of some ancestor of xtest_root.

    A candidate qualifies only if it has the expected shape: it contains both
    docker-compose.yaml and opentdf-dev.yaml.

    Raises:
        FileNotFoundError: If no qualifying platform directory exists.
    """
    for level in (xtest_root, *list(xtest_root.parents)[:-1]):
        candidate = level.parent / "platform"
        # is_dir() implies exists(); verify the expected shape too.
        if (
            candidate.is_dir()
            and (candidate / "docker-compose.yaml").exists()
            and (candidate / "opentdf-dev.yaml").exists()
        ):
            return candidate

    raise FileNotFoundError(
        f"Could not find platform directory with expected shape "
        f"(docker-compose.yaml and opentdf-dev.yaml) searching from {xtest_root}"
    )


class Settings(BaseSettings):
    """Application settings, overridable via OTDF_LOCAL_* environment variables."""

    model_config = SettingsConfigDict(
        env_prefix="OTDF_LOCAL_",
        env_file=".env",
        extra="ignore",
    )

    # Root of the test tree; everything else is derived from it.
    xtest_root: Path = Field(default_factory=_find_xtest_root)
    # Lazily-resolved cache for platform_dir (resolution can raise).
    _platform_dir: Path | None = None

    def _tmp_subdir(self, name: str) -> Path:
        """Resolve a subdirectory of <xtest_root>/tmp."""
        return self.xtest_root / "tmp" / name

    @property
    def platform_dir(self) -> Path:
        """Platform source directory (resolved once, then cached)."""
        if self._platform_dir is None:
            self._platform_dir = _find_platform_dir(self.xtest_root)
        return self._platform_dir

    @property
    def logs_dir(self) -> Path:
        """Logs directory."""
        return self._tmp_subdir("logs")

    @property
    def keys_dir(self) -> Path:
        """Keys directory."""
        return self._tmp_subdir("keys")

    @property
    def config_dir(self) -> Path:
        """Generated config files directory."""
        return self._tmp_subdir("config")

    @property
    def platform_config(self) -> Path:
        """Platform config file path."""
        return self.platform_dir / "opentdf-dev.yaml"

    @property
    def platform_template_config(self) -> Path:
        """Platform config template path."""
        return self.platform_dir / "opentdf.yaml"

    @property
    def kas_template_config(self) -> Path:
        """KAS config template path."""
        return self.platform_dir / "opentdf-kas-mode.yaml"

    @property
    def docker_compose_file(self) -> Path:
        """Docker compose file path."""
        return self.platform_dir / "docker-compose.yaml"

    # Service ports
    keycloak_port: int = Ports.KEYCLOAK
    postgres_port: int = Ports.POSTGRES
    platform_port: int = Ports.PLATFORM

    # URLs
    platform_url: Annotated[str, Field(default="http://localhost:8080")]
    keycloak_url: Annotated[str, Field(default="http://localhost:8888")]

    # Timeouts (seconds)
    health_timeout: int = 60
    startup_timeout: int = 120

    # Log level
    log_level: str = "info"

    def get_kas_port(self, name: str) -> int:
        """Get the port for a KAS instance."""
        return Ports.get_kas_port(name)

    def get_kas_config_path(self, name: str) -> Path:
        """Get the generated config file path for a KAS instance."""
        return self.config_dir / f"kas-{name}.yaml"

    def get_kas_log_path(self, name: str) -> Path:
        """Get the log file path for a KAS instance."""
        return self.logs_dir / f"kas-{name}.log"

    def ensure_directories(self) -> None:
        """Create the logs, config, and keys directories if missing."""
        for directory in (self.logs_dir, self.config_dir, self.keys_dir):
            directory.mkdir(parents=True, exist_ok=True)


@lru_cache
def get_settings() -> Settings:
    """Return the shared, cached Settings instance."""
    overrides = {
        "platform_url": os.environ.get(
            "OTDF_LOCAL_PLATFORM_URL", "http://localhost:8080"
        ),
        "keycloak_url": os.environ.get(
            "OTDF_LOCAL_KEYCLOAK_URL", "http://localhost:8888"
        ),
    }
    return Settings(**overrides)
["check_http_health", "check_port", "wait_for_health", "wait_for_port"] diff --git a/otdf-local/src/otdf_local/health/checks.py b/otdf-local/src/otdf_local/health/checks.py new file mode 100644 index 00000000..9e5b9195 --- /dev/null +++ b/otdf-local/src/otdf_local/health/checks.py @@ -0,0 +1,98 @@ +"""HTTP and port health check utilities.""" + +import socket +from typing import Literal + +import httpx + + +def check_port(host: str, port: int, timeout: float = 1.0) -> bool: + """Check if a port is open and accepting connections. + + Args: + host: Host to check + port: Port number + timeout: Connection timeout in seconds + + Returns: + True if port is accepting connections + """ + try: + with socket.create_connection((host, port), timeout=timeout): + return True + except (OSError, TimeoutError): + return False + + +def check_http_health( + url: str, + timeout: float = 5.0, + expected_status: int | tuple[int, ...] = 200, +) -> bool: + """Check if an HTTP endpoint returns a healthy status. + + Args: + url: Full URL to check (e.g., http://localhost:8080/healthz) + timeout: Request timeout in seconds + expected_status: Expected HTTP status code(s) + + Returns: + True if endpoint returns expected status + """ + if isinstance(expected_status, int): + expected_status = (expected_status,) + + try: + response = httpx.get(url, timeout=timeout) + return response.status_code in expected_status + except (httpx.RequestError, httpx.TimeoutException): + return False + + +def get_health_url(service: str, port: int) -> str: + """Get the health check URL for a service. 
+ + Args: + service: Service name (platform, kas, keycloak) + port: Service port + + Returns: + Health check URL + """ + if service == "keycloak": + return f"http://localhost:{port}/auth/realms/master" + elif service in ("platform", "kas"): + return f"http://localhost:{port}/healthz" + else: + return f"http://localhost:{port}/healthz" + + +ServiceStatus = Literal["running", "stopped", "unhealthy", "unknown"] + + +def get_service_status( + port: int, + health_url: str | None = None, +) -> ServiceStatus: + """Get comprehensive status of a service. + + Args: + port: Service port + health_url: Optional health check URL + + Returns: + Service status string + """ + # First check if port is open + if not check_port("localhost", port): + return "stopped" + + # If no health URL provided, assume running + if not health_url: + return "running" + + # Check health endpoint + if check_http_health(health_url): + return "running" + + return "unhealthy" diff --git a/otdf-local/src/otdf_local/health/waits.py b/otdf-local/src/otdf_local/health/waits.py new file mode 100644 index 00000000..4d245bee --- /dev/null +++ b/otdf-local/src/otdf_local/health/waits.py @@ -0,0 +1,149 @@ +"""Wait-for-ready utilities for services.""" + +import time +from collections.abc import Callable + +from otdf_local.health.checks import check_http_health, check_port + + +class WaitTimeoutError(Exception): + """Raised when a wait operation times out.""" + + def __init__(self, service: str, timeout: float) -> None: + self.service = service + self.timeout = timeout + super().__init__(f"Timeout waiting for {service} after {timeout}s") + + +def wait_for_port( + port: int, + host: str = "localhost", + timeout: float = 30.0, + poll_interval: float = 0.5, + service_name: str = "service", +) -> bool: + """Wait for a port to become available. 
+ + Args: + port: Port number to wait for + host: Host to check + timeout: Maximum time to wait in seconds + poll_interval: Time between checks in seconds + service_name: Name for error messages + + Returns: + True when port is available + + Raises: + WaitTimeoutError: If timeout is reached + """ + deadline = time.monotonic() + timeout + + while time.monotonic() < deadline: + if check_port(host, port): + return True + time.sleep(poll_interval) + + raise WaitTimeoutError(service_name, timeout) + + +def wait_for_health( + url: str, + timeout: float = 60.0, + poll_interval: float = 1.0, + service_name: str = "service", + expected_status: int | tuple[int, ...] = 200, +) -> bool: + """Wait for an HTTP health endpoint to return healthy. + + Args: + url: Health check URL + timeout: Maximum time to wait in seconds + poll_interval: Time between checks in seconds + service_name: Name for error messages + expected_status: Expected HTTP status code(s) + + Returns: + True when healthy + + Raises: + WaitTimeoutError: If timeout is reached + """ + deadline = time.monotonic() + timeout + + while time.monotonic() < deadline: + if check_http_health(url, expected_status=expected_status): + return True + time.sleep(poll_interval) + + raise WaitTimeoutError(service_name, timeout) + + +def wait_for_condition( + condition: Callable[[], bool], + timeout: float = 30.0, + poll_interval: float = 0.5, + service_name: str = "condition", +) -> bool: + """Wait for an arbitrary condition to become true. 
+ + Args: + condition: Callable that returns True when ready + timeout: Maximum time to wait in seconds + poll_interval: Time between checks in seconds + service_name: Name for error messages + + Returns: + True when condition is met + + Raises: + WaitTimeoutError: If timeout is reached + """ + deadline = time.monotonic() + timeout + + while time.monotonic() < deadline: + try: + if condition(): + return True + except Exception: + pass # Treat exceptions as "not ready yet" + time.sleep(poll_interval) + + raise WaitTimeoutError(service_name, timeout) + + +def wait_for_multiple( + checks: list[tuple[str, Callable[[], bool]]], + timeout: float = 60.0, + poll_interval: float = 1.0, +) -> dict[str, bool]: + """Wait for multiple conditions, returning status of each. + + Args: + checks: List of (name, condition_callable) tuples + timeout: Maximum time to wait in seconds + poll_interval: Time between check rounds + + Returns: + Dict mapping service names to their final status (True if ready) + """ + deadline = time.monotonic() + timeout + results = {name: False for name, _ in checks} + pending = list(checks) + + while pending and time.monotonic() < deadline: + still_pending = [] + for name, condition in pending: + try: + if condition(): + results[name] = True + else: + still_pending.append((name, condition)) + except Exception: + still_pending.append((name, condition)) + + pending = still_pending + if pending: + time.sleep(poll_interval) + + return results diff --git a/otdf-local/src/otdf_local/process/__init__.py b/otdf-local/src/otdf_local/process/__init__.py new file mode 100644 index 00000000..e82e6801 --- /dev/null +++ b/otdf-local/src/otdf_local/process/__init__.py @@ -0,0 +1 @@ +"""Process management module for otdf-local.""" diff --git a/otdf-local/src/otdf_local/process/logs.py b/otdf-local/src/otdf_local/process/logs.py new file mode 100644 index 00000000..a7f10839 --- /dev/null +++ b/otdf-local/src/otdf_local/process/logs.py @@ -0,0 +1,206 @@ +"""Log file 
management and aggregation.""" + +import re +import time +from collections.abc import Iterator +from dataclasses import dataclass +from datetime import datetime +from pathlib import Path + + +@dataclass +class LogEntry: + """A single log entry.""" + + timestamp: datetime | None + service: str + message: str + raw: str + + +class LogReader: + """Read and tail log files.""" + + def __init__(self, log_file: Path, service_name: str) -> None: + self.log_file = log_file + self.service_name = service_name + self._position = 0 + + def read_all(self) -> list[LogEntry]: + """Read all lines from the log file.""" + if not self.log_file.exists(): + return [] + + entries = [] + with open(self.log_file) as f: + for line in f: + entries.append(self._parse_line(line)) + return entries + + def read_tail(self, n: int = 50) -> list[LogEntry]: + """Read the last n lines from the log file.""" + if not self.log_file.exists(): + return [] + + # Simple tail implementation + with open(self.log_file) as f: + lines = f.readlines() + + return [self._parse_line(line) for line in lines[-n:]] + + def read_new(self) -> list[LogEntry]: + """Read new lines since last read.""" + if not self.log_file.exists(): + return [] + + entries = [] + with open(self.log_file) as f: + f.seek(self._position) + for line in f: + entries.append(self._parse_line(line)) + self._position = f.tell() + return entries + + def follow(self, poll_interval: float = 0.5) -> Iterator[LogEntry]: + """Continuously yield new log entries.""" + while True: + entries = self.read_new() + yield from entries + if not entries: + time.sleep(poll_interval) + + def _parse_line(self, line: str) -> LogEntry: + """Parse a log line to extract timestamp and message.""" + line = line.rstrip("\n") + + # Try to parse common timestamp formats + timestamp = None + + # ISO format: 2024-01-15T10:30:45.123Z + iso_match = re.match( + r"(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:\.\d+)?Z?)\s*(.*)", + line, + ) + if iso_match: + try: + ts_str = 
iso_match.group(1).rstrip("Z") + timestamp = datetime.fromisoformat(ts_str) + line = iso_match.group(2) + except ValueError: + pass + + # Standard format: 2024/01/15 10:30:45 + std_match = re.match( + r"(\d{4}/\d{2}/\d{2}\s+\d{2}:\d{2}:\d{2})\s*(.*)", + line, + ) + if std_match and timestamp is None: + try: + timestamp = datetime.strptime(std_match.group(1), "%Y/%m/%d %H:%M:%S") + line = std_match.group(2) + except ValueError: + pass + + return LogEntry( + timestamp=timestamp, + service=self.service_name, + message=line, + raw=line, + ) + + +class LogAggregator: + """Aggregate logs from multiple services.""" + + def __init__(self, logs_dir: Path) -> None: + self.logs_dir = logs_dir + self._readers: dict[str, LogReader] = {} + + def add_service(self, name: str, log_file: Path | None = None) -> None: + """Add a service to aggregate logs from.""" + if log_file is None: + log_file = self.logs_dir / f"{name}.log" + self._readers[name] = LogReader(log_file, name) + + def read_all( + self, + services: list[str] | None = None, + pattern: str | None = None, + ) -> list[LogEntry]: + """Read all logs, optionally filtered. + + Args: + services: Service names to include (None = all) + pattern: Regex pattern to filter messages + + Returns: + List of log entries, sorted by timestamp + """ + readers = self._get_readers(services) + entries = [] + for reader in readers: + entries.extend(reader.read_all()) + + if pattern: + regex = re.compile(pattern, re.IGNORECASE) + entries = [e for e in entries if regex.search(e.message)] + + # Sort by timestamp, putting None timestamps last + return sorted( + entries, + key=lambda e: (e.timestamp is None, e.timestamp or datetime.max), + ) + + def read_tail( + self, + n: int = 50, + services: list[str] | None = None, + pattern: str | None = None, + ) -> list[LogEntry]: + """Read last n lines from each service. 
+ + Args: + n: Number of lines per service + services: Service names to include (None = all) + pattern: Regex pattern to filter messages + + Returns: + List of log entries, sorted by timestamp + """ + readers = self._get_readers(services) + entries = [] + for reader in readers: + entries.extend(reader.read_tail(n)) + + if pattern: + regex = re.compile(pattern, re.IGNORECASE) + entries = [e for e in entries if regex.search(e.message)] + + return sorted( + entries, + key=lambda e: (e.timestamp is None, e.timestamp or datetime.max), + ) + + def follow( + self, + services: list[str] | None = None, + poll_interval: float = 0.5, + ) -> Iterator[LogEntry]: + """Continuously yield new log entries from all services.""" + readers = self._get_readers(services) + + while True: + found_any = False + for reader in readers: + for entry in reader.read_new(): + found_any = True + yield entry + + if not found_any: + time.sleep(poll_interval) + + def _get_readers(self, services: list[str] | None) -> list[LogReader]: + """Get readers for specified services.""" + if services is None: + return list(self._readers.values()) + return [self._readers[s] for s in services if s in self._readers] diff --git a/otdf-local/src/otdf_local/process/manager.py b/otdf-local/src/otdf_local/process/manager.py new file mode 100644 index 00000000..93d23b52 --- /dev/null +++ b/otdf-local/src/otdf_local/process/manager.py @@ -0,0 +1,227 @@ +"""Subprocess lifecycle management.""" + +import os +import signal +import subprocess +from dataclasses import dataclass, field +from pathlib import Path +from typing import IO + + +@dataclass +class ManagedProcess: + """A managed subprocess with metadata.""" + + name: str + process: subprocess.Popen + log_file: Path | None = None + pid_file: Path | None = None + _log_handle: IO | None = field(default=None, repr=False) + + @property + def pid(self) -> int: + """Process ID.""" + return self.process.pid + + @property + def running(self) -> bool: + """Check if process is 
still running.""" + return self.process.poll() is None + + @property + def return_code(self) -> int | None: + """Return code if process has exited.""" + return self.process.poll() + + def stop(self, timeout: float = 10.0) -> int | None: + """Stop the process gracefully, then forcefully if needed. + + Args: + timeout: Seconds to wait for graceful shutdown + + Returns: + Process return code + """ + if not self.running: + return self.return_code + + # Try graceful shutdown first + self.process.terminate() + try: + self.process.wait(timeout=timeout) + except subprocess.TimeoutExpired: + # Force kill + self.process.kill() + self.process.wait(timeout=5) + + # Clean up PID file + if self.pid_file and self.pid_file.exists(): + self.pid_file.unlink() + + # Close log handle + if self._log_handle: + self._log_handle.close() + + return self.return_code + + def kill(self) -> None: + """Forcefully kill the process.""" + if self.running: + self.process.kill() + self.process.wait() + + if self.pid_file and self.pid_file.exists(): + self.pid_file.unlink() + + if self._log_handle: + self._log_handle.close() + + +class ProcessManager: + """Manages multiple subprocesses.""" + + def __init__(self) -> None: + self._processes: dict[str, ManagedProcess] = {} + + def start( + self, + name: str, + cmd: list[str], + cwd: Path | None = None, + env: dict[str, str] | None = None, + log_file: Path | None = None, + pid_file: Path | None = None, + ) -> ManagedProcess: + """Start a new managed process. 
+ + Args: + name: Unique name for this process + cmd: Command and arguments + cwd: Working directory + env: Additional environment variables + log_file: File to redirect stdout/stderr to + pid_file: File to write PID to + + Returns: + ManagedProcess instance + """ + # Stop existing process with same name + if name in self._processes: + self._processes[name].stop() + + # Prepare environment + process_env = os.environ.copy() + if env: + process_env.update(env) + + # Open log file if specified + log_handle = None + if log_file: + log_file.parent.mkdir(parents=True, exist_ok=True) + log_handle = open(log_file, "w") + + # Start process + process = subprocess.Popen( + cmd, + cwd=cwd, + env=process_env, + stdout=log_handle or subprocess.DEVNULL, + stderr=subprocess.STDOUT if log_handle else subprocess.DEVNULL, + start_new_session=True, # Detach from parent + ) + + # Write PID file + if pid_file: + pid_file.parent.mkdir(parents=True, exist_ok=True) + pid_file.write_text(str(process.pid)) + + managed = ManagedProcess( + name=name, + process=process, + log_file=log_file, + pid_file=pid_file, + _log_handle=log_handle, + ) + self._processes[name] = managed + return managed + + def get(self, name: str) -> ManagedProcess | None: + """Get a managed process by name.""" + return self._processes.get(name) + + def stop(self, name: str, timeout: float = 10.0) -> int | None: + """Stop a process by name. 
+ + Returns: + Process return code, or None if not found + """ + if name not in self._processes: + return None + result = self._processes[name].stop(timeout) + del self._processes[name] + return result + + def stop_all(self, timeout: float = 10.0) -> None: + """Stop all managed processes.""" + for name in list(self._processes.keys()): + self.stop(name, timeout) + + def running(self) -> list[str]: + """Get names of all running processes.""" + return [name for name, proc in self._processes.items() if proc.running] + + def __contains__(self, name: str) -> bool: + return name in self._processes + + +def kill_process_on_port(port: int) -> bool: + """Kill any process listening on the specified port. + + Args: + port: Port number + + Returns: + True if a process was killed + """ + try: + # Use lsof to find process on port + result = subprocess.run( + ["lsof", "-ti", f":{port}"], + capture_output=True, + text=True, + ) + if result.returncode != 0 or not result.stdout.strip(): + return False + + pids = result.stdout.strip().split("\n") + for pid in pids: + try: + os.kill(int(pid), signal.SIGKILL) + except (ProcessLookupError, ValueError): + pass + return True + except FileNotFoundError: + # lsof not available + return False + + +def find_pid_by_name(pattern: str) -> list[int]: + """Find process IDs matching a pattern. 
+ + Args: + pattern: Pattern to match against process command line + + Returns: + List of matching PIDs + """ + try: + result = subprocess.run( + ["pgrep", "-f", pattern], + capture_output=True, + text=True, + ) + if result.returncode != 0 or not result.stdout.strip(): + return [] + return [int(pid) for pid in result.stdout.strip().split("\n")] + except (FileNotFoundError, ValueError): + return [] diff --git a/otdf-local/src/otdf_local/services/__init__.py b/otdf-local/src/otdf_local/services/__init__.py new file mode 100644 index 00000000..c3d9dabe --- /dev/null +++ b/otdf-local/src/otdf_local/services/__init__.py @@ -0,0 +1,27 @@ +"""Service management module for otdf-local.""" + +from otdf_local.services.base import Service, ServiceInfo, ServiceType +from otdf_local.services.docker import DockerService, get_docker_service +from otdf_local.services.kas import KASManager, KASService, get_kas_manager +from otdf_local.services.platform import PlatformService, get_platform_service +from otdf_local.services.provisioner import ( + Provisioner, + ProvisionResult, + get_provisioner, +) + +__all__ = [ + "Service", + "ServiceInfo", + "ServiceType", + "DockerService", + "get_docker_service", + "KASManager", + "KASService", + "get_kas_manager", + "PlatformService", + "get_platform_service", + "Provisioner", + "ProvisionResult", + "get_provisioner", +] diff --git a/otdf-local/src/otdf_local/services/base.py b/otdf-local/src/otdf_local/services/base.py new file mode 100644 index 00000000..aa8f9144 --- /dev/null +++ b/otdf-local/src/otdf_local/services/base.py @@ -0,0 +1,128 @@ +"""Base service class defining the service interface.""" + +from abc import ABC, abstractmethod +from dataclasses import dataclass +from enum import Enum + +from otdf_local.config.settings import Settings + + +class ServiceType(str, Enum): + """Type of service.""" + + DOCKER = "docker" + SUBPROCESS = "subprocess" + + +@dataclass +class ServiceInfo: + """Information about a service.""" + + name: str + 
port: int + service_type: ServiceType + running: bool + healthy: bool | None = None + pid: int | None = None + health_url: str | None = None + + def to_dict(self) -> dict: + """Convert to dictionary for JSON output.""" + return { + "name": self.name, + "port": self.port, + "type": self.service_type.value, + "running": self.running, + "healthy": self.healthy, + "pid": self.pid, + } + + +class Service(ABC): + """Abstract base class for managed services.""" + + def __init__(self, settings: Settings) -> None: + self.settings = settings + + @property + @abstractmethod + def name(self) -> str: + """Service name.""" + ... + + @property + @abstractmethod + def port(self) -> int: + """Service port.""" + ... + + @property + @abstractmethod + def service_type(self) -> ServiceType: + """Type of service (docker or subprocess).""" + ... + + @property + @abstractmethod + def health_url(self) -> str: + """Health check URL, if applicable.""" + pass + + @abstractmethod + def start(self) -> bool: + """Start the service. + + Returns: + True if started successfully + """ + ... + + @abstractmethod + def stop(self) -> bool: + """Stop the service. + + Returns: + True if stopped successfully + """ + ... + + @abstractmethod + def is_running(self) -> bool: + """Check if the service is currently running.""" + ... + + def is_healthy(self) -> bool | None: + """Check if the service is healthy. + + Returns: + True if healthy, False if unhealthy, None if health check not available + """ + from otdf_local.health.checks import check_http_health + + if not self.is_running(): + return None + + if self.health_url: + return check_http_health(self.health_url) + + return None + + def get_info(self) -> ServiceInfo: + """Get service information.""" + return ServiceInfo( + name=self.name, + port=self.port, + service_type=self.service_type, + running=self.is_running(), + healthy=self.is_healthy(), + health_url=self.health_url, + ) + + def restart(self) -> bool: + """Restart the service. 
+ + Returns: + True if restarted successfully + """ + self.stop() + return self.start() diff --git a/otdf-local/src/otdf_local/services/docker.py b/otdf-local/src/otdf_local/services/docker.py new file mode 100644 index 00000000..40e5ba20 --- /dev/null +++ b/otdf-local/src/otdf_local/services/docker.py @@ -0,0 +1,150 @@ +"""Docker compose service management.""" + +import subprocess + +from otdf_local.config.ports import Ports +from otdf_local.config.settings import Settings +from otdf_local.health.checks import check_http_health, check_port +from otdf_local.services.base import Service, ServiceInfo, ServiceType + + +class DockerService(Service): + """Manages Docker compose services (Keycloak, PostgreSQL).""" + + def __init__(self, settings: Settings) -> None: + super().__init__(settings) + self._compose_file = settings.docker_compose_file + + @property + def name(self) -> str: + return "docker" + + @property + def port(self) -> int: + return Ports.KEYCLOAK # Primary port for status + + @property + def service_type(self) -> ServiceType: + return ServiceType.DOCKER + + @property + def health_url(self) -> str: + return f"http://localhost:{Ports.KEYCLOAK}/auth/realms/master" + + def start(self) -> bool: + """Start Docker compose services.""" + if not self._compose_file.exists(): + return False + + result = subprocess.run( + ["docker", "compose", "-f", str(self._compose_file), "up", "-d"], + capture_output=True, + text=True, + cwd=self._compose_file.parent, + ) + return result.returncode == 0 + + def stop(self) -> bool: + """Stop Docker compose services.""" + if not self._compose_file.exists(): + return False + + result = subprocess.run( + ["docker", "compose", "-f", str(self._compose_file), "down"], + capture_output=True, + text=True, + cwd=self._compose_file.parent, + ) + return result.returncode == 0 + + def is_running(self) -> bool: + """Check if Docker services are running.""" + # Check if both Keycloak and PostgreSQL ports are open + keycloak_up = 
check_port("localhost", Ports.KEYCLOAK) + postgres_up = check_port("localhost", Ports.POSTGRES) + return keycloak_up and postgres_up + + def is_healthy(self) -> bool | None: + """Check if Keycloak is responding.""" + if not self.is_running(): + return None + return check_http_health(self.health_url) + + def get_container_status(self) -> dict[str, dict]: + """Get detailed status of each container.""" + if not self._compose_file.exists(): + return {} + + result = subprocess.run( + [ + "docker", + "compose", + "-f", + str(self._compose_file), + "ps", + "--format", + "json", + ], + capture_output=True, + text=True, + cwd=self._compose_file.parent, + ) + + if result.returncode != 0: + return {} + + # Parse JSON output (one object per line) + import json + + containers = {} + for line in result.stdout.strip().split("\n"): + if line: + try: + data = json.loads(line) + containers[data.get("Service", data.get("Name", "unknown"))] = { + "state": data.get("State", "unknown"), + "status": data.get("Status", "unknown"), + "health": data.get("Health", ""), + } + except json.JSONDecodeError: + pass + + return containers + + def get_all_info(self) -> list[ServiceInfo]: + """Get info for all Docker services.""" + keycloak_running = check_port("localhost", Ports.KEYCLOAK) + postgres_running = check_port("localhost", Ports.POSTGRES) + + keycloak_healthy = None + if keycloak_running: + keycloak_healthy = check_http_health( + f"http://localhost:{Ports.KEYCLOAK}/auth/realms/master" + ) + + return [ + ServiceInfo( + name="keycloak", + port=Ports.KEYCLOAK, + service_type=ServiceType.DOCKER, + running=keycloak_running, + healthy=keycloak_healthy, + health_url=f"http://localhost:{Ports.KEYCLOAK}/auth/realms/master", + ), + ServiceInfo( + name="postgres", + port=Ports.POSTGRES, + service_type=ServiceType.DOCKER, + running=postgres_running, + healthy=None, # No HTTP health check for postgres + ), + ] + + +def get_docker_service(settings: Settings | None = None) -> DockerService: + """Get a 
DockerService instance.""" + if settings is None: + from otdf_local.config.settings import get_settings + + settings = get_settings() + return DockerService(settings) diff --git a/otdf-local/src/otdf_local/services/kas.py b/otdf-local/src/otdf_local/services/kas.py new file mode 100644 index 00000000..0b7adfa6 --- /dev/null +++ b/otdf-local/src/otdf_local/services/kas.py @@ -0,0 +1,227 @@ +"""KAS (Key Access Service) instance management.""" + +from pathlib import Path + +from otdf_local.config.features import PlatformFeatures +from otdf_local.config.ports import Ports +from otdf_local.config.settings import Settings +from otdf_local.health.checks import check_http_health, check_port +from otdf_local.process.manager import ( + ManagedProcess, + ProcessManager, + kill_process_on_port, +) +from otdf_local.services.base import Service, ServiceInfo, ServiceType +from otdf_local.utils.yaml import copy_yaml_with_updates, get_nested, load_yaml + + +class KASService(Service): + """Manages a single KAS instance.""" + + def __init__( + self, + settings: Settings, + kas_name: str, + process_manager: ProcessManager | None = None, + ) -> None: + super().__init__(settings) + self._kas_name = kas_name + self._process_manager = process_manager or ProcessManager() + self._process: ManagedProcess | None = None + + @property + def name(self) -> str: + return f"kas-{self._kas_name}" + + @property + def port(self) -> int: + return Ports.get_kas_port(self._kas_name) + + @property + def service_type(self) -> ServiceType: + return ServiceType.SUBPROCESS + + @property + def health_url(self) -> str: + return f"http://localhost:{self.port}/healthz" + + @property + def is_key_management(self) -> bool: + """Check if this is a key management KAS instance.""" + return Ports.is_km_kas(self._kas_name) + + def _generate_config(self) -> Path: + """Generate the KAS config file from template.""" + config_path = self.settings.get_kas_config_path(self._kas_name) + template_path = 
self.settings.kas_template_config + + # Load platform config to get root_key + platform_config = load_yaml(self.settings.platform_config) + root_key = get_nested(platform_config, "services.kas.root_key", "") + + # Detect platform features to determine supported config options + features = PlatformFeatures.detect(self.settings.platform_dir) + + # Use stderr if supported, otherwise stdout (v0.9.0 only supports stdout) + logger_output = "stderr" if features.supports("logger_stderr") else "stdout" + + # Base updates for all KAS instances + updates = { + "logger.type": "json", + "logger.output": logger_output, + "server.port": self.port, + "services.kas.root_key": root_key, + } + + # Key management KAS instances need additional config + if self.is_key_management: + updates["services.kas.preview.key_management"] = True + updates["services.kas.preview.ec_tdf_enabled"] = True + # registered_kas_uri should NOT have /kas suffix + updates["services.kas.registered_kas_uri"] = f"http://localhost:{self.port}" + + copy_yaml_with_updates(template_path, config_path, updates) + return config_path + + def start(self) -> bool: + """Start the KAS instance.""" + # Ensure directories exist + self.settings.ensure_directories() + + # Kill any existing process on the port + kill_process_on_port(self.port) + + # Generate config + config_path = self._generate_config() + + # Build the command + cmd = [ + "go", + "run", + "./service", + "start", + "--config-file", + str(config_path), + ] + + # Start the process + log_file = self.settings.get_kas_log_path(self._kas_name) + + self._process = self._process_manager.start( + name=self.name, + cmd=cmd, + cwd=self.settings.platform_dir, + log_file=log_file, + env={"OPENTDF_LOG_LEVEL": "info"}, + ) + + return self._process is not None + + def stop(self) -> bool: + """Stop the KAS instance.""" + if self._process: + self._process.stop() + self._process = None + + # Also kill any processes on the port + kill_process_on_port(self.port) + return True + + 
def is_running(self) -> bool: + """Check if the KAS instance is running.""" + if self._process and self._process.running: + return True + return check_port("localhost", self.port) + + def is_healthy(self) -> bool | None: + """Check if the KAS instance is healthy.""" + if not self.is_running(): + return None + return check_http_health(self.health_url) + + def get_info(self) -> ServiceInfo: + """Get service information.""" + info = super().get_info() + if self._process: + info.pid = self._process.pid + return info + + +class KASManager: + """Manages all KAS instances.""" + + def __init__( + self, + settings: Settings, + process_manager: ProcessManager | None = None, + ) -> None: + self.settings = settings + self._process_manager = process_manager or ProcessManager() + self._instances: dict[str, KASService] = {} + + # Create instances for all configured KAS + for kas_name in Ports.all_kas_names(): + self._instances[kas_name] = KASService( + settings, kas_name, self._process_manager + ) + + def get(self, name: str) -> KASService | None: + """Get a KAS instance by name.""" + return self._instances.get(name) + + def start_all(self) -> dict[str, bool]: + """Start all KAS instances.""" + results = {} + for name, instance in self._instances.items(): + results[name] = instance.start() + return results + + def stop_all(self) -> dict[str, bool]: + """Stop all KAS instances.""" + results = {} + for name, instance in self._instances.items(): + results[name] = instance.stop() + return results + + def start_standard(self) -> dict[str, bool]: + """Start only standard (non-km) KAS instances.""" + results = {} + for name in Ports.standard_kas_names(): + results[name] = self._instances[name].start() + return results + + def start_km(self) -> dict[str, bool]: + """Start only key management KAS instances.""" + results = {} + for name in Ports.km_kas_names(): + results[name] = self._instances[name].start() + return results + + def get_all_info(self) -> list[ServiceInfo]: + """Get info 
for all KAS instances.""" + return [instance.get_info() for instance in self._instances.values()] + + def get_running(self) -> list[str]: + """Get names of running KAS instances.""" + return [name for name, inst in self._instances.items() if inst.is_running()] + + def __iter__(self): + return iter(self._instances.values()) + + +# Global process manager for singleton behavior +_kas_process_manager: ProcessManager | None = None + + +def get_kas_manager(settings: Settings | None = None) -> KASManager: + """Get a KASManager instance with shared process manager.""" + global _kas_process_manager + if _kas_process_manager is None: + _kas_process_manager = ProcessManager() + + if settings is None: + from otdf_local.config.settings import get_settings + + settings = get_settings() + + return KASManager(settings, _kas_process_manager) diff --git a/otdf-local/src/otdf_local/services/platform.py b/otdf-local/src/otdf_local/services/platform.py new file mode 100644 index 00000000..15f7f4e5 --- /dev/null +++ b/otdf-local/src/otdf_local/services/platform.py @@ -0,0 +1,186 @@ +"""Platform service management.""" + +from pathlib import Path + +from otdf_local.config.features import PlatformFeatures +from otdf_local.config.ports import Ports +from otdf_local.config.settings import Settings +from otdf_local.health.checks import check_http_health, check_port +from otdf_local.process.manager import ( + ManagedProcess, + ProcessManager, + kill_process_on_port, +) +from otdf_local.services.base import Service, ServiceInfo, ServiceType +from otdf_local.utils.keys import get_golden_keyring_entries, setup_golden_keys +from otdf_local.utils.yaml import ( + append_to_list, + copy_yaml_with_updates, + load_yaml, + save_yaml, +) + + +class PlatformService(Service): + """Manages the OpenTDF platform service.""" + + def __init__( + self, + settings: Settings, + process_manager: ProcessManager | None = None, + ) -> None: + super().__init__(settings) + self._process_manager = process_manager or 
ProcessManager() + self._process: ManagedProcess | None = None + + @property + def name(self) -> str: + return "platform" + + @property + def port(self) -> int: + return Ports.PLATFORM + + @property + def service_type(self) -> ServiceType: + return ServiceType.SUBPROCESS + + @property + def health_url(self) -> str: + return f"http://localhost:{self.port}/healthz" + + def _generate_config(self) -> Path: + """Generate the platform config file from template.""" + config_path = self.settings.platform_config + template_path = self.settings.platform_template_config + + # Detect platform features to determine supported config options + features = PlatformFeatures.detect(self.settings.platform_dir) + + # Use stderr if supported, otherwise stdout (v0.9.0 only supports stdout) + logger_output = "stderr" if features.supports("logger_stderr") else "stdout" + + # Updates for platform config + updates = { + "logger.level": "debug", + "logger.type": "json", + "logger.output": logger_output, + } + + copy_yaml_with_updates(template_path, config_path, updates) + + # Set up golden keys for legacy TDF tests + self._setup_golden_keys(config_path) + + return config_path + + def _setup_golden_keys(self, config_path: Path) -> None: + """Add golden keys to platform config for legacy TDF decryption. + + Extracts keys from extra-keys.json and adds them to the platform config + so legacy golden TDFs can be decrypted. 
+ """ + # Set up golden key files and get their config entries + golden_keys = setup_golden_keys( + self.settings.xtest_root, + self.settings.platform_dir, + ) + + if not golden_keys: + return + + # Load config, append golden keys, and save + data = load_yaml(config_path) + + # Add keys to cryptoProvider.standard.keys + append_to_list(data, "server.cryptoProvider.standard.keys", golden_keys) + + # Add keyring entries for legacy decryption + keyring_entries = get_golden_keyring_entries() + append_to_list(data, "services.kas.keyring", keyring_entries) + + save_yaml(config_path, data) + + def start(self) -> bool: + """Start the platform service.""" + # Ensure directories exist + self.settings.ensure_directories() + + # Kill any existing process on the port + kill_process_on_port(self.port) + + # Generate config + config_path = self._generate_config() + + # Build the command + cmd = [ + "go", + "run", + "./service", + "start", + "--config-file", + str(config_path), + ] + + # Start the process + log_file = self.settings.logs_dir / "platform.log" + + self._process = self._process_manager.start( + name=self.name, + cmd=cmd, + cwd=self.settings.platform_dir, + log_file=log_file, + env={"OPENTDF_LOG_LEVEL": "info"}, + ) + + return self._process is not None + + def stop(self) -> bool: + """Stop the platform service.""" + if self._process: + self._process.stop() + self._process = None + + # Also kill any processes on the port + kill_process_on_port(self.port) + return True + + def is_running(self) -> bool: + """Check if the platform is running.""" + # First check our managed process + if self._process and self._process.running: + return True + + # Fall back to port check (may have been started externally) + return check_port("localhost", self.port) + + def is_healthy(self) -> bool | None: + """Check if the platform is healthy.""" + if not self.is_running(): + return None + return check_http_health(self.health_url) + + def get_info(self) -> ServiceInfo: + """Get service 
information.""" + info = super().get_info() + if self._process: + info.pid = self._process.pid + return info + + +# Global process manager for singleton behavior +_process_manager: ProcessManager | None = None + + +def get_platform_service(settings: Settings | None = None) -> PlatformService: + """Get a PlatformService instance with shared process manager.""" + global _process_manager + if _process_manager is None: + _process_manager = ProcessManager() + + if settings is None: + from otdf_local.config.settings import get_settings + + settings = get_settings() + + return PlatformService(settings, _process_manager) diff --git a/otdf-local/src/otdf_local/services/provisioner.py b/otdf-local/src/otdf_local/services/provisioner.py new file mode 100644 index 00000000..3e69ab6b --- /dev/null +++ b/otdf-local/src/otdf_local/services/provisioner.py @@ -0,0 +1,148 @@ +"""Provisioning service for Keycloak and fixtures.""" + +import subprocess +from dataclasses import dataclass + +from otdf_local.config.settings import Settings + + +@dataclass +class ProvisionResult: + """Result of a provisioning operation.""" + + success: bool + error_message: str | None = None + stdout: str = "" + stderr: str = "" + return_code: int = 0 + + def __bool__(self) -> bool: + """Allow boolean checks for backward compatibility.""" + return self.success + + +class Provisioner: + """Handles provisioning of Keycloak and test fixtures.""" + + def __init__(self, settings: Settings) -> None: + self.settings = settings + + def provision_all(self) -> ProvisionResult: + """Run all provisioning steps.""" + keycloak_result = self.provision_keycloak() + fixtures_result = self.provision_fixtures() + + # If both succeeded, return success + if keycloak_result and fixtures_result: + return ProvisionResult(success=True) + + # Otherwise, return failure with first error + if not keycloak_result: + return keycloak_result + return fixtures_result + + def provision_keycloak(self) -> ProvisionResult: + """Provision 
Keycloak with required configuration. + + This runs the provision-keycloak script to set up: + - Realm configuration + - Client credentials + - Users for testing + """ + return self._provision_("keycloak") + + def provision_fixtures(self) -> ProvisionResult: + """Provision test fixtures. + + This runs the provision-fixtures script to set up: + - Attributes + - Entitlements + - KAS registrations + """ + return self._provision_("fixtures") + + def _provision_(self, mode: str) -> ProvisionResult: + """Execute a provisioning operation. + + Args: + mode: The provisioning mode ("keycloak" or "fixtures") + + Returns: + ProvisionResult with success status and error details + """ + cmd = [ + "go", + "run", + "./service", + "provision", + mode, + "--config-file", + str(self.settings.platform_config), + ] + + result = subprocess.run( + cmd, + capture_output=True, + text=True, + cwd=self.settings.platform_dir, + ) + + # If the provision command doesn't exist, that's okay + if result.returncode != 0 and "unknown command" in result.stderr.lower(): + return ProvisionResult( + success=True, + error_message="Provisioning command not available on this platform version", + stdout=result.stdout, + stderr=result.stderr, + return_code=result.returncode, + ) + + # If provisioning failed, extract error message from stderr + if result.returncode != 0: + error_lines = result.stderr.strip().split("\n") + error_message = error_lines[-1] if error_lines else "Unknown error" + return ProvisionResult( + success=False, + error_message=error_message, + stdout=result.stdout, + stderr=result.stderr, + return_code=result.returncode, + ) + + # Success + return ProvisionResult( + success=True, + stdout=result.stdout, + stderr=result.stderr, + return_code=result.returncode, + ) + + def register_kas_instances(self) -> bool: + """Register all KAS instances with the platform. + + This ensures the platform knows about all the KAS instances + for autoconfigure/ABAC tests. 
+ """ + + # For each KAS, register it with the platform's KAS registry + # This typically requires an admin token and API calls + # For now, we rely on fixtures to do this + + # The fixtures should register KAS instances at: + # - http://localhost:8181/kas (alpha) + # - http://localhost:8282/kas (beta) + # - http://localhost:8383/kas (gamma) + # - http://localhost:8484/kas (delta) + # - http://localhost:8585 (km1 - no /kas suffix for key management) + # - http://localhost:8686 (km2 - no /kas suffix for key management) + + return True + + +def get_provisioner(settings: Settings | None = None) -> Provisioner: + """Get a Provisioner instance.""" + if settings is None: + from otdf_local.config.settings import get_settings + + settings = get_settings() + return Provisioner(settings) diff --git a/otdf-local/src/otdf_local/utils/__init__.py b/otdf-local/src/otdf_local/utils/__init__.py new file mode 100644 index 00000000..ba47b61d --- /dev/null +++ b/otdf-local/src/otdf_local/utils/__init__.py @@ -0,0 +1 @@ +"""Utility module for otdf-local.""" diff --git a/otdf-local/src/otdf_local/utils/console.py b/otdf-local/src/otdf_local/utils/console.py new file mode 100644 index 00000000..b3ca19cb --- /dev/null +++ b/otdf-local/src/otdf_local/utils/console.py @@ -0,0 +1,89 @@ +"""Rich console helpers for formatted output.""" + +from collections.abc import Iterator +from contextlib import contextmanager + +from rich.console import Console +from rich.panel import Panel +from rich.progress import Progress, SpinnerColumn, TextColumn, TimeElapsedColumn +from rich.style import Style +from rich.table import Table + +# Global console instance +console = Console() + +# Status styles +STYLE_SUCCESS = Style(color="green", bold=True) +STYLE_ERROR = Style(color="red", bold=True) +STYLE_WARNING = Style(color="yellow") +STYLE_INFO = Style(color="blue") +STYLE_DIM = Style(dim=True) + + +def print_success(message: str) -> None: + """Print a success message.""" + console.print(f"[green]✓[/green] 
{message}") + + +def print_error(message: str) -> None: + """Print an error message.""" + console.print(f"[red]✗[/red] {message}") + + +def print_warning(message: str) -> None: + """Print a warning message.""" + console.print(f"[yellow]![/yellow] {message}") + + +def print_info(message: str) -> None: + """Print an info message.""" + console.print(f"[blue]ℹ[/blue] {message}") + + +def print_header(title: str) -> None: + """Print a section header.""" + console.print(Panel(title, style="bold cyan", expand=False)) + + +@contextmanager +def status_spinner(message: str) -> Iterator[None]: + """Context manager for showing a spinner during operations.""" + with console.status(f"[bold blue]{message}[/bold blue]"): + yield + + +def create_service_table() -> Table: + """Create a table for displaying service status.""" + table = Table(show_header=True, header_style="bold magenta") + table.add_column("Service", style="cyan", no_wrap=True) + table.add_column("Port", justify="right") + table.add_column("Type", style="dim") + table.add_column("Status", justify="center") + table.add_column("Health", justify="center") + return table + + +def format_status(running: bool) -> str: + """Format a running status indicator.""" + if running: + return "[green]●[/green] running" + return "[red]○[/red] stopped" + + +def format_health(healthy: bool | None) -> str: + """Format a health status indicator.""" + if healthy is None: + return "[dim]—[/dim]" + if healthy: + return "[green]✓[/green]" + return "[red]✗[/red]" + + +def create_progress() -> Progress: + """Create a progress display for multi-step operations.""" + return Progress( + SpinnerColumn(), + TextColumn("[progress.description]{task.description}"), + TimeElapsedColumn(), + console=console, + ) diff --git a/otdf-local/src/otdf_local/utils/keys.py b/otdf-local/src/otdf_local/utils/keys.py new file mode 100644 index 00000000..eadf0b30 --- /dev/null +++ b/otdf-local/src/otdf_local/utils/keys.py @@ -0,0 +1,209 @@ +"""Cryptographic key 
generation utilities.""" + +import json +import secrets +import subprocess +from pathlib import Path + + +def generate_root_key() -> str: + """Generate a random 256-bit root key as hex string.""" + return secrets.token_hex(32) + + +def generate_rsa_keypair(key_dir: Path, name: str = "kas") -> tuple[Path, Path]: + """Generate an RSA keypair for KAS. + + Args: + key_dir: Directory to store keys + name: Base name for key files + + Returns: + Tuple of (private_key_path, public_key_path) + """ + key_dir.mkdir(parents=True, exist_ok=True) + private_key = key_dir / f"{name}-private.pem" + public_key = key_dir / f"{name}-cert.pem" + + # Generate private key + subprocess.run( + [ + "openssl", + "genpkey", + "-algorithm", + "RSA", + "-out", + str(private_key), + "-pkeyopt", + "rsa_keygen_bits:2048", + ], + check=True, + capture_output=True, + ) + + # Generate self-signed certificate + subprocess.run( + [ + "openssl", + "req", + "-new", + "-x509", + "-key", + str(private_key), + "-out", + str(public_key), + "-days", + "365", + "-subj", + "/CN=kas/O=OpenTDF", + ], + check=True, + capture_output=True, + ) + + return private_key, public_key + + +def generate_ec_keypair(key_dir: Path, name: str = "kas-ec") -> tuple[Path, Path]: + """Generate an EC keypair for KAS. 
+ + Args: + key_dir: Directory to store keys + name: Base name for key files + + Returns: + Tuple of (private_key_path, public_key_path) + """ + key_dir.mkdir(parents=True, exist_ok=True) + private_key = key_dir / f"{name}-private.pem" + public_key = key_dir / f"{name}-cert.pem" + + # Generate EC private key + subprocess.run( + [ + "openssl", + "ecparam", + "-name", + "prime256v1", + "-genkey", + "-noout", + "-out", + str(private_key), + ], + check=True, + capture_output=True, + ) + + # Generate self-signed certificate + subprocess.run( + [ + "openssl", + "req", + "-new", + "-x509", + "-key", + str(private_key), + "-out", + str(public_key), + "-days", + "365", + "-subj", + "/CN=kas-ec/O=OpenTDF", + ], + check=True, + capture_output=True, + ) + + return private_key, public_key + + +def ensure_keys_exist(key_dir: Path, force: bool = False) -> bool: + """Ensure all required keys exist, generating if needed. + + Args: + key_dir: Directory for key storage + force: If True, regenerate keys even if they exist + + Returns: + True if keys were generated, False if they already existed + """ + rsa_private = key_dir / "kas-private.pem" + ec_private = key_dir / "kas-ec-private.pem" + + if not force and rsa_private.exists() and ec_private.exists(): + return False + + generate_rsa_keypair(key_dir, "kas") + generate_ec_keypair(key_dir, "kas-ec") + return True + + +def setup_golden_keys( + xtest_root: Path, + platform_dir: Path, +) -> list[dict]: + """Extract and install golden keys for legacy TDF decryption. + + Reads extra-keys.json from the xtest directory and copies the key files + to the platform directory for use by the platform service. 
+ + Args: + xtest_root: Root directory of xtest (contains xtest/extra-keys.json) + platform_dir: Platform source directory + + Returns: + List of key configurations to add to cryptoProvider.standard.keys + """ + extra_keys_file = xtest_root / "xtest" / "extra-keys.json" + if not extra_keys_file.exists(): + return [] + + with open(extra_keys_file) as f: + extra_keys = json.load(f) + + keys_config = [] + for key_entry in extra_keys: + kid = key_entry.get("kid", "") + alg = key_entry.get("alg", "") + private_key = key_entry.get("privateKey", "") + cert = key_entry.get("cert", "") + + if not all([kid, alg, private_key, cert]): + continue + + # Map algorithm to platform format + if alg == "rsa:2048": + platform_alg = "rsa:2048" + elif alg == "ec:secp256r1": + platform_alg = "ec:secp256r1" + else: + continue + + # Write key files to platform directory + private_path = platform_dir / f"{kid}-private.pem" + cert_path = platform_dir / f"{kid}-cert.pem" + + private_path.write_text(private_key) + cert_path.write_text(cert) + + keys_config.append( + { + "kid": kid, + "alg": platform_alg, + "private": f"{kid}-private.pem", + "cert": f"{kid}-cert.pem", + } + ) + + return keys_config + + +def get_golden_keyring_entries() -> list[dict]: + """Get keyring entries for golden keys. 
+ + Returns: + List of keyring entries to add to services.kas.keyring + """ + return [ + {"kid": "golden-r1", "alg": "rsa:2048", "legacy": True}, + ] diff --git a/otdf-local/src/otdf_local/utils/yaml.py b/otdf-local/src/otdf_local/utils/yaml.py new file mode 100644 index 00000000..a71653a4 --- /dev/null +++ b/otdf-local/src/otdf_local/utils/yaml.py @@ -0,0 +1,124 @@ +"""YAML manipulation utilities using ruamel.yaml.""" + +from pathlib import Path +from typing import Any + +from ruamel.yaml import YAML + +# Configure YAML instance to preserve formatting +_yaml = YAML() +_yaml.preserve_quotes = True +_yaml.width = 120 + + +def load_yaml(path: Path) -> dict[str, Any]: + """Load a YAML file, preserving comments and formatting.""" + with open(path) as f: + return _yaml.load(f) or {} + + +def save_yaml(path: Path, data: dict[str, Any]) -> None: + """Save data to a YAML file, preserving formatting.""" + with open(path, "w") as f: + _yaml.dump(data, f) + + +def get_nested(data: dict[str, Any], path: str, default: Any = None) -> Any: + """Get a nested value from a dict using dot notation. + + Args: + data: The dictionary to search + path: Dot-separated path (e.g., "services.kas.root_key") + default: Value to return if path not found + + Returns: + The value at the path, or default if not found + """ + keys = path.split(".") + current = data + for key in keys: + if not isinstance(current, dict) or key not in current: + return default + current = current[key] + return current + + +def set_nested(data: dict[str, Any], path: str, value: Any) -> None: + """Set a nested value in a dict using dot notation. + + Args: + data: The dictionary to modify + path: Dot-separated path (e.g., "services.kas.root_key") + value: The value to set + + Creates intermediate dicts as needed. 
+ """ + keys = path.split(".") + current = data + for key in keys[:-1]: + if key not in current: + current[key] = {} + current = current[key] + current[keys[-1]] = value + + +def update_yaml_file(path: Path, updates: dict[str, Any]) -> None: + """Load a YAML file, apply updates, and save. + + Args: + path: Path to the YAML file + updates: Dict of dot-notation paths to values + + Example: + update_yaml_file( + config_path, + {"server.port": 8080, "services.kas.root_key": "abc123"} + ) + """ + data = load_yaml(path) + for dot_path, value in updates.items(): + set_nested(data, dot_path, value) + save_yaml(path, data) + + +def append_to_list(data: dict[str, Any], path: str, items: list[Any]) -> None: + """Append items to a list at a nested path. + + Args: + data: The dictionary to modify + path: Dot-separated path to the list + items: Items to append + """ + current_list = get_nested(data, path, []) + if not isinstance(current_list, list): + current_list = [] + + # Check for duplicates by kid if items are dicts with kid + existing_kids = { + item.get("kid") + for item in current_list + if isinstance(item, dict) and "kid" in item + } + for item in items: + if isinstance(item, dict) and "kid" in item: + if item["kid"] not in existing_kids: + current_list.append(item) + existing_kids.add(item["kid"]) + else: + current_list.append(item) + + set_nested(data, path, current_list) + + +def copy_yaml_with_updates(source: Path, dest: Path, updates: dict[str, Any]) -> None: + """Copy a YAML file and apply updates to the copy. 
+ + Args: + source: Source YAML file path + dest: Destination YAML file path + updates: Dict of dot-notation paths to values + """ + data = load_yaml(source) + for dot_path, value in updates.items(): + set_nested(data, dot_path, value) + save_yaml(dest, data) diff --git a/otdf-local/tests/__init__.py b/otdf-local/tests/__init__.py new file mode 100644 index 00000000..62f82fa1 --- /dev/null +++ b/otdf-local/tests/__init__.py @@ -0,0 +1 @@ +"""Tests for otdf-local.""" diff --git a/otdf-local/tests/test_health.py b/otdf-local/tests/test_health.py new file mode 100644 index 00000000..c455cab8 --- /dev/null +++ b/otdf-local/tests/test_health.py @@ -0,0 +1,186 @@ +"""Tests for health check utilities.""" + +import threading +from http.server import BaseHTTPRequestHandler, HTTPServer + +import pytest +from otdf_local.health.checks import check_http_health, check_port, get_service_status +from otdf_local.health.waits import ( + WaitTimeoutError, + wait_for_condition, + wait_for_health, + wait_for_port, +) + + +class SimpleHealthHandler(BaseHTTPRequestHandler): + """Simple HTTP handler for testing.""" + + def do_GET(self): + if self.path == "/healthz": + self.send_response(200) + self.end_headers() + self.wfile.write(b"OK") + elif self.path == "/unhealthy": + self.send_response(500) + self.end_headers() + self.wfile.write(b"Error") + else: + self.send_response(404) + self.end_headers() + + def log_message(self, format, *args): + pass # Suppress logging + + +@pytest.fixture +def health_server(): + """Start a simple HTTP server for health check testing.""" + server = HTTPServer(("127.0.0.1", 0), SimpleHealthHandler) + port = server.server_address[1] + + thread = threading.Thread(target=server.serve_forever) + thread.daemon = True + thread.start() + + yield port + + server.shutdown() + + +class TestCheckPort: + """Tests for check_port function.""" + + def test_open_port(self, health_server): + """Test that check_port returns True for open port.""" + assert 
check_port("127.0.0.1", health_server) is True + + def test_closed_port(self): + """Test that check_port returns False for closed port.""" + # Port 59999 is in the dynamic range and assumed to be closed + assert check_port("127.0.0.1", 59999, timeout=0.1) is False + + def test_invalid_host(self): + """Test that check_port returns False for invalid host.""" + assert check_port("invalid.host.example", 8080, timeout=0.1) is False + + +class TestCheckHttpHealth: + """Tests for check_http_health function.""" + + def test_healthy_endpoint(self, health_server): + """Test healthy endpoint returns True.""" + url = f"http://127.0.0.1:{health_server}/healthz" + assert check_http_health(url) is True + + def test_unhealthy_endpoint(self, health_server): + """Test unhealthy endpoint returns False.""" + url = f"http://127.0.0.1:{health_server}/unhealthy" + assert check_http_health(url) is False + + def test_nonexistent_endpoint(self, health_server): + """Test nonexistent endpoint returns False.""" + url = f"http://127.0.0.1:{health_server}/nonexistent" + assert check_http_health(url) is False + + def test_connection_refused(self): + """Test connection refused returns False.""" + url = "http://127.0.0.1:59999/healthz" + assert check_http_health(url, timeout=0.1) is False + + def test_custom_expected_status(self, health_server): + """Test custom expected status.""" + url = f"http://127.0.0.1:{health_server}/unhealthy" + assert check_http_health(url, expected_status=500) is True + assert check_http_health(url, expected_status=(500, 503)) is True + + +class TestGetServiceStatus: + """Tests for get_service_status function.""" + + def test_running_service(self, health_server): + """Test running service returns 'running'.""" + url = f"http://127.0.0.1:{health_server}/healthz" + status = get_service_status(health_server, url) + assert status == "running" + + def test_unhealthy_service(self, health_server): + """Test unhealthy service returns 'unhealthy'.""" + url = f"http://127.0.0.1:{health_server}/unhealthy" + 
status = get_service_status(health_server, url) + assert status == "unhealthy" + + def test_stopped_service(self): + """Test stopped service returns 'stopped'.""" + status = get_service_status(59999) + assert status == "stopped" + + def test_no_health_url(self, health_server): + """Test service without health URL returns 'running' if port open.""" + status = get_service_status(health_server, None) + assert status == "running" + + +class TestWaitForPort: + """Tests for wait_for_port function.""" + + def test_port_already_open(self, health_server): + """Test returns immediately when port is already open.""" + assert wait_for_port(health_server, timeout=1.0) is True + + def test_timeout_on_closed_port(self): + """Test raises WaitTimeoutError when port stays closed.""" + with pytest.raises(WaitTimeoutError) as exc_info: + wait_for_port(59999, timeout=0.1, poll_interval=0.05) + assert "59999" not in str(exc_info.value) # Uses service_name + assert "0.1s" in str(exc_info.value) + + +class TestWaitForHealth: + """Tests for wait_for_health function.""" + + def test_already_healthy(self, health_server): + """Test returns immediately when already healthy.""" + url = f"http://127.0.0.1:{health_server}/healthz" + assert wait_for_health(url, timeout=1.0) is True + + def test_timeout_on_unhealthy(self, health_server): + """Test raises WaitTimeoutError when stays unhealthy.""" + url = f"http://127.0.0.1:{health_server}/unhealthy" + with pytest.raises(WaitTimeoutError): + wait_for_health(url, timeout=0.2, poll_interval=0.05) + + +class TestWaitForCondition: + """Tests for wait_for_condition function.""" + + def test_condition_already_true(self): + """Test returns immediately when condition is true.""" + assert wait_for_condition(lambda: True, timeout=1.0) is True + + def test_condition_becomes_true(self): + """Test waits for condition to become true.""" + counter = {"value": 0} + + def condition(): + counter["value"] += 1 + return counter["value"] >= 3 + + assert 
wait_for_condition(condition, timeout=1.0, poll_interval=0.05) is True + + def test_timeout_when_condition_stays_false(self): + """Test raises WaitTimeoutError when condition stays false.""" + with pytest.raises(WaitTimeoutError): + wait_for_condition(lambda: False, timeout=0.1, poll_interval=0.05) + + def test_handles_exception_in_condition(self): + """Test treats exceptions as 'not ready'.""" + counter = {"value": 0} + + def condition(): + counter["value"] += 1 + if counter["value"] < 3: + raise RuntimeError("Not ready") + return True + + assert wait_for_condition(condition, timeout=1.0, poll_interval=0.05) is True diff --git a/otdf-local/tests/test_integration.py b/otdf-local/tests/test_integration.py new file mode 100644 index 00000000..90bea2a1 --- /dev/null +++ b/otdf-local/tests/test_integration.py @@ -0,0 +1,92 @@ +"""Integration tests for otdf-local CLI. + +These tests require Docker to be available and will start/stop real services. +They are marked as integration tests and can be skipped with: pytest -m "not integration" +""" + +import subprocess +import time + +import pytest + + +def run_otdf_local(*args, timeout=60) -> subprocess.CompletedProcess: + """Run otdf-local CLI command.""" + cmd = ["python", "-m", "otdf_local", *args] + return subprocess.run( + cmd, + capture_output=True, + text=True, + timeout=timeout, + cwd=str(__file__).rsplit("/tests/", 1)[0], + ) + + +class TestCLIBasic: + """Basic CLI functionality tests.""" + + def test_version(self): + """Test version command.""" + result = run_otdf_local("--version") + assert result.returncode == 0 + assert "otdf-local version" in result.stdout + + def test_help(self): + """Test help output.""" + result = run_otdf_local("--help") + assert result.returncode == 0 + assert "OpenTDF test environment" in result.stdout + + def test_ls_no_services(self): + """Test ls command when no services running.""" + result = run_otdf_local("ls") + # Should succeed even with no services + assert result.returncode == 0 
+ + def test_ls_json(self): + """Test ls command with JSON output.""" + result = run_otdf_local("ls", "--json", "--all") + assert result.returncode == 0 + # Should be valid JSON + import json + + data = json.loads(result.stdout) + assert isinstance(data, list) + + +class TestServiceLifecycle: + """Service lifecycle tests (start/stop). + + These tests are slower as they actually start services. + """ + + @pytest.fixture(autouse=True) + def cleanup(self): + """Ensure services are stopped after each test.""" + yield + run_otdf_local("down", timeout=30) + + def test_up_down_cycle(self): + """Test basic up/down cycle with Docker only.""" + # Start just Docker services + result = run_otdf_local("up", "--services", "docker", timeout=120) + # May fail if Docker isn't available, which is okay for CI + if result.returncode != 0: + pytest.skip("Docker not available") + + # Give it a moment + time.sleep(2) + + # Check status + result = run_otdf_local("status", "--json") + assert result.returncode == 0 + + # Stop services + result = run_otdf_local("down", timeout=60) + assert result.returncode == 0 + + def test_clean_command(self): + """Test clean command.""" + result = run_otdf_local("clean") + assert result.returncode == 0 + assert "complete" in result.stdout.lower() diff --git a/otdf-local/uv.lock b/otdf-local/uv.lock new file mode 100644 index 00000000..4e0e4739 --- /dev/null +++ b/otdf-local/uv.lock @@ -0,0 +1,590 @@ +version = 1 +revision = 3 +requires-python = ">=3.11" + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "anyio" +version = "4.12.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/96/f0/5eb65b2bb0d09ac6776f2eb54adee6abe8228ea05b20a5ad0e4945de8aac/anyio-4.12.1.tar.gz", hash = "sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703", size = 228685, upload-time = "2026-01-06T11:45:21.246Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592, upload-time = "2026-01-06T11:45:19.497Z" }, +] + +[[package]] +name = "certifi" +version = "2026.1.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e0/2d/a891ca51311197f6ad14a7ef42e2399f36cf2f9bd44752b3dc4eab60fdc5/certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120", size = 154268, upload-time = "2026-01-04T02:42:41.825Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e6/ad/3cc14f097111b4de0040c83a525973216457bbeeb63739ef1ed275c1c021/certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c", size = 152900, upload-time = "2026-01-04T02:42:40.15Z" }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ed/27/c6491ff4954e58a10f69ad90aca8a1b6fe9c5d3c6f380907af3c37435b59/charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8", size = 206988, upload-time = "2025-10-14T04:40:33.79Z" }, + { url = "https://files.pythonhosted.org/packages/94/59/2e87300fe67ab820b5428580a53cad894272dbb97f38a7a814a2a1ac1011/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0", size = 147324, upload-time = "2025-10-14T04:40:34.961Z" }, + { url = "https://files.pythonhosted.org/packages/07/fb/0cf61dc84b2b088391830f6274cb57c82e4da8bbc2efeac8c025edb88772/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3", size = 142742, upload-time = "2025-10-14T04:40:36.105Z" }, + { url = "https://files.pythonhosted.org/packages/62/8b/171935adf2312cd745d290ed93cf16cf0dfe320863ab7cbeeae1dcd6535f/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc", size = 160863, upload-time = "2025-10-14T04:40:37.188Z" }, + { url = "https://files.pythonhosted.org/packages/09/73/ad875b192bda14f2173bfc1bc9a55e009808484a4b256748d931b6948442/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897", size = 157837, upload-time = "2025-10-14T04:40:38.435Z" }, + { url = "https://files.pythonhosted.org/packages/6d/fc/de9cce525b2c5b94b47c70a4b4fb19f871b24995c728e957ee68ab1671ea/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381", size = 151550, upload-time = "2025-10-14T04:40:40.053Z" }, + { url = "https://files.pythonhosted.org/packages/55/c2/43edd615fdfba8c6f2dfbd459b25a6b3b551f24ea21981e23fb768503ce1/charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815", size = 149162, upload-time = "2025-10-14T04:40:41.163Z" }, + { url = "https://files.pythonhosted.org/packages/03/86/bde4ad8b4d0e9429a4e82c1e8f5c659993a9a863ad62c7df05cf7b678d75/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0", size = 150019, upload-time = "2025-10-14T04:40:42.276Z" }, + { url = "https://files.pythonhosted.org/packages/1f/86/a151eb2af293a7e7bac3a739b81072585ce36ccfb4493039f49f1d3cae8c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161", size = 143310, upload-time = "2025-10-14T04:40:43.439Z" }, + { url = "https://files.pythonhosted.org/packages/b5/fe/43dae6144a7e07b87478fdfc4dbe9efd5defb0e7ec29f5f58a55aeef7bf7/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4", size = 162022, upload-time = "2025-10-14T04:40:44.547Z" }, + { url = 
"https://files.pythonhosted.org/packages/80/e6/7aab83774f5d2bca81f42ac58d04caf44f0cc2b65fc6db2b3b2e8a05f3b3/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89", size = 149383, upload-time = "2025-10-14T04:40:46.018Z" }, + { url = "https://files.pythonhosted.org/packages/4f/e8/b289173b4edae05c0dde07f69f8db476a0b511eac556dfe0d6bda3c43384/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569", size = 159098, upload-time = "2025-10-14T04:40:47.081Z" }, + { url = "https://files.pythonhosted.org/packages/d8/df/fe699727754cae3f8478493c7f45f777b17c3ef0600e28abfec8619eb49c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224", size = 152991, upload-time = "2025-10-14T04:40:48.246Z" }, + { url = "https://files.pythonhosted.org/packages/1a/86/584869fe4ddb6ffa3bd9f491b87a01568797fb9bd8933f557dba9771beaf/charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a", size = 99456, upload-time = "2025-10-14T04:40:49.376Z" }, + { url = "https://files.pythonhosted.org/packages/65/f6/62fdd5feb60530f50f7e38b4f6a1d5203f4d16ff4f9f0952962c044e919a/charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016", size = 106978, upload-time = "2025-10-14T04:40:50.844Z" }, + { url = "https://files.pythonhosted.org/packages/7a/9d/0710916e6c82948b3be62d9d398cb4fcf4e97b56d6a6aeccd66c4b2f2bd5/charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1", size = 99969, upload-time = "2025-10-14T04:40:52.272Z" }, + { url = 
"https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time = "2025-10-14T04:40:53.353Z" }, + { url = "https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, upload-time = "2025-10-14T04:40:54.558Z" }, + { url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = "2025-10-14T04:40:55.677Z" }, + { url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497, upload-time = "2025-10-14T04:40:57.217Z" }, + { url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240, upload-time = "2025-10-14T04:40:58.358Z" }, + { url = 
"https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471, upload-time = "2025-10-14T04:40:59.468Z" }, + { url = "https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864, upload-time = "2025-10-14T04:41:00.623Z" }, + { url = "https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647, upload-time = "2025-10-14T04:41:01.754Z" }, + { url = "https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110, upload-time = "2025-10-14T04:41:03.231Z" }, + { url = "https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839, upload-time = "2025-10-14T04:41:04.715Z" }, + { url = "https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667, upload-time = "2025-10-14T04:41:05.827Z" }, 
+ { url = "https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535, upload-time = "2025-10-14T04:41:06.938Z" }, + { url = "https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816, upload-time = "2025-10-14T04:41:08.101Z" }, + { url = "https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", size = 99694, upload-time = "2025-10-14T04:41:09.23Z" }, + { url = "https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 107131, upload-time = "2025-10-14T04:41:10.467Z" }, + { url = "https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390, upload-time = "2025-10-14T04:41:11.915Z" }, + { url = "https://files.pythonhosted.org/packages/97/45/4b3a1239bbacd321068ea6e7ac28875b03ab8bc0aa0966452db17cd36714/charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794", size = 208091, upload-time = "2025-10-14T04:41:13.346Z" }, + { url = 
"https://files.pythonhosted.org/packages/7d/62/73a6d7450829655a35bb88a88fca7d736f9882a27eacdca2c6d505b57e2e/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed", size = 147936, upload-time = "2025-10-14T04:41:14.461Z" }, + { url = "https://files.pythonhosted.org/packages/89/c5/adb8c8b3d6625bef6d88b251bbb0d95f8205831b987631ab0c8bb5d937c2/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72", size = 144180, upload-time = "2025-10-14T04:41:15.588Z" }, + { url = "https://files.pythonhosted.org/packages/91/ed/9706e4070682d1cc219050b6048bfd293ccf67b3d4f5a4f39207453d4b99/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328", size = 161346, upload-time = "2025-10-14T04:41:16.738Z" }, + { url = "https://files.pythonhosted.org/packages/d5/0d/031f0d95e4972901a2f6f09ef055751805ff541511dc1252ba3ca1f80cf5/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede", size = 158874, upload-time = "2025-10-14T04:41:17.923Z" }, + { url = "https://files.pythonhosted.org/packages/f5/83/6ab5883f57c9c801ce5e5677242328aa45592be8a00644310a008d04f922/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894", size = 153076, upload-time = "2025-10-14T04:41:19.106Z" }, + { url = 
"https://files.pythonhosted.org/packages/75/1e/5ff781ddf5260e387d6419959ee89ef13878229732732ee73cdae01800f2/charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1", size = 150601, upload-time = "2025-10-14T04:41:20.245Z" }, + { url = "https://files.pythonhosted.org/packages/d7/57/71be810965493d3510a6ca79b90c19e48696fb1ff964da319334b12677f0/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490", size = 150376, upload-time = "2025-10-14T04:41:21.398Z" }, + { url = "https://files.pythonhosted.org/packages/e5/d5/c3d057a78c181d007014feb7e9f2e65905a6c4ef182c0ddf0de2924edd65/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44", size = 144825, upload-time = "2025-10-14T04:41:22.583Z" }, + { url = "https://files.pythonhosted.org/packages/e6/8c/d0406294828d4976f275ffbe66f00266c4b3136b7506941d87c00cab5272/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133", size = 162583, upload-time = "2025-10-14T04:41:23.754Z" }, + { url = "https://files.pythonhosted.org/packages/d7/24/e2aa1f18c8f15c4c0e932d9287b8609dd30ad56dbe41d926bd846e22fb8d/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3", size = 150366, upload-time = "2025-10-14T04:41:25.27Z" }, + { url = "https://files.pythonhosted.org/packages/e4/5b/1e6160c7739aad1e2df054300cc618b06bf784a7a164b0f238360721ab86/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e", size = 160300, upload-time = "2025-10-14T04:41:26.725Z" }, + { url = 
"https://files.pythonhosted.org/packages/7a/10/f882167cd207fbdd743e55534d5d9620e095089d176d55cb22d5322f2afd/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc", size = 154465, upload-time = "2025-10-14T04:41:28.322Z" }, + { url = "https://files.pythonhosted.org/packages/89/66/c7a9e1b7429be72123441bfdbaf2bc13faab3f90b933f664db506dea5915/charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac", size = 99404, upload-time = "2025-10-14T04:41:29.95Z" }, + { url = "https://files.pythonhosted.org/packages/c4/26/b9924fa27db384bdcd97ab83b4f0a8058d96ad9626ead570674d5e737d90/charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14", size = 107092, upload-time = "2025-10-14T04:41:31.188Z" }, + { url = "https://files.pythonhosted.org/packages/af/8f/3ed4bfa0c0c72a7ca17f0380cd9e4dd842b09f664e780c13cff1dcf2ef1b/charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2", size = 100408, upload-time = "2025-10-14T04:41:32.624Z" }, + { url = "https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd", size = 207746, upload-time = "2025-10-14T04:41:33.773Z" }, + { url = "https://files.pythonhosted.org/packages/10/9a/97c8d48ef10d6cd4fcead2415523221624bf58bcf68a802721a6bc807c8f/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb", size = 147889, upload-time = "2025-10-14T04:41:34.897Z" }, + { url = 
"https://files.pythonhosted.org/packages/10/bf/979224a919a1b606c82bd2c5fa49b5c6d5727aa47b4312bb27b1734f53cd/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e", size = 143641, upload-time = "2025-10-14T04:41:36.116Z" }, + { url = "https://files.pythonhosted.org/packages/ba/33/0ad65587441fc730dc7bd90e9716b30b4702dc7b617e6ba4997dc8651495/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14", size = 160779, upload-time = "2025-10-14T04:41:37.229Z" }, + { url = "https://files.pythonhosted.org/packages/67/ed/331d6b249259ee71ddea93f6f2f0a56cfebd46938bde6fcc6f7b9a3d0e09/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191", size = 159035, upload-time = "2025-10-14T04:41:38.368Z" }, + { url = "https://files.pythonhosted.org/packages/67/ff/f6b948ca32e4f2a4576aa129d8bed61f2e0543bf9f5f2b7fc3758ed005c9/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838", size = 152542, upload-time = "2025-10-14T04:41:39.862Z" }, + { url = "https://files.pythonhosted.org/packages/16/85/276033dcbcc369eb176594de22728541a925b2632f9716428c851b149e83/charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6", size = 149524, upload-time = "2025-10-14T04:41:41.319Z" }, + { url = "https://files.pythonhosted.org/packages/9e/f2/6a2a1f722b6aba37050e626530a46a68f74e63683947a8acff92569f979a/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e", size = 150395, upload-time = "2025-10-14T04:41:42.539Z" }, + { url = "https://files.pythonhosted.org/packages/60/bb/2186cb2f2bbaea6338cad15ce23a67f9b0672929744381e28b0592676824/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c", size = 143680, upload-time = "2025-10-14T04:41:43.661Z" }, + { url = "https://files.pythonhosted.org/packages/7d/a5/bf6f13b772fbb2a90360eb620d52ed8f796f3c5caee8398c3b2eb7b1c60d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090", size = 162045, upload-time = "2025-10-14T04:41:44.821Z" }, + { url = "https://files.pythonhosted.org/packages/df/c5/d1be898bf0dc3ef9030c3825e5d3b83f2c528d207d246cbabe245966808d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152", size = 149687, upload-time = "2025-10-14T04:41:46.442Z" }, + { url = "https://files.pythonhosted.org/packages/a5/42/90c1f7b9341eef50c8a1cb3f098ac43b0508413f33affd762855f67a410e/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828", size = 160014, upload-time = "2025-10-14T04:41:47.631Z" }, + { url = "https://files.pythonhosted.org/packages/76/be/4d3ee471e8145d12795ab655ece37baed0929462a86e72372fd25859047c/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec", size = 154044, upload-time = "2025-10-14T04:41:48.81Z" }, + { url = "https://files.pythonhosted.org/packages/b0/6f/8f7af07237c34a1defe7defc565a9bc1807762f672c0fde711a4b22bf9c0/charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9", 
size = 99940, upload-time = "2025-10-14T04:41:49.946Z" }, + { url = "https://files.pythonhosted.org/packages/4b/51/8ade005e5ca5b0d80fb4aff72a3775b325bdc3d27408c8113811a7cbe640/charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c", size = 107104, upload-time = "2025-10-14T04:41:51.051Z" }, + { url = "https://files.pythonhosted.org/packages/da/5f/6b8f83a55bb8278772c5ae54a577f3099025f9ade59d0136ac24a0df4bde/charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2", size = 100743, upload-time = "2025-10-14T04:41:52.122Z" }, + { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" }, +] + +[[package]] +name = "click" +version = "8.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "docker" +version = "7.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pywin32", marker = "sys_platform == 'win32'" }, + { name = "requests" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/91/9b/4a2ea29aeba62471211598dac5d96825bb49348fa07e906ea930394a83ce/docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c", size = 117834, upload-time = "2024-05-23T11:13:57.216Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0", size = 147774, upload-time = "2024-05-23T11:13:55.01Z" }, +] + +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = 
"sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + +[[package]] +name = "idna" +version = "3.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = 
"sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, +] + +[[package]] +name = "markdown-it-py" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } 
+sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, +] + +[[package]] +name = "nodeenv" +version = "1.10.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/24/bf/d1bda4f6168e0b2e9e5958945e01910052158313224ada5ce1fb2e1113b8/nodeenv-1.10.0.tar.gz", hash = "sha256:996c191ad80897d076bdfba80a41994c2b47c68e224c542b48feba42ba00f8bb", size = 55611, upload-time = "2025-12-20T14:08:54.006Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/b2/d0896bdcdc8d28a7fc5717c305f1a861c26e18c05047949fb371034d98bd/nodeenv-1.10.0-py2.py3-none-any.whl", hash = "sha256:5bb13e3eed2923615535339b3c620e76779af4cb4c6a90deccc9e36b274d3827", size = 23438, upload-time = "2025-12-20T14:08:52.782Z" }, +] + +[[package]] +name = "otdf-local" +version = "0.1.0" +source = { editable = "." 
} +dependencies = [ + { name = "docker" }, + { name = "httpx" }, + { name = "pydantic-settings" }, + { name = "rich" }, + { name = "ruamel-yaml" }, + { name = "typer" }, +] + +[package.dev-dependencies] +dev = [ + { name = "pyright" }, + { name = "pytest" }, + { name = "ruff" }, +] + +[package.metadata] +requires-dist = [ + { name = "docker", specifier = ">=7.0.0" }, + { name = "httpx", specifier = ">=0.27.0" }, + { name = "pydantic-settings", specifier = ">=2.2.0" }, + { name = "rich", specifier = ">=13.7.0" }, + { name = "ruamel-yaml", specifier = ">=0.18.0" }, + { name = "typer", specifier = ">=0.12.0" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "pyright", specifier = ">=1.1.408" }, + { name = "pytest", specifier = ">=8.0.0" }, + { name = "ruff", specifier = ">=0.9.0" }, +] + +[[package]] +name = "packaging" +version = "26.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "pydantic" +version = "2.12.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.41.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e8/72/74a989dd9f2084b3d9530b0915fdda64ac48831c30dbf7c72a41a5232db8/pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6", size = 2105873, upload-time = "2025-11-04T13:39:31.373Z" }, + { url = 
"https://files.pythonhosted.org/packages/12/44/37e403fd9455708b3b942949e1d7febc02167662bf1a7da5b78ee1ea2842/pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b", size = 1899826, upload-time = "2025-11-04T13:39:32.897Z" }, + { url = "https://files.pythonhosted.org/packages/33/7f/1d5cab3ccf44c1935a359d51a8a2a9e1a654b744b5e7f80d41b88d501eec/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a", size = 1917869, upload-time = "2025-11-04T13:39:34.469Z" }, + { url = "https://files.pythonhosted.org/packages/6e/6a/30d94a9674a7fe4f4744052ed6c5e083424510be1e93da5bc47569d11810/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8", size = 2063890, upload-time = "2025-11-04T13:39:36.053Z" }, + { url = "https://files.pythonhosted.org/packages/50/be/76e5d46203fcb2750e542f32e6c371ffa9b8ad17364cf94bb0818dbfb50c/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e", size = 2229740, upload-time = "2025-11-04T13:39:37.753Z" }, + { url = "https://files.pythonhosted.org/packages/d3/ee/fed784df0144793489f87db310a6bbf8118d7b630ed07aa180d6067e653a/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1", size = 2350021, upload-time = "2025-11-04T13:39:40.94Z" }, + { url = "https://files.pythonhosted.org/packages/c8/be/8fed28dd0a180dca19e72c233cbf58efa36df055e5b9d90d64fd1740b828/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b", size = 2066378, upload-time = 
"2025-11-04T13:39:42.523Z" }, + { url = "https://files.pythonhosted.org/packages/b0/3b/698cf8ae1d536a010e05121b4958b1257f0b5522085e335360e53a6b1c8b/pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b", size = 2175761, upload-time = "2025-11-04T13:39:44.553Z" }, + { url = "https://files.pythonhosted.org/packages/b8/ba/15d537423939553116dea94ce02f9c31be0fa9d0b806d427e0308ec17145/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284", size = 2146303, upload-time = "2025-11-04T13:39:46.238Z" }, + { url = "https://files.pythonhosted.org/packages/58/7f/0de669bf37d206723795f9c90c82966726a2ab06c336deba4735b55af431/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594", size = 2340355, upload-time = "2025-11-04T13:39:48.002Z" }, + { url = "https://files.pythonhosted.org/packages/e5/de/e7482c435b83d7e3c3ee5ee4451f6e8973cff0eb6007d2872ce6383f6398/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e", size = 2319875, upload-time = "2025-11-04T13:39:49.705Z" }, + { url = "https://files.pythonhosted.org/packages/fe/e6/8c9e81bb6dd7560e33b9053351c29f30c8194b72f2d6932888581f503482/pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b", size = 1987549, upload-time = "2025-11-04T13:39:51.842Z" }, + { url = "https://files.pythonhosted.org/packages/11/66/f14d1d978ea94d1bc21fc98fcf570f9542fe55bfcc40269d4e1a21c19bf7/pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe", size = 2011305, upload-time = "2025-11-04T13:39:53.485Z" }, + { url = 
"https://files.pythonhosted.org/packages/56/d8/0e271434e8efd03186c5386671328154ee349ff0354d83c74f5caaf096ed/pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f", size = 1972902, upload-time = "2025-11-04T13:39:56.488Z" }, + { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, + { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" }, + { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, + { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" }, + { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" }, + { url = 
"https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" }, + { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" }, + { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" }, + { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" }, + { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" }, + { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" }, + { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" }, + { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" }, + { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" }, + { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, + { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, + { url = 
"https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" }, + { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" }, + { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" }, + { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = 
"2025-11-04T13:40:44.752Z" }, + { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" }, + { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" }, + { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, + { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, + { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" }, + { url = 
"https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" }, + { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" }, + { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" }, + { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" }, + { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" }, + { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = 
"2025-11-04T13:41:09.827Z" }, + { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" }, + { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" }, + { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" }, + { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" }, + { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" }, + { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" }, + { url = 
"https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" }, + { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" }, + { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" }, + { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" }, + { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" }, + { url = 
"https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" }, + { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" }, + { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" }, + { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" }, + { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" }, + { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" }, + { url = 
"https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, + { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, + { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" }, + { url = "https://files.pythonhosted.org/packages/11/72/90fda5ee3b97e51c494938a4a44c3a35a9c96c19bba12372fb9c634d6f57/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034", size = 2115441, upload-time = "2025-11-04T13:42:39.557Z" }, + { url = "https://files.pythonhosted.org/packages/1f/53/8942f884fa33f50794f119012dc6a1a02ac43a56407adaac20463df8e98f/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c", size = 1930291, upload-time = "2025-11-04T13:42:42.169Z" }, + { url = "https://files.pythonhosted.org/packages/79/c8/ecb9ed9cd942bce09fc888ee960b52654fbdbede4ba6c2d6e0d3b1d8b49c/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2", size = 1948632, upload-time = "2025-11-04T13:42:44.564Z" }, + { url = 
"https://files.pythonhosted.org/packages/2e/1b/687711069de7efa6af934e74f601e2a4307365e8fdc404703afc453eab26/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad", size = 2138905, upload-time = "2025-11-04T13:42:47.156Z" }, + { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, + { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, + { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, + { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, + { url = "https://files.pythonhosted.org/packages/5f/9b/1b3f0e9f9305839d7e84912f9e8bfbd191ed1b1ef48083609f0dabde978c/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26", size = 2101980, upload-time = "2025-11-04T13:43:25.97Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ed/d71fefcb4263df0da6a85b5d8a7508360f2f2e9b3bf5814be9c8bccdccc1/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808", size = 1923865, upload-time = "2025-11-04T13:43:28.763Z" }, + { url = "https://files.pythonhosted.org/packages/ce/3a/626b38db460d675f873e4444b4bb030453bbe7b4ba55df821d026a0493c4/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc", size = 2134256, upload-time = "2025-11-04T13:43:31.71Z" }, + { url = "https://files.pythonhosted.org/packages/83/d9/8412d7f06f616bbc053d30cb4e5f76786af3221462ad5eee1f202021eb4e/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1", size = 2174762, upload-time = "2025-11-04T13:43:34.744Z" }, + { url = "https://files.pythonhosted.org/packages/55/4c/162d906b8e3ba3a99354e20faa1b49a85206c47de97a639510a0e673f5da/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84", size = 2143141, upload-time = "2025-11-04T13:43:37.701Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f2/f11dd73284122713f5f89fc940f370d035fa8e1e078d446b3313955157fe/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770", size = 2330317, upload-time = "2025-11-04T13:43:40.406Z" }, + { url = "https://files.pythonhosted.org/packages/88/9d/b06ca6acfe4abb296110fb1273a4d848a0bfb2ff65f3ee92127b3244e16b/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f", size = 2316992, upload-time = "2025-11-04T13:43:43.602Z" }, + { url = "https://files.pythonhosted.org/packages/36/c7/cfc8e811f061c841d7990b0201912c3556bfeb99cdcb7ed24adc8d6f8704/pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51", size = 2145302, upload-time = "2025-11-04T13:43:46.64Z" }, +] + +[[package]] +name = "pydantic-settings" +version = "2.12.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/43/4b/ac7e0aae12027748076d72a8764ff1c9d82ca75a7a52622e67ed3f765c54/pydantic_settings-2.12.0.tar.gz", hash = "sha256:005538ef951e3c2a68e1c08b292b5f2e71490def8589d4221b95dab00dafcfd0", size = 194184, upload-time = "2025-11-10T14:25:47.013Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/60/5d4751ba3f4a40a6891f24eec885f51afd78d208498268c734e256fb13c4/pydantic_settings-2.12.0-py3-none-any.whl", hash = "sha256:fddb9fd99a5b18da837b29710391e945b1e30c135477f484084ee513adb93809", size = 51880, upload-time = "2025-11-10T14:25:45.546Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] 
+ +[[package]] +name = "pyright" +version = "1.1.408" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nodeenv" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/74/b2/5db700e52554b8f025faa9c3c624c59f1f6c8841ba81ab97641b54322f16/pyright-1.1.408.tar.gz", hash = "sha256:f28f2321f96852fa50b5829ea492f6adb0e6954568d1caa3f3af3a5f555eb684", size = 4400578, upload-time = "2026-01-08T08:07:38.795Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/82/a2c93e32800940d9573fb28c346772a14778b84ba7524e691b324620ab89/pyright-1.1.408-py3-none-any.whl", hash = "sha256:090b32865f4fdb1e0e6cd82bf5618480d48eecd2eb2e70f960982a3d9a4c17c1", size = 6399144, upload-time = "2026-01-08T08:07:37.082Z" }, +] + +[[package]] +name = "pytest" +version = "9.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, +] + +[[package]] +name = "python-dotenv" +version = "1.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f0/26/19cadc79a718c5edbec86fd4919a6b6d3f681039a2f6d66d14be94e75fb9/python_dotenv-1.2.1.tar.gz", hash = 
"sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6", size = 44221, upload-time = "2025-10-26T15:12:10.434Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230, upload-time = "2025-10-26T15:12:09.109Z" }, +] + +[[package]] +name = "pywin32" +version = "311" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7c/af/449a6a91e5d6db51420875c54f6aff7c97a86a3b13a0b4f1a5c13b988de3/pywin32-311-cp311-cp311-win32.whl", hash = "sha256:184eb5e436dea364dcd3d2316d577d625c0351bf237c4e9a5fabbcfa5a58b151", size = 8697031, upload-time = "2025-07-14T20:13:13.266Z" }, + { url = "https://files.pythonhosted.org/packages/51/8f/9bb81dd5bb77d22243d33c8397f09377056d5c687aa6d4042bea7fbf8364/pywin32-311-cp311-cp311-win_amd64.whl", hash = "sha256:3ce80b34b22b17ccbd937a6e78e7225d80c52f5ab9940fe0506a1a16f3dab503", size = 9508308, upload-time = "2025-07-14T20:13:15.147Z" }, + { url = "https://files.pythonhosted.org/packages/44/7b/9c2ab54f74a138c491aba1b1cd0795ba61f144c711daea84a88b63dc0f6c/pywin32-311-cp311-cp311-win_arm64.whl", hash = "sha256:a733f1388e1a842abb67ffa8e7aad0e70ac519e09b0f6a784e65a136ec7cefd2", size = 8703930, upload-time = "2025-07-14T20:13:16.945Z" }, + { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543, upload-time = "2025-07-14T20:13:20.765Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 
9495040, upload-time = "2025-07-14T20:13:22.543Z" }, + { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102, upload-time = "2025-07-14T20:13:24.682Z" }, + { url = "https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700, upload-time = "2025-07-14T20:13:26.471Z" }, + { url = "https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 9494700, upload-time = "2025-07-14T20:13:28.243Z" }, + { url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318, upload-time = "2025-07-14T20:13:30.348Z" }, + { url = "https://files.pythonhosted.org/packages/c9/31/097f2e132c4f16d99a22bfb777e0fd88bd8e1c634304e102f313af69ace5/pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee", size = 8840714, upload-time = "2025-07-14T20:13:32.449Z" }, + { url = "https://files.pythonhosted.org/packages/90/4b/07c77d8ba0e01349358082713400435347df8426208171ce297da32c313d/pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87", size = 9656800, upload-time = "2025-07-14T20:13:34.312Z" }, + { url = 
"https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540, upload-time = "2025-07-14T20:13:36.379Z" }, +] + +[[package]] +name = "requests" +version = "2.32.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, +] + +[[package]] +name = "rich" +version = "14.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a1/84/4831f881aa6ff3c976f6d6809b58cdfa350593ffc0dc3c58f5f6586780fb/rich-14.3.1.tar.gz", hash = "sha256:b8c5f568a3a749f9290ec6bddedf835cec33696bfc1e48bcfecb276c7386e4b8", size = 230125, upload-time = "2026-01-24T21:40:44.847Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/87/2a/a1810c8627b9ec8c57ec5ec325d306701ae7be50235e8fd81266e002a3cc/rich-14.3.1-py3-none-any.whl", hash = "sha256:da750b1aebbff0b372557426fb3f35ba56de8ef954b3190315eb64076d6fb54e", size = 309952, upload-time = "2026-01-24T21:40:42.969Z" }, +] + +[[package]] +name = "ruamel-yaml" +version = "0.19.1" +source = { registry = "https://pypi.org/simple" } 
+sdist = { url = "https://files.pythonhosted.org/packages/c7/3b/ebda527b56beb90cb7652cb1c7e4f91f48649fbcd8d2eb2fb6e77cd3329b/ruamel_yaml-0.19.1.tar.gz", hash = "sha256:53eb66cd27849eff968ebf8f0bf61f46cdac2da1d1f3576dd4ccee9b25c31993", size = 142709, upload-time = "2026-01-02T16:50:31.84Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b8/0c/51f6841f1d84f404f92463fc2b1ba0da357ca1e3db6b7fbda26956c3b82a/ruamel_yaml-0.19.1-py3-none-any.whl", hash = "sha256:27592957fedf6e0b62f281e96effd28043345e0e66001f97683aa9a40c667c93", size = 118102, upload-time = "2026-01-02T16:50:29.201Z" }, +] + +[[package]] +name = "ruff" +version = "0.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c8/39/5cee96809fbca590abea6b46c6d1c586b49663d1d2830a751cc8fc42c666/ruff-0.15.0.tar.gz", hash = "sha256:6bdea47cdbea30d40f8f8d7d69c0854ba7c15420ec75a26f463290949d7f7e9a", size = 4524893, upload-time = "2026-02-03T17:53:35.357Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/88/3fd1b0aa4b6330d6aaa63a285bc96c9f71970351579152d231ed90914586/ruff-0.15.0-py3-none-linux_armv6l.whl", hash = "sha256:aac4ebaa612a82b23d45964586f24ae9bc23ca101919f5590bdb368d74ad5455", size = 10354332, upload-time = "2026-02-03T17:52:54.892Z" }, + { url = "https://files.pythonhosted.org/packages/72/f6/62e173fbb7eb75cc29fe2576a1e20f0a46f671a2587b5f604bfb0eaf5f6f/ruff-0.15.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:dcd4be7cc75cfbbca24a98d04d0b9b36a270d0833241f776b788d59f4142b14d", size = 10767189, upload-time = "2026-02-03T17:53:19.778Z" }, + { url = "https://files.pythonhosted.org/packages/99/e4/968ae17b676d1d2ff101d56dc69cf333e3a4c985e1ec23803df84fc7bf9e/ruff-0.15.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d747e3319b2bce179c7c1eaad3d884dc0a199b5f4d5187620530adf9105268ce", size = 10075384, upload-time = "2026-02-03T17:53:29.241Z" }, + { url = 
"https://files.pythonhosted.org/packages/a2/bf/9843c6044ab9e20af879c751487e61333ca79a2c8c3058b15722386b8cae/ruff-0.15.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:650bd9c56ae03102c51a5e4b554d74d825ff3abe4db22b90fd32d816c2e90621", size = 10481363, upload-time = "2026-02-03T17:52:43.332Z" }, + { url = "https://files.pythonhosted.org/packages/55/d9/4ada5ccf4cd1f532db1c8d44b6f664f2208d3d93acbeec18f82315e15193/ruff-0.15.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a6664b7eac559e3048223a2da77769c2f92b43a6dfd4720cef42654299a599c9", size = 10187736, upload-time = "2026-02-03T17:53:00.522Z" }, + { url = "https://files.pythonhosted.org/packages/86/e2/f25eaecd446af7bb132af0a1d5b135a62971a41f5366ff41d06d25e77a91/ruff-0.15.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f811f97b0f092b35320d1556f3353bf238763420ade5d9e62ebd2b73f2ff179", size = 10968415, upload-time = "2026-02-03T17:53:15.705Z" }, + { url = "https://files.pythonhosted.org/packages/e7/dc/f06a8558d06333bf79b497d29a50c3a673d9251214e0d7ec78f90b30aa79/ruff-0.15.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:761ec0a66680fab6454236635a39abaf14198818c8cdf691e036f4bc0f406b2d", size = 11809643, upload-time = "2026-02-03T17:53:23.031Z" }, + { url = "https://files.pythonhosted.org/packages/dd/45/0ece8db2c474ad7df13af3a6d50f76e22a09d078af63078f005057ca59eb/ruff-0.15.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:940f11c2604d317e797b289f4f9f3fa5555ffe4fb574b55ed006c3d9b6f0eb78", size = 11234787, upload-time = "2026-02-03T17:52:46.432Z" }, + { url = "https://files.pythonhosted.org/packages/8a/d9/0e3a81467a120fd265658d127db648e4d3acfe3e4f6f5d4ea79fac47e587/ruff-0.15.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcbca3d40558789126da91d7ef9a7c87772ee107033db7191edefa34e2c7f1b4", size = 11112797, upload-time = "2026-02-03T17:52:49.274Z" }, + { url = 
"https://files.pythonhosted.org/packages/b2/cb/8c0b3b0c692683f8ff31351dfb6241047fa873a4481a76df4335a8bff716/ruff-0.15.0-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:9a121a96db1d75fa3eb39c4539e607f628920dd72ff1f7c5ee4f1b768ac62d6e", size = 11033133, upload-time = "2026-02-03T17:53:33.105Z" }, + { url = "https://files.pythonhosted.org/packages/f8/5e/23b87370cf0f9081a8c89a753e69a4e8778805b8802ccfe175cc410e50b9/ruff-0.15.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5298d518e493061f2eabd4abd067c7e4fb89e2f63291c94332e35631c07c3662", size = 10442646, upload-time = "2026-02-03T17:53:06.278Z" }, + { url = "https://files.pythonhosted.org/packages/e1/9a/3c94de5ce642830167e6d00b5c75aacd73e6347b4c7fc6828699b150a5ee/ruff-0.15.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:afb6e603d6375ff0d6b0cee563fa21ab570fd15e65c852cb24922cef25050cf1", size = 10195750, upload-time = "2026-02-03T17:53:26.084Z" }, + { url = "https://files.pythonhosted.org/packages/30/15/e396325080d600b436acc970848d69df9c13977942fb62bb8722d729bee8/ruff-0.15.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:77e515f6b15f828b94dc17d2b4ace334c9ddb7d9468c54b2f9ed2b9c1593ef16", size = 10676120, upload-time = "2026-02-03T17:53:09.363Z" }, + { url = "https://files.pythonhosted.org/packages/8d/c9/229a23d52a2983de1ad0fb0ee37d36e0257e6f28bfd6b498ee2c76361874/ruff-0.15.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6f6e80850a01eb13b3e42ee0ebdf6e4497151b48c35051aab51c101266d187a3", size = 11201636, upload-time = "2026-02-03T17:52:57.281Z" }, + { url = "https://files.pythonhosted.org/packages/6f/b0/69adf22f4e24f3677208adb715c578266842e6e6a3cc77483f48dd999ede/ruff-0.15.0-py3-none-win32.whl", hash = "sha256:238a717ef803e501b6d51e0bdd0d2c6e8513fe9eec14002445134d3907cd46c3", size = 10465945, upload-time = "2026-02-03T17:53:12.591Z" }, + { url = "https://files.pythonhosted.org/packages/51/ad/f813b6e2c97e9b4598be25e94a9147b9af7e60523b0cb5d94d307c15229d/ruff-0.15.0-py3-none-win_amd64.whl", hash = 
"sha256:dd5e4d3301dc01de614da3cdffc33d4b1b96fb89e45721f1598e5532ccf78b18", size = 11564657, upload-time = "2026-02-03T17:52:51.893Z" }, + { url = "https://files.pythonhosted.org/packages/f6/b0/2d823f6e77ebe560f4e397d078487e8d52c1516b331e3521bc75db4272ca/ruff-0.15.0-py3-none-win_arm64.whl", hash = "sha256:c480d632cc0ca3f0727acac8b7d053542d9e114a462a145d0b00e7cd658c515a", size = 10865753, upload-time = "2026-02-03T17:53:03.014Z" }, +] + +[[package]] +name = "shellingham" +version = "1.5.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310, upload-time = "2023-10-24T04:13:40.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" }, +] + +[[package]] +name = "typer" +version = "0.21.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "rich" }, + { name = "shellingham" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/36/bf/8825b5929afd84d0dabd606c67cd57b8388cb3ec385f7ef19c5cc2202069/typer-0.21.1.tar.gz", hash = "sha256:ea835607cd752343b6b2b7ce676893e5a0324082268b48f27aa058bdb7d2145d", size = 110371, upload-time = "2026-01-06T11:21:10.989Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/1d/d9257dd49ff2ca23ea5f132edf1281a0c4f9de8a762b9ae399b670a59235/typer-0.21.1-py3-none-any.whl", hash = "sha256:7985e89081c636b88d172c2ee0cfe33c253160994d47bdfdc302defd7d1f1d01", size = 47381, upload-time = "2026-01-06T11:21:09.824Z" }, +] + +[[package]] +name = 
"typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, +] + +[[package]] +name = "urllib3" +version = "2.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, +] diff --git a/otdf-sdk-mgr/README.md b/otdf-sdk-mgr/README.md new file mode 100644 index 00000000..bdc7abee --- /dev/null +++ b/otdf-sdk-mgr/README.md @@ -0,0 +1,84 @@ +# otdf-sdk-mgr + +SDK artifact management CLI for OpenTDF cross-client tests. Installs SDK CLIs from **released artifacts** (fast, deterministic) or **source** (for branch/PR testing). Both modes produce the same `sdk/{go,java,js}/dist/{version}/` directory structure. + +## Installation + +```bash +cd tests/otdf-sdk-mgr && uv tool install --editable . +``` + +## Commands + +### install + +```bash +# Install latest stable releases for all SDKs (recommended for local testing) +otdf-sdk-mgr install stable + +# Install LTS versions +otdf-sdk-mgr install lts + +# Install specific released versions +otdf-sdk-mgr install release go:v0.24.0 js:0.4.0 java:v0.9.0 + +# Install from tip of main branch (source build) +otdf-sdk-mgr install tip +otdf-sdk-mgr install tip go # Single SDK + +# Install a published version with optional dist name (defaults to version tag) +otdf-sdk-mgr install artifact --sdk go --version v0.24.0 +otdf-sdk-mgr install artifact --sdk go --version v0.24.0 --dist-name my-tag +``` + +### versions + +```bash +# List available versions +otdf-sdk-mgr versions list go --stable --latest 3 --table + +# Resolve version tags to SHAs +otdf-sdk-mgr versions resolve go main latest +``` + +### Other commands + +```bash +# Checkout SDK source +otdf-sdk-mgr checkout go main + +# Clean dist and source directories +otdf-sdk-mgr clean + +# Fix Java pom.xml after source checkout +otdf-sdk-mgr java-fixup +``` + +## How Release Installs Work + +- **Go**: Writes a `.version` file; `cli.sh`/`otdfctl.sh` use `go run 
github.com/opentdf/otdfctl@{version}` (no local compilation needed, Go caches the binary) +- **JS**: Runs `npm install @opentdf/ctl@{version}` into the dist directory; `cli.sh` uses `npx` from local `node_modules/` +- **Java**: Downloads `cmdline.jar` from GitHub Releases; `cli.sh` uses `java -jar cmdline.jar` + +## Source Builds + +Source builds (`tip` mode) delegate to `checkout-sdk-branch.sh` + `make`, which checks out source to `sdk/{lang}/src/` and compiles to `sdk/{lang}/dist/`. + +After changes to SDK source, rebuild: +```bash +otdf-sdk-mgr install tip go # or java, js + +# Or manually: checkout + make +cd sdk/go # or sdk/java, sdk/js +make + +# Verify build worked +ls -la dist/main/cli.sh +``` + +## Manual SDK Operations + +```bash +sdk/go/dist/main/cli.sh encrypt input.txt output.tdf --attr +sdk/go/dist/main/cli.sh decrypt output.tdf decrypted.txt +``` diff --git a/otdf-sdk-mgr/pyproject.toml b/otdf-sdk-mgr/pyproject.toml new file mode 100644 index 00000000..ce144e56 --- /dev/null +++ b/otdf-sdk-mgr/pyproject.toml @@ -0,0 +1,31 @@ +[project] +name = "otdf-sdk-mgr" +version = "0.1.0" +description = "SDK artifact management CLI for OpenTDF cross-client tests" +requires-python = ">=3.11" +dependencies = [ + "gitpython>=3.1.46", + "rich>=13.7.0", + "typer>=0.12.0", +] + +[dependency-groups] +dev = [ + "pyright>=1.1.408", + "pytest>=8.0.0", + "ruff>=0.9.0", +] + +[project.scripts] +otdf-sdk-mgr = "otdf_sdk_mgr.cli:app" + +[build-system] +requires = ["uv_build>=0.9.28"] +build-backend = "uv_build" + +[tool.hatch.build.targets.wheel] +packages = ["src/otdf_sdk_mgr"] + +[tool.ruff] +target-version = "py311" +line-length = 100 diff --git a/otdf-sdk-mgr/src/otdf_sdk_mgr/__init__.py b/otdf-sdk-mgr/src/otdf_sdk_mgr/__init__.py new file mode 100644 index 00000000..cd99ab09 --- /dev/null +++ b/otdf-sdk-mgr/src/otdf_sdk_mgr/__init__.py @@ -0,0 +1,3 @@ +"""SDK artifact management CLI for OpenTDF cross-client tests.""" + +__version__ = "0.1.0" diff --git 
a/otdf-sdk-mgr/src/otdf_sdk_mgr/__main__.py b/otdf-sdk-mgr/src/otdf_sdk_mgr/__main__.py new file mode 100644 index 00000000..a61eb8cd --- /dev/null +++ b/otdf-sdk-mgr/src/otdf_sdk_mgr/__main__.py @@ -0,0 +1,5 @@ +"""Allow running as `python -m otdf_sdk_mgr`.""" + +from otdf_sdk_mgr.cli import app + +app() diff --git a/otdf-sdk-mgr/src/otdf_sdk_mgr/checkout.py b/otdf-sdk-mgr/src/otdf_sdk_mgr/checkout.py new file mode 100644 index 00000000..3fc7e5c4 --- /dev/null +++ b/otdf-sdk-mgr/src/otdf_sdk_mgr/checkout.py @@ -0,0 +1,74 @@ +"""SDK source checkout using bare repos and worktrees.""" + +from __future__ import annotations + +import subprocess +import sys +from typing import Any + +from otdf_sdk_mgr.config import SDK_BARE_REPOS, SDK_DIRS, SDK_GIT_URLS + + +def _run(cmd: list[str], **kwargs: Any) -> None: + """Run a command, exiting on failure.""" + result = subprocess.run(cmd, **kwargs) + if result.returncode != 0: + print(f"Error: Command '{' '.join(cmd)}' failed.", file=sys.stderr) + sys.exit(result.returncode) + + +def checkout_sdk_branch(language: str, branch: str) -> None: + """Clone bare repo and create/update a worktree for the given branch. + + Python port of checkout-sdk-branch.sh. + """ + if language not in SDK_DIRS: + print( + f"Error: Unsupported language '{language}'. 
" + f"Supported values are: {', '.join(SDK_DIRS)}", + file=sys.stderr, + ) + sys.exit(1) + + sdk_dir = SDK_DIRS[language] + bare_repo_name = SDK_BARE_REPOS[language] + # Strip .git suffix to get the base URL for git clone + repo_url = SDK_GIT_URLS[language].removesuffix(".git") + + bare_repo_path = sdk_dir / "src" / bare_repo_name + local_name = branch.replace("/", "--") + if local_name.startswith("sdk--"): + local_name = local_name.removeprefix("sdk--") + worktree_path = sdk_dir / "src" / local_name + + if not bare_repo_path.exists(): + print(f"Cloning {repo_url} as a bare repository into {bare_repo_path}...") + _run(["git", "clone", "--bare", repo_url, str(bare_repo_path)]) + else: + print(f"Bare repository already exists at {bare_repo_path}. Fetching updates...") + _run(["git", f"--git-dir={bare_repo_path}", "fetch", "--all"]) + + if worktree_path.exists(): + print(f"Worktree for branch '{branch}' already exists at {worktree_path}. Updating...") + _run( + [ + "git", + f"--git-dir={bare_repo_path}", + f"--work-tree={worktree_path}", + "pull", + "origin", + branch, + ] + ) + else: + print(f"Setting up worktree for branch '{branch}' at {worktree_path}...") + _run( + [ + "git", + f"--git-dir={bare_repo_path}", + "worktree", + "add", + str(worktree_path), + branch, + ] + ) diff --git a/otdf-sdk-mgr/src/otdf_sdk_mgr/cli.py b/otdf-sdk-mgr/src/otdf_sdk_mgr/cli.py new file mode 100644 index 00000000..2da0edf6 --- /dev/null +++ b/otdf-sdk-mgr/src/otdf_sdk_mgr/cli.py @@ -0,0 +1,89 @@ +"""Main CLI application for otdf-sdk-mgr.""" + +from __future__ import annotations + +import shutil +from pathlib import Path +from typing import Annotated, Optional + +import typer + +from otdf_sdk_mgr.cli_install import install_app +from otdf_sdk_mgr.cli_versions import versions_app +from otdf_sdk_mgr.config import ALL_SDKS, SDK_DIRS + +app = typer.Typer( + name="otdf-sdk-mgr", + help="SDK artifact management CLI for OpenTDF cross-client tests.", + no_args_is_help=True, +) + 
+app.add_typer(install_app, name="install") +app.add_typer(versions_app, name="versions") + + +@app.command() +def checkout( + sdk: Annotated[ + Optional[str], + typer.Argument(help="SDK to checkout (go, js, java)"), + ] = None, + branch: Annotated[str, typer.Argument(help="Branch to checkout")] = "main", + all_sdks: Annotated[bool, typer.Option("--all", help="Checkout all SDKs")] = False, +) -> None: + """Clone bare repo and create/update worktree for an SDK branch.""" + from otdf_sdk_mgr.checkout import checkout_sdk_branch + + if all_sdks: + for s in ALL_SDKS: + checkout_sdk_branch(s, branch) + elif sdk: + checkout_sdk_branch(sdk, branch) + else: + typer.echo("Error: provide an SDK name or use --all", err=True) + raise typer.Exit(1) + + +@app.command() +def clean( + dist_only: Annotated[ + bool, typer.Option("--dist-only", help="Only remove dist directories") + ] = False, + src_only: Annotated[ + bool, typer.Option("--src-only", help="Only remove source worktrees") + ] = False, +) -> None: + """Remove dist directories and/or source worktrees.""" + remove_dist = not src_only + remove_src = not dist_only + + for sdk in ALL_SDKS: + sdk_dir = SDK_DIRS[sdk] + if remove_dist: + dist_dir = sdk_dir / "dist" + if dist_dir.exists(): + shutil.rmtree(dist_dir) + typer.echo(f"Removed {dist_dir}") + + if remove_src: + src_dir = sdk_dir / "src" + if src_dir.exists(): + for entry in sorted(src_dir.iterdir()): + if entry.name.endswith(".git"): + continue + if entry.is_dir(): + shutil.rmtree(entry) + typer.echo(f"Removed {entry}") + + +@app.command("java-fixup") +def java_fixup( + base_dir: Annotated[ + Optional[Path], + typer.Argument(help="Base directory for Java source trees"), + ] = None, +) -> None: + """Fix pom.xml platform.branch property in Java SDK source trees.""" + from otdf_sdk_mgr.java_fixup import post_checkout_java_fixup + + post_checkout_java_fixup(base_dir) diff --git a/otdf-sdk-mgr/src/otdf_sdk_mgr/cli_install.py b/otdf-sdk-mgr/src/otdf_sdk_mgr/cli_install.py new 
file mode 100644 index 00000000..7234e196 --- /dev/null +++ b/otdf-sdk-mgr/src/otdf_sdk_mgr/cli_install.py @@ -0,0 +1,80 @@ +"""Install subcommand group for otdf-sdk-mgr.""" + +from __future__ import annotations + +from typing import Annotated, Optional + +import typer + +from otdf_sdk_mgr.config import ALL_SDKS + +install_app = typer.Typer(help="Install SDK CLI artifacts from registries or source.") + + +@install_app.command() +def stable( + sdks: Annotated[ + Optional[list[str]], + typer.Argument(help="SDKs to install (default: all)"), + ] = None, +) -> None: + """Install latest stable releases for each SDK.""" + from otdf_sdk_mgr.installers import cmd_stable + + cmd_stable(sdks or ALL_SDKS) + + +@install_app.command() +def lts( + sdks: Annotated[ + Optional[list[str]], + typer.Argument(help="SDKs to install (default: all)"), + ] = None, +) -> None: + """Install LTS versions for each SDK.""" + from otdf_sdk_mgr.installers import cmd_lts + + cmd_lts(sdks or ALL_SDKS) + + +@install_app.command() +def tip( + sdks: Annotated[ + Optional[list[str]], + typer.Argument(help="SDKs to build from source (default: all)"), + ] = None, +) -> None: + """Source checkout + build from main.""" + from otdf_sdk_mgr.installers import cmd_tip + + cmd_tip(sdks or ALL_SDKS) + + +@install_app.command() +def release( + specs: Annotated[ + list[str], + typer.Argument(help="Version specs as SDK:VERSION (e.g., go:v0.24.0)"), + ], +) -> None: + """Install specific released versions.""" + from otdf_sdk_mgr.installers import cmd_release + + cmd_release(specs) + + +@install_app.command() +def artifact( + sdk: Annotated[str, typer.Option(help="SDK to install")] = "", + version: Annotated[str, typer.Option(help="Version to install")] = "", + dist_name: Annotated[ + Optional[str], typer.Option("--dist-name", help="Override dist directory name") + ] = None, +) -> None: + """Install a single SDK version (used by CI).""" + if not sdk or not version: + typer.echo("Error: --sdk and --version are 
required", err=True) + raise typer.Exit(1) + from otdf_sdk_mgr.installers import cmd_install + + cmd_install(sdk, version, dist_name=dist_name) diff --git a/otdf-sdk-mgr/src/otdf_sdk_mgr/cli_versions.py b/otdf-sdk-mgr/src/otdf_sdk_mgr/cli_versions.py new file mode 100644 index 00000000..19a614ae --- /dev/null +++ b/otdf-sdk-mgr/src/otdf_sdk_mgr/cli_versions.py @@ -0,0 +1,129 @@ +"""Versions subcommand group for otdf-sdk-mgr.""" + +from __future__ import annotations + +import json +from typing import Annotated, Any, Optional + +import typer +from rich.console import Console +from rich.table import Table + +from otdf_sdk_mgr.config import SDK_GIT_URLS, SDK_TAG_INFIXES +from otdf_sdk_mgr.resolve import ResolveResult + +versions_app = typer.Typer(help="Query SDK version registries.") + + +@versions_app.command("list") +def list_versions( + sdk: Annotated[ + str, + typer.Argument(help="SDK to query (go, js, java, all)"), + ] = "all", + stable: Annotated[bool, typer.Option("--stable", help="Only stable versions")] = False, + latest: Annotated[ + Optional[int], typer.Option("--latest", help="Show only N most recent versions") + ] = None, + releases: Annotated[ + bool, typer.Option("--releases", help="Include GitHub Releases info for Java") + ] = False, + output_json: Annotated[bool, typer.Option("--json", help="JSON output (default)")] = False, + output_table: Annotated[ + bool, typer.Option("--table", help="Human-readable Rich table output") + ] = False, +) -> None: + """List available released versions of SDK CLIs.""" + from otdf_sdk_mgr.registry import ( + apply_filters, + list_go_versions, + list_java_github_releases, + list_java_maven_versions, + list_js_versions, + ) + + sdks = ["go", "js", "java"] if sdk == "all" else [sdk] + all_entries: list[dict[str, Any]] = [] + + for s in sdks: + if s == "go": + entries = list_go_versions() + all_entries.extend(apply_filters(entries, stable_only=stable, latest_n=latest)) + elif s == "js": + entries = list_js_versions() + 
all_entries.extend(apply_filters(entries, stable_only=stable, latest_n=latest)) + elif s == "java": + maven_entries = list_java_maven_versions() + all_entries.extend(apply_filters(maven_entries, stable_only=stable, latest_n=latest)) + if releases: + gh_entries = list_java_github_releases() + all_entries.extend(apply_filters(gh_entries, stable_only=stable, latest_n=latest)) + + if output_table: + _print_rich_table(all_entries) + else: + print(json.dumps(all_entries, indent=2)) + + +def _print_rich_table(entries: list[dict[str, Any]]) -> None: + """Print entries as a Rich table.""" + if not entries: + Console().print("[dim](no results)[/dim]") + return + + table = Table(show_header=True, header_style="bold") + table.add_column("SDK", width=6) + table.add_column("VERSION", width=20) + table.add_column("SOURCE", width=16) + table.add_column("STABLE", width=7) + table.add_column("HAS_CLI", width=8) + table.add_column("INSTALL_METHOD", min_width=40) + + for entry in entries: + stable_val = entry.get("stable", "") + stable_str = "yes" if stable_val else "no" if stable_val is not None else "" + has_cli_val = entry.get("has_cli", "") + has_cli_str = "yes" if has_cli_val else "no" if has_cli_val is not None else "" + table.add_row( + str(entry.get("sdk", "")), + str(entry.get("version", "")), + str(entry.get("source", "")), + stable_str, + has_cli_str, + str(entry.get("install_method", "")), + ) + + Console().print(table) + + +@versions_app.command("resolve") +def resolve_versions( + sdk: Annotated[str, typer.Argument(help="SDK to resolve (go, js, java, platform)")], + tags: Annotated[list[str], typer.Argument(help="Version tags to resolve")], +) -> None: + """Resolve version tags to git SHAs.""" + from otdf_sdk_mgr.resolve import ( + is_resolve_success, + lookup_additional_options, + resolve, + ) + + if sdk not in SDK_GIT_URLS: + typer.echo(f"Unknown SDK: {sdk}", err=True) + raise typer.Exit(2) + infix = SDK_TAG_INFIXES.get(sdk) + + results: list[ResolveResult] = [] + shas: 
set[str] = set() + for version in tags: + v = resolve(sdk, version, infix) + if is_resolve_success(v): + env = lookup_additional_options(sdk, v["tag"]) + if env: + v["env"] = env + if v["sha"] in shas: + continue + shas.add(v["sha"]) + results.append(v) + + print(json.dumps(results)) diff --git a/otdf-sdk-mgr/src/otdf_sdk_mgr/config.py b/otdf-sdk-mgr/src/otdf_sdk_mgr/config.py new file mode 100644 index 00000000..d754131c --- /dev/null +++ b/otdf-sdk-mgr/src/otdf_sdk_mgr/config.py @@ -0,0 +1,100 @@ +"""Constants and path discovery for SDK management.""" + +from __future__ import annotations + +from pathlib import Path + +# Discover the tests/ directory by walking up from this package +# In CI, the checkout may be named otdftests/ instead of tests/ +# Look for the xtest/sdk structure to identify the root +_PACKAGE_DIR = Path(__file__).resolve().parent +_TESTS_DIR = _PACKAGE_DIR +while _TESTS_DIR != _TESTS_DIR.parent: + if (_TESTS_DIR / "xtest" / "sdk").exists(): + break + _TESTS_DIR = _TESTS_DIR.parent + +SDK_DIR = _TESTS_DIR / "xtest" / "sdk" +if not SDK_DIR.exists(): + raise RuntimeError( + f"Could not locate xtest/sdk directory. " + f"Started from {_PACKAGE_DIR}, walked up to {_TESTS_DIR}. " + f"Expected to find xtest/sdk structure in repository root." 
+ ) +GO_DIR = SDK_DIR / "go" +JS_DIR = SDK_DIR / "js" +JAVA_DIR = SDK_DIR / "java" +SCRIPTS_DIR = SDK_DIR / "scripts" + +SDK_DIRS: dict[str, Path] = { + "go": GO_DIR, + "js": JS_DIR, + "java": JAVA_DIR, +} + +# Git repository URLs (unified from resolve-version.py sdk_urls + list-versions.py sdk_git_urls) +SDK_GIT_URLS: dict[str, str] = { + "go": "https://github.com/opentdf/otdfctl.git", + "java": "https://github.com/opentdf/java-sdk.git", + "js": "https://github.com/opentdf/web-sdk.git", + "platform": "https://github.com/opentdf/platform.git", +} + +SDK_NPM_PACKAGES: dict[str, str] = { + "js": "@opentdf/ctl", +} + +SDK_MAVEN_COORDS: dict[str, dict[str, str]] = { + "java": { + "group": "io.opentdf.platform", + "artifact": "sdk", + "base_url": "https://repo1.maven.org/maven2/io/opentdf/platform/sdk", + }, +} + +SDK_GITHUB_REPOS: dict[str, str] = { + "java": "opentdf/java-sdk", +} + +GO_INSTALL_PREFIX = "go run github.com/opentdf/otdfctl" + +LTS_VERSIONS: dict[str, str] = { + "go": "0.24.0", + "java": "0.9.0", + "js": "0.4.0", + "platform": "0.9.0", +} + +# Java SDK version -> compatible platform protocol branch +# Must stay in sync with resolve-version.py lookup_additional_options +JAVA_PLATFORM_BRANCH_MAP: dict[str, str] = { + "0.7.8": "protocol/go/v0.2.29", + "0.7.7": "protocol/go/v0.2.29", + "0.7.6": "protocol/go/v0.2.25", + "0.7.5": "protocol/go/v0.2.18", + "0.7.4": "protocol/go/v0.2.18", + "0.7.3": "protocol/go/v0.2.17", + "0.7.2": "protocol/go/v0.2.17", + "0.6.1": "protocol/go/v0.2.14", + "0.6.0": "protocol/go/v0.2.14", + "0.5.0": "protocol/go/v0.2.13", + "0.4.0": "protocol/go/v0.2.10", + "0.3.0": "protocol/go/v0.2.10", + "0.2.0": "protocol/go/v0.2.10", + "0.1.0": "protocol/go/v0.2.3", +} + +# Bare repo names per SDK (used by checkout) +SDK_BARE_REPOS: dict[str, str] = { + "go": "otdfctl.git", + "java": "java-sdk.git", + "js": "web-sdk.git", +} + +# Tag infixes for monorepo tag resolution +SDK_TAG_INFIXES: dict[str, str] = { + "js": "sdk", + "platform": 
"service", +} + +ALL_SDKS = ["go", "js", "java"] diff --git a/otdf-sdk-mgr/src/otdf_sdk_mgr/installers.py b/otdf-sdk-mgr/src/otdf_sdk_mgr/installers.py new file mode 100644 index 00000000..326f48fb --- /dev/null +++ b/otdf-sdk-mgr/src/otdf_sdk_mgr/installers.py @@ -0,0 +1,216 @@ +"""SDK CLI installation functions.""" + +from __future__ import annotations + +import shutil +import subprocess +import sys +import urllib.error +import urllib.request +from pathlib import Path + +from otdf_sdk_mgr.config import ( + GO_DIR, + JAVA_DIR, + JS_DIR, + LTS_VERSIONS, + SDK_DIRS, + SCRIPTS_DIR, +) +from otdf_sdk_mgr.registry import list_go_versions, list_java_github_releases, list_js_versions +from otdf_sdk_mgr.semver import normalize_version + + +def install_go_release(version: str, dist_dir: Path) -> None: + """Install a Go CLI release by writing a .version file. + + The cli.sh and otdfctl.sh wrappers read .version and use + `go run github.com/opentdf/otdfctl@{version}` instead of a local binary. + """ + dist_dir.mkdir(parents=True, exist_ok=True) + tag = normalize_version(version) + (dist_dir / ".version").write_text(f"{tag}\n") + shutil.copy(GO_DIR / "cli.sh", dist_dir / "cli.sh") + shutil.copy(GO_DIR / "otdfctl.sh", dist_dir / "otdfctl.sh") + shutil.copy(GO_DIR / "opentdfctl.yaml", dist_dir / "opentdfctl.yaml") + print(f" Pre-warming Go cache for otdfctl@{tag}...") + result = subprocess.run( + ["go", "install", f"github.com/opentdf/otdfctl@{tag}"], + capture_output=True, + text=True, + ) + if result.returncode != 0: + print( + f" Warning: go install pre-warm failed (will retry at runtime): {result.stderr.strip()}" + ) + print(f" Go release {tag} installed to {dist_dir}") + + +def install_js_release(version: str, dist_dir: Path) -> None: + """Install a JS CLI release from npm registry.""" + dist_dir.mkdir(parents=True, exist_ok=True) + shutil.copy(JS_DIR / "cli.sh", dist_dir / "cli.sh") + # Strip infix prefix (e.g., "sdk/v0.4.0" -> "v0.4.0") for npm install + v = 
version.split("/")[-1].lstrip("v") + print(f" Installing @opentdf/ctl@{v} from npm...") + subprocess.check_call( + ["npm", "install", f"@opentdf/ctl@{v}"], + cwd=dist_dir, + ) + print(f" JS release {v} installed to {dist_dir}") + + +def install_java_release(version: str, dist_dir: Path) -> None: + """Install a Java CLI release by downloading cmdline.jar from GitHub Releases. + + Falls back gracefully if the artifact is not available - the caller can + then build from source instead. + """ + tag = normalize_version(version) + url = f"https://github.com/opentdf/java-sdk/releases/download/{tag}/cmdline.jar" + + # Check if artifact exists before trying to download + try: + req = urllib.request.Request(url, method="HEAD") + urllib.request.urlopen(req, timeout=10) + except urllib.error.HTTPError as e: + if e.code == 404: + print(f" Warning: cmdline.jar not found for {tag}.", file=sys.stderr) + print(f" The release {tag} does not include a CLI artifact.", file=sys.stderr) + print(" This version will need to be built from source.", file=sys.stderr) + print( + f" Check: https://github.com/opentdf/java-sdk/releases/tag/{tag}", + file=sys.stderr, + ) + # Clean up partial dist dir if it was created + if dist_dir.exists(): + shutil.rmtree(dist_dir) + # Exit with error so caller knows to fall back to source build + sys.exit(1) + raise + except Exception as e: + print(f" Warning: Could not verify artifact availability: {e}", file=sys.stderr) + # Proceed with download attempt anyway + pass + + # Artifact exists, proceed with download + dist_dir.mkdir(parents=True, exist_ok=True) + shutil.copy(JAVA_DIR / "cli.sh", dist_dir / "cli.sh") + jar_path = dist_dir / "cmdline.jar" + print(f" Downloading cmdline.jar from {url}...") + try: + urllib.request.urlretrieve(url, jar_path) + except urllib.error.HTTPError as e: + if e.code == 404: + print(f" Error: cmdline.jar not found for {tag} (race condition?).", file=sys.stderr) + sys.exit(1) + raise + print(f" Java release {tag} installed to 
{dist_dir}") + + +INSTALLERS = { + "go": install_go_release, + "js": install_js_release, + "java": install_java_release, +} + + +def install_release(sdk: str, version: str, dist_name: str | None = None) -> Path: + """Install a released version of an SDK CLI. + + Args: + sdk: One of "go", "js", "java" + version: Version string (e.g., "v0.24.0" or "0.24.0") + dist_name: Override the dist directory name (defaults to normalized version) + + Returns: + Path to the created dist directory + """ + if sdk not in INSTALLERS: + print( + f"Error: Unknown SDK '{sdk}'. Must be one of: {', '.join(INSTALLERS)}", + file=sys.stderr, + ) + sys.exit(1) + + name = dist_name or normalize_version(version) + dist_dir = SDK_DIRS[sdk] / "dist" / name + if dist_dir.exists(): + print(f" Dist directory already exists: {dist_dir} (skipping)") + return dist_dir + + INSTALLERS[sdk](version, dist_dir) + return dist_dir + + +def latest_stable_version(sdk: str) -> str | None: + """Find the latest stable version for an SDK that has a CLI available.""" + if sdk == "go": + versions = list_go_versions() + stable = [v for v in versions if v.get("stable", False)] + return stable[-1]["version"] if stable else None + elif sdk == "js": + versions = list_js_versions() + stable = [v for v in versions if v.get("stable", False)] + return stable[-1]["version"] if stable else None + elif sdk == "java": + releases = list_java_github_releases() + stable_with_cli = [ + v for v in releases if v.get("stable", False) and v.get("has_cli", False) + ] + return stable_with_cli[-1]["version"] if stable_with_cli else None + return None + + +def cmd_stable(sdks: list[str]) -> None: + """Install the latest stable release for each SDK.""" + for sdk in sdks: + print(f"Finding latest stable {sdk} release...") + version = latest_stable_version(sdk) + if version is None: + print(f" Warning: No stable version found for {sdk}, skipping") + continue + print(f" Latest stable {sdk}: {version}") + install_release(sdk, version) + + +def 
cmd_lts(sdks: list[str]) -> None: + """Install LTS versions for each SDK.""" + for sdk in sdks: + version = LTS_VERSIONS.get(sdk) + if version is None: + print(f" Warning: No LTS version defined for {sdk}, skipping") + continue + print(f"Installing LTS {sdk} {version}...") + install_release(sdk, version) + + +def cmd_tip(sdks: list[str]) -> None: + """Delegate to source checkout + make for head builds.""" + for sdk in sdks: + print(f"Checking out and building {sdk} from source...") + checkout_script = SCRIPTS_DIR / "checkout-sdk-branch.sh" + subprocess.check_call([str(checkout_script), sdk, "main"]) + make_dir = SDK_DIRS[sdk] + subprocess.check_call(["make"], cwd=make_dir) + print(f" {sdk} built from source") + + +def cmd_release(specs: list[str]) -> None: + """Install specific released versions from sdk:version specs.""" + for spec in specs: + if ":" not in spec: + print( + f"Error: Invalid spec '{spec}'. Use format sdk:version (e.g., go:v0.24.0)", + file=sys.stderr, + ) + sys.exit(1) + sdk, version = spec.split(":", 1) + print(f"Installing {sdk} {version} from registry...") + install_release(sdk, version) + + +def cmd_install(sdk: str, version: str, dist_name: str | None = None) -> None: + """Install a single SDK version (used by CI action).""" + print(f"Installing {sdk} {version}...") + install_release(sdk, version, dist_name=dist_name) diff --git a/otdf-sdk-mgr/src/otdf_sdk_mgr/java_fixup.py b/otdf-sdk-mgr/src/otdf_sdk_mgr/java_fixup.py new file mode 100644 index 00000000..e019aad3 --- /dev/null +++ b/otdf-sdk-mgr/src/otdf_sdk_mgr/java_fixup.py @@ -0,0 +1,92 @@ +"""Post-checkout fixups for Java SDK source trees.""" + +from __future__ import annotations + +import re +from pathlib import Path + +from otdf_sdk_mgr.config import JAVA_DIR, JAVA_PLATFORM_BRANCH_MAP + + +def _get_platform_branch(version: str) -> str: + """Map Java SDK version to compatible platform protocol branch.""" + return JAVA_PLATFORM_BRANCH_MAP.get(version, "main") + + +def 
post_checkout_java_fixup(base_dir: Path | None = None) -> None: + """Fix pom.xml platform.branch property in Java SDK source trees. + + Python port of post-checkout-java.sh. + """ + if base_dir is None: + base_dir = JAVA_DIR / "src" + + if not base_dir.exists(): + print(f"Base directory {base_dir} does not exist, nothing to fix.") + return + + for src_dir in sorted(base_dir.iterdir()): + if not src_dir.is_dir() or src_dir.name.endswith(".git"): + continue + + pom_file = src_dir / "sdk" / "pom.xml" + if not pom_file.exists(): + print(f"No pom.xml file found in {src_dir}, skipping.") + continue + + # Extract version from directory name (e.g., "v0.7.5" -> "0.7.5") + dir_name = src_dir.name + version = dir_name.lstrip("v") + platform_branch = _get_platform_branch(version) + + pom_content = pom_file.read_text() + + # Check if the correct platform.branch is already set + if f"{platform_branch}" in pom_content: + print(f"platform.branch already set to {platform_branch} in {pom_file}, skipping.") + continue + + # If we don't have a specific mapping (defaults to "main"), + # check if there's already a valid protocol/go branch set + if platform_branch == "main": + match = re.search(r"([^<]*)", pom_content) + if match and match.group(1).startswith("protocol/go/"): + print( + f"platform.branch already set to {match.group(1)} in {pom_file} " + f"(no mapping for version {version}), skipping." 
+ ) + continue + + print(f"Updating {pom_file} (version={version}, platform.branch={platform_branch})...") + + if "" in pom_content: + # Replace existing platform.branch value + pom_content = re.sub( + r"[^<]*", + f"{platform_branch}", + pom_content, + ) + print(f"Updated existing platform.branch to {platform_branch} in {pom_file}") + elif "" in pom_content: + # Add the platform.branch property after + pom_content = pom_content.replace( + "", + f"\n {platform_branch}", + ) + # Replace hardcoded branch=main with branch=${platform.branch} + pom_content = pom_content.replace("branch=main", "branch=${platform.branch}") + print( + f"Added platform.branch={platform_branch} " + f"and updated branch references in {pom_file}" + ) + else: + # No section, directly replace branch=main + pom_content = pom_content.replace("branch=main", f"branch={platform_branch}") + print( + f"No section, directly replaced branch=main " + f"with branch={platform_branch} in {pom_file}" + ) + + pom_file.write_text(pom_content) + + print("Update complete.") diff --git a/otdf-sdk-mgr/src/otdf_sdk_mgr/registry.py b/otdf-sdk-mgr/src/otdf_sdk_mgr/registry.py new file mode 100644 index 00000000..009a2550 --- /dev/null +++ b/otdf-sdk-mgr/src/otdf_sdk_mgr/registry.py @@ -0,0 +1,178 @@ +"""Registry queries for SDK version discovery.""" + +from __future__ import annotations + +import json +import re +import sys +import urllib.error +import urllib.request +from typing import Any + +from otdf_sdk_mgr.config import ( + GO_INSTALL_PREFIX, + SDK_GITHUB_REPOS, + SDK_GIT_URLS, + SDK_MAVEN_COORDS, + SDK_NPM_PACKAGES, +) +from otdf_sdk_mgr.semver import is_stable, parse_semver, semver_sort_key + + +def fetch_json(url: str) -> Any: + """Fetch JSON from a URL.""" + req = urllib.request.Request(url, headers={"Accept": "application/json"}) + with urllib.request.urlopen(req, timeout=30) as resp: + return json.loads(resp.read().decode()) + + +def fetch_text(url: str) -> str: + """Fetch text content from a URL.""" + req = 
urllib.request.Request(url) + with urllib.request.urlopen(req, timeout=30) as resp: + return resp.read().decode() + + +def list_go_versions() -> list[dict[str, Any]]: + """List Go SDK versions from git tags.""" + from git import Git + + repo = Git() + raw = repo.ls_remote(SDK_GIT_URLS["go"], tags=True) + results = [] + for line in raw.strip().split("\n"): + if not line: + continue + _, ref = line.split("\t", 1) + if ref.endswith("^{}"): + continue + tag = ref.removeprefix("refs/tags/") + if not parse_semver(tag): + continue + version = tag + results.append( + { + "sdk": "go", + "version": version, + "source": "git-tag", + "install_method": f"{GO_INSTALL_PREFIX}@{version}", + "stable": is_stable(version), + } + ) + results.sort(key=lambda r: semver_sort_key(r["version"])) + return results + + +def list_js_versions() -> list[dict[str, Any]]: + """List JS SDK versions from npm registry.""" + package = SDK_NPM_PACKAGES["js"] + url = f"https://registry.npmjs.org/{package}" + try: + data = fetch_json(url) + except urllib.error.URLError as e: + print(f"Warning: failed to fetch npm registry: {e}", file=sys.stderr) + return [] + + dist_tags: dict[str, str] = data.get("dist-tags", {}) + tag_lookup: dict[str, list[str]] = {} + for tag_name, tag_version in dist_tags.items(): + tag_lookup.setdefault(tag_version, []).append(tag_name) + + versions_dict: dict[str, Any] = data.get("versions", {}) + results = [] + for version in versions_dict: + if not parse_semver(version): + continue + entry: dict[str, Any] = { + "sdk": "js", + "version": version, + "source": "npm", + "install_method": f"npx {package}@{version}", + "stable": is_stable(version), + } + if version in tag_lookup: + entry["dist_tags"] = tag_lookup[version] + results.append(entry) + results.sort(key=lambda r: semver_sort_key(r["version"])) + return results + + +def list_java_maven_versions() -> list[dict[str, Any]]: + """List Java SDK versions from Maven Central metadata.""" + coords = SDK_MAVEN_COORDS["java"] + url = 
f"{coords['base_url']}/maven-metadata.xml" + try: + xml_text = fetch_text(url) + except urllib.error.URLError as e: + print(f"Warning: failed to fetch Maven metadata: {e}", file=sys.stderr) + return [] + + versions = re.findall(r"([^<]+)", xml_text) + results = [] + for version in versions: + if not parse_semver(version): + continue + results.append( + { + "sdk": "java", + "version": version, + "source": "maven", + "stable": is_stable(version), + "has_cli": False, + } + ) + results.sort(key=lambda r: semver_sort_key(r["version"])) + return results + + +def list_java_github_releases() -> list[dict[str, Any]]: + """List Java SDK versions from GitHub Releases (checks for cmdline.jar).""" + repo = SDK_GITHUB_REPOS["java"] + results = [] + page = 1 + while True: + url = f"https://api.github.com/repos/{repo}/releases?per_page=100&page={page}" + try: + releases = fetch_json(url) + except urllib.error.URLError as e: + print(f"Warning: failed to fetch GitHub releases: {e}", file=sys.stderr) + break + if not releases: + break + for release in releases: + tag = release.get("tag_name", "") + if not parse_semver(tag): + continue + assets = release.get("assets", []) + asset_names = [a["name"] for a in assets] + has_cli = any("cmdline" in name for name in asset_names) + entry: dict[str, Any] = { + "sdk": "java", + "version": tag, + "source": "github-release", + "stable": is_stable(tag) and not release.get("prerelease", False), + } + if asset_names: + entry["artifacts"] = asset_names + entry["has_cli"] = has_cli + if has_cli: + cli_asset = next(a for a in assets if "cmdline" in a["name"]) + entry["install_method"] = f"download from {cli_asset['browser_download_url']}" + results.append(entry) + page += 1 + results.sort(key=lambda r: semver_sort_key(r["version"])) + return results + + +def apply_filters( + entries: list[dict[str, Any]], + *, + stable_only: bool = False, + latest_n: int | None = None, +) -> list[dict[str, Any]]: + """Filter version entries by stability and count.""" 
+ if stable_only: + entries = [e for e in entries if e.get("stable", False)] + if latest_n is not None: + entries = entries[-latest_n:] + return entries diff --git a/otdf-sdk-mgr/src/otdf_sdk_mgr/resolve.py b/otdf-sdk-mgr/src/otdf_sdk_mgr/resolve.py new file mode 100644 index 00000000..7ad335bb --- /dev/null +++ b/otdf-sdk-mgr/src/otdf_sdk_mgr/resolve.py @@ -0,0 +1,301 @@ +"""Version resolution for SDK tags/branches/SHAs.""" + +from __future__ import annotations + +import json +import re +import sys +from typing import NotRequired, TypedDict, TypeGuard + +from git import Git + +from otdf_sdk_mgr.config import ( + JAVA_PLATFORM_BRANCH_MAP, + LTS_VERSIONS, + SDK_GIT_URLS, + SDK_TAG_INFIXES, +) + + +class ResolveSuccess(TypedDict): + sdk: str + alias: str + env: NotRequired[str] + head: NotRequired[bool] + pr: NotRequired[str] + release: NotRequired[str] + sha: str + tag: str + + +class ResolveError(TypedDict): + sdk: str + alias: str + err: str + + +ResolveResult = ResolveSuccess | ResolveError + + +def is_resolve_error(val: ResolveResult) -> TypeGuard[ResolveError]: + """Check if the given value is a ResolveError type.""" + return "err" in val + + +def is_resolve_success(val: ResolveResult) -> TypeGuard[ResolveSuccess]: + """Check if the given value is a ResolveSuccess type.""" + return "err" not in val and "sha" in val and "tag" in val + + +MERGE_QUEUE_REGEX = ( + r"^refs/heads/gh-readonly-queue/(?P[^/]+)/pr-(?P\d+)-(?P[a-f0-9]{40})$" +) + +SHA_REGEX = r"^[a-f0-9]{7,40}$" + + +def lookup_additional_options(sdk: str, version: str) -> str | None: + """Look up additional build options for a given SDK version.""" + if sdk != "java": + return None + if version.startswith("v"): + version = version[1:] + branch = JAVA_PLATFORM_BRANCH_MAP.get(version) + if branch: + return f"PLATFORM_BRANCH={branch}" + return None + + +def resolve(sdk: str, version: str, infix: str | None) -> ResolveResult: + """Resolve a version spec to a concrete SHA and tag.""" + sdk_url = 
SDK_GIT_URLS[sdk] + try: + repo = Git() + if version == "main" or version == "refs/heads/main": + all_heads = [r.split("\t") for r in repo.ls_remote(sdk_url, heads=True).split("\n")] + sha, _ = [tag for tag in all_heads if "refs/heads/main" in tag][0] + return { + "sdk": sdk, + "alias": version, + "head": True, + "sha": sha, + "tag": "main", + } + + if re.match(SHA_REGEX, version): + ls_remote = [r.split("\t") for r in repo.ls_remote(sdk_url).split("\n")] + matching_tags = [(sha, tag) for (sha, tag) in ls_remote if sha.startswith(version)] + if not matching_tags: + return { + "sdk": sdk, + "alias": version[:7], + "sha": version, + "tag": version, + } + if len(matching_tags) > 1: + for sha, tag in matching_tags: + if tag.startswith("refs/pull/"): + pr_number = tag.split("/")[2] + return { + "sdk": sdk, + "alias": version, + "head": True, + "sha": sha, + "tag": f"pull-{pr_number}", + } + for sha, tag in matching_tags: + mq_match = re.match(MERGE_QUEUE_REGEX, tag) + if mq_match: + to_branch = mq_match.group("branch") + pr_number = mq_match.group("pr_number") + if to_branch and pr_number: + return { + "sdk": sdk, + "alias": version, + "head": True, + "pr": pr_number, + "sha": sha, + "tag": f"mq-{to_branch}-{pr_number}", + } + suffix = tag.split("refs/heads/gh-readonly-queue/")[-1] + flattag = "mq--" + suffix.replace("/", "--") + return { + "sdk": sdk, + "alias": version, + "head": True, + "sha": sha, + "tag": flattag, + } + head = False + if tag.startswith("refs/heads/"): + head = True + tag = tag.split("refs/heads/")[-1] + flattag = tag.replace("/", "--") + return { + "sdk": sdk, + "alias": version, + "head": head, + "sha": sha, + "tag": flattag, + } + + return { + "sdk": sdk, + "alias": version, + "err": ( + f"SHA {version} points to multiple tags, unable to differentiate: " + f"{', '.join(tag for _, tag in matching_tags)}" + ), + } + (sha, tag) = matching_tags[0] + if tag.startswith("refs/tags/"): + tag = tag.split("refs/tags/")[-1] + if infix: + tag = 
tag.split(f"{infix}/")[-1] + return { + "sdk": sdk, + "alias": version, + "sha": sha, + "tag": tag, + } + + if version.startswith("refs/pull/"): + merge_heads = [ + r.split("\t") for r in repo.ls_remote(sdk_url).split("\n") if r.endswith(version) + ] + pr_number = version.split("/")[2] + if not merge_heads: + return { + "sdk": sdk, + "alias": version, + "err": f"pull request {pr_number} not found in {sdk_url}", + } + sha, _ = merge_heads[0] + return { + "sdk": sdk, + "alias": version, + "head": True, + "pr": pr_number, + "sha": sha, + "tag": f"pull-{pr_number}", + } + + remote_tags = [r.split("\t") for r in repo.ls_remote(sdk_url).split("\n")] + all_listed_tags = [ + (sha, tag.split("refs/tags/")[-1]) for (sha, tag) in remote_tags if "refs/tags/" in tag + ] + + all_listed_branches = { + tag.split("refs/heads/")[-1]: sha + for (sha, tag) in remote_tags + if tag.startswith("refs/heads/") + } + + if version in all_listed_branches: + sha = all_listed_branches[version] + return { + "sdk": sdk, + "alias": version, + "head": True, + "sha": sha, + "tag": version, + } + + if infix and version.startswith(f"{infix}/"): + version = version.split(f"{infix}/")[-1] + + listed_tags = all_listed_tags + if infix: + listed_tags = [ + (sha, tag.split(f"{infix}/")[-1]) + for (sha, tag) in listed_tags + if f"{infix}/" in tag + ] + semver_regex = r"v?\d+\.\d+\.\d+$" + listed_tags = [(sha, tag) for (sha, tag) in listed_tags if re.search(semver_regex, tag)] + listed_tags.sort(key=lambda item: list(map(int, item[1].strip("v").split(".")))) + alias = version + matching_tags = [] + if version == "latest": + # For Java, check if CLI artifacts are available and fall back to source build if not + if sdk == "java": + from otdf_sdk_mgr.registry import list_java_github_releases + + gh_releases = list_java_github_releases() + # Find the latest version with CLI artifact + versions_with_cli = [r for r in gh_releases if r.get("has_cli", False)] + if versions_with_cli: + # Use the latest version that 
has CLI + latest_with_cli_tag = versions_with_cli[-1]["version"] + matching_tags = [ + (sha, tag) + for (sha, tag) in listed_tags + if tag in [latest_with_cli_tag, latest_with_cli_tag.lstrip("v")] + ] + if not matching_tags: + # No versions with CLI found, fall back to building latest from source + sha, tag = listed_tags[-1] + return { + "sdk": sdk, + "alias": alias, + "head": True, # Mark as head to trigger source checkout + "sha": sha, + "tag": tag, + } + else: + matching_tags = listed_tags[-1:] + else: + if version == "lts": + version = LTS_VERSIONS[sdk] + matching_tags = [ + (sha, tag) for (sha, tag) in listed_tags if tag in [version, f"v{version}"] + ] + if not matching_tags: + raise ValueError(f"Tag [{version}] not found in [{sdk_url}]") + sha, tag = matching_tags[-1] + release = tag + if infix: + release = f"{infix}/{release}" + return { + "sdk": sdk, + "alias": alias, + "release": release, + "sha": sha, + "tag": tag, + } + except Exception as e: + return { + "sdk": sdk, + "alias": version, + "err": f"Error resolving version {version} for {sdk}: {e}", + } + + +def main() -> None: + """CLI entry point for backward-compatible resolve-version.py wrapper.""" + if len(sys.argv) < 3: + print("Usage: python resolve_version.py ", file=sys.stderr) + sys.exit(1) + + sdk = sys.argv[1] + versions = sys.argv[2:] + + if sdk not in SDK_GIT_URLS: + print(f"Unknown SDK: {sdk}", file=sys.stderr) + sys.exit(2) + infix = SDK_TAG_INFIXES.get(sdk) + + results: list[ResolveResult] = [] + shas: set[str] = set() + for version in versions: + v = resolve(sdk, version, infix) + if is_resolve_success(v): + env = lookup_additional_options(sdk, v["tag"]) + if env: + v["env"] = env + if v["sha"] in shas: + continue + shas.add(v["sha"]) + results.append(v) + + print(json.dumps(results)) diff --git a/otdf-sdk-mgr/src/otdf_sdk_mgr/semver.py b/otdf-sdk-mgr/src/otdf_sdk_mgr/semver.py new file mode 100644 index 00000000..74581a7c --- /dev/null +++ b/otdf-sdk-mgr/src/otdf_sdk_mgr/semver.py @@ 
-0,0 +1,38 @@ +"""Semantic version parsing and utilities.""" + +from __future__ import annotations + +import re + +SEMVER_RE = re.compile(r"^v?(\d+)\.(\d+)\.(\d+)(?:-(.+))?$") + + +def parse_semver(version: str) -> tuple[int, int, int, str | None] | None: + """Parse a semver string into (major, minor, patch, pre) or None.""" + m = SEMVER_RE.match(version) + if not m: + return None + return int(m.group(1)), int(m.group(2)), int(m.group(3)), m.group(4) + + +def is_stable(version: str) -> bool: + """Return True if version has no pre-release suffix.""" + parsed = parse_semver(version) + if parsed is None: + return False + return parsed[3] is None + + +def semver_sort_key(version: str) -> tuple[int, int, int, int, str]: + """Sort key for semver strings. Stable versions sort after pre-release.""" + parsed = parse_semver(version) + if parsed is None: + return (0, 0, 0, 0, version) + major, minor, patch, pre = parsed + return (major, minor, patch, 0 if pre else 1, pre or "") + + +def normalize_version(version: str) -> str: + """Normalize version string to v-prefixed form.""" + v = version.strip().lstrip("v") + return f"v{v}" diff --git a/otdf-sdk-mgr/uv.lock b/otdf-sdk-mgr/uv.lock new file mode 100644 index 00000000..321f06c7 --- /dev/null +++ b/otdf-sdk-mgr/uv.lock @@ -0,0 +1,263 @@ +version = 1 +revision = 3 +requires-python = ">=3.11" + +[[package]] +name = "annotated-doc" +version = "0.0.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/57/ba/046ceea27344560984e26a590f90bc7f4a75b06701f653222458922b558c/annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4", size = 7288, upload-time = "2025-11-10T22:07:42.062Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl", hash = 
"sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320", size = 5303, upload-time = "2025-11-10T22:07:40.673Z" }, +] + +[[package]] +name = "click" +version = "8.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "gitdb" +version = "4.0.12" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "smmap" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/72/94/63b0fc47eb32792c7ba1fe1b694daec9a63620db1e313033d18140c2320a/gitdb-4.0.12.tar.gz", hash = "sha256:5ef71f855d191a3326fcfbc0d5da835f26b13fbcba60c32c21091c349ffdb571", size = 
394684, upload-time = "2025-01-02T07:20:46.413Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/61/5c78b91c3143ed5c14207f463aecfc8f9dbb5092fb2869baf37c273b2705/gitdb-4.0.12-py3-none-any.whl", hash = "sha256:67073e15955400952c6565cc3e707c554a4eea2e428946f7a4c162fab9bd9bcf", size = 62794, upload-time = "2025-01-02T07:20:43.624Z" }, +] + +[[package]] +name = "gitpython" +version = "3.1.46" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "gitdb" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/b5/59d16470a1f0dfe8c793f9ef56fd3826093fc52b3bd96d6b9d6c26c7e27b/gitpython-3.1.46.tar.gz", hash = "sha256:400124c7d0ef4ea03f7310ac2fbf7151e09ff97f2a3288d64a440c584a29c37f", size = 215371, upload-time = "2026-01-01T15:37:32.073Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/09/e21df6aef1e1ffc0c816f0522ddc3f6dcded766c3261813131c78a704470/gitpython-3.1.46-py3-none-any.whl", hash = "sha256:79812ed143d9d25b6d176a10bb511de0f9c67b1fa641d82097b0ab90398a2058", size = 208620, upload-time = "2026-01-01T15:37:30.574Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, +] + +[[package]] +name = "markdown-it-py" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, +] + +[[package]] +name = "nodeenv" +version = "1.10.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/24/bf/d1bda4f6168e0b2e9e5958945e01910052158313224ada5ce1fb2e1113b8/nodeenv-1.10.0.tar.gz", hash = "sha256:996c191ad80897d076bdfba80a41994c2b47c68e224c542b48feba42ba00f8bb", size = 55611, upload-time = "2025-12-20T14:08:54.006Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/b2/d0896bdcdc8d28a7fc5717c305f1a861c26e18c05047949fb371034d98bd/nodeenv-1.10.0-py2.py3-none-any.whl", hash = "sha256:5bb13e3eed2923615535339b3c620e76779af4cb4c6a90deccc9e36b274d3827", size = 23438, upload-time = "2025-12-20T14:08:52.782Z" }, +] + +[[package]] 
+name = "otdf-sdk-mgr" +version = "0.1.0" +source = { editable = "." } +dependencies = [ + { name = "gitpython" }, + { name = "rich" }, + { name = "typer" }, +] + +[package.dev-dependencies] +dev = [ + { name = "pyright" }, + { name = "pytest" }, + { name = "ruff" }, +] + +[package.metadata] +requires-dist = [ + { name = "gitpython", specifier = ">=3.1.46" }, + { name = "rich", specifier = ">=13.7.0" }, + { name = "typer", specifier = ">=0.12.0" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "pyright", specifier = ">=1.1.408" }, + { name = "pytest", specifier = ">=8.0.0" }, + { name = "ruff", specifier = ">=0.9.0" }, +] + +[[package]] +name = "packaging" +version = "26.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time 
= "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pyright" +version = "1.1.408" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nodeenv" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/74/b2/5db700e52554b8f025faa9c3c624c59f1f6c8841ba81ab97641b54322f16/pyright-1.1.408.tar.gz", hash = "sha256:f28f2321f96852fa50b5829ea492f6adb0e6954568d1caa3f3af3a5f555eb684", size = 4400578, upload-time = "2026-01-08T08:07:38.795Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/82/a2c93e32800940d9573fb28c346772a14778b84ba7524e691b324620ab89/pyright-1.1.408-py3-none-any.whl", hash = "sha256:090b32865f4fdb1e0e6cd82bf5618480d48eecd2eb2e70f960982a3d9a4c17c1", size = 6399144, upload-time = "2026-01-08T08:07:37.082Z" }, +] + +[[package]] +name = "pytest" +version = "9.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = 
"sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, +] + +[[package]] +name = "rich" +version = "14.3.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/74/99/a4cab2acbb884f80e558b0771e97e21e939c5dfb460f488d19df485e8298/rich-14.3.2.tar.gz", hash = "sha256:e712f11c1a562a11843306f5ed999475f09ac31ffb64281f73ab29ffdda8b3b8", size = 230143, upload-time = "2026-02-01T16:20:47.908Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/45/615f5babd880b4bd7d405cc0dc348234c5ffb6ed1ea33e152ede08b2072d/rich-14.3.2-py3-none-any.whl", hash = "sha256:08e67c3e90884651da3239ea668222d19bea7b589149d8014a21c633420dbb69", size = 309963, upload-time = "2026-02-01T16:20:46.078Z" }, +] + +[[package]] +name = "ruff" +version = "0.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c8/39/5cee96809fbca590abea6b46c6d1c586b49663d1d2830a751cc8fc42c666/ruff-0.15.0.tar.gz", hash = "sha256:6bdea47cdbea30d40f8f8d7d69c0854ba7c15420ec75a26f463290949d7f7e9a", size = 4524893, upload-time = "2026-02-03T17:53:35.357Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/88/3fd1b0aa4b6330d6aaa63a285bc96c9f71970351579152d231ed90914586/ruff-0.15.0-py3-none-linux_armv6l.whl", hash = "sha256:aac4ebaa612a82b23d45964586f24ae9bc23ca101919f5590bdb368d74ad5455", size = 10354332, upload-time = "2026-02-03T17:52:54.892Z" }, + { url = 
"https://files.pythonhosted.org/packages/72/f6/62e173fbb7eb75cc29fe2576a1e20f0a46f671a2587b5f604bfb0eaf5f6f/ruff-0.15.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:dcd4be7cc75cfbbca24a98d04d0b9b36a270d0833241f776b788d59f4142b14d", size = 10767189, upload-time = "2026-02-03T17:53:19.778Z" }, + { url = "https://files.pythonhosted.org/packages/99/e4/968ae17b676d1d2ff101d56dc69cf333e3a4c985e1ec23803df84fc7bf9e/ruff-0.15.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d747e3319b2bce179c7c1eaad3d884dc0a199b5f4d5187620530adf9105268ce", size = 10075384, upload-time = "2026-02-03T17:53:29.241Z" }, + { url = "https://files.pythonhosted.org/packages/a2/bf/9843c6044ab9e20af879c751487e61333ca79a2c8c3058b15722386b8cae/ruff-0.15.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:650bd9c56ae03102c51a5e4b554d74d825ff3abe4db22b90fd32d816c2e90621", size = 10481363, upload-time = "2026-02-03T17:52:43.332Z" }, + { url = "https://files.pythonhosted.org/packages/55/d9/4ada5ccf4cd1f532db1c8d44b6f664f2208d3d93acbeec18f82315e15193/ruff-0.15.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a6664b7eac559e3048223a2da77769c2f92b43a6dfd4720cef42654299a599c9", size = 10187736, upload-time = "2026-02-03T17:53:00.522Z" }, + { url = "https://files.pythonhosted.org/packages/86/e2/f25eaecd446af7bb132af0a1d5b135a62971a41f5366ff41d06d25e77a91/ruff-0.15.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f811f97b0f092b35320d1556f3353bf238763420ade5d9e62ebd2b73f2ff179", size = 10968415, upload-time = "2026-02-03T17:53:15.705Z" }, + { url = "https://files.pythonhosted.org/packages/e7/dc/f06a8558d06333bf79b497d29a50c3a673d9251214e0d7ec78f90b30aa79/ruff-0.15.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:761ec0a66680fab6454236635a39abaf14198818c8cdf691e036f4bc0f406b2d", size = 11809643, upload-time = "2026-02-03T17:53:23.031Z" }, + { url = 
"https://files.pythonhosted.org/packages/dd/45/0ece8db2c474ad7df13af3a6d50f76e22a09d078af63078f005057ca59eb/ruff-0.15.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:940f11c2604d317e797b289f4f9f3fa5555ffe4fb574b55ed006c3d9b6f0eb78", size = 11234787, upload-time = "2026-02-03T17:52:46.432Z" }, + { url = "https://files.pythonhosted.org/packages/8a/d9/0e3a81467a120fd265658d127db648e4d3acfe3e4f6f5d4ea79fac47e587/ruff-0.15.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcbca3d40558789126da91d7ef9a7c87772ee107033db7191edefa34e2c7f1b4", size = 11112797, upload-time = "2026-02-03T17:52:49.274Z" }, + { url = "https://files.pythonhosted.org/packages/b2/cb/8c0b3b0c692683f8ff31351dfb6241047fa873a4481a76df4335a8bff716/ruff-0.15.0-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:9a121a96db1d75fa3eb39c4539e607f628920dd72ff1f7c5ee4f1b768ac62d6e", size = 11033133, upload-time = "2026-02-03T17:53:33.105Z" }, + { url = "https://files.pythonhosted.org/packages/f8/5e/23b87370cf0f9081a8c89a753e69a4e8778805b8802ccfe175cc410e50b9/ruff-0.15.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5298d518e493061f2eabd4abd067c7e4fb89e2f63291c94332e35631c07c3662", size = 10442646, upload-time = "2026-02-03T17:53:06.278Z" }, + { url = "https://files.pythonhosted.org/packages/e1/9a/3c94de5ce642830167e6d00b5c75aacd73e6347b4c7fc6828699b150a5ee/ruff-0.15.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:afb6e603d6375ff0d6b0cee563fa21ab570fd15e65c852cb24922cef25050cf1", size = 10195750, upload-time = "2026-02-03T17:53:26.084Z" }, + { url = "https://files.pythonhosted.org/packages/30/15/e396325080d600b436acc970848d69df9c13977942fb62bb8722d729bee8/ruff-0.15.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:77e515f6b15f828b94dc17d2b4ace334c9ddb7d9468c54b2f9ed2b9c1593ef16", size = 10676120, upload-time = "2026-02-03T17:53:09.363Z" }, + { url = 
"https://files.pythonhosted.org/packages/8d/c9/229a23d52a2983de1ad0fb0ee37d36e0257e6f28bfd6b498ee2c76361874/ruff-0.15.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6f6e80850a01eb13b3e42ee0ebdf6e4497151b48c35051aab51c101266d187a3", size = 11201636, upload-time = "2026-02-03T17:52:57.281Z" }, + { url = "https://files.pythonhosted.org/packages/6f/b0/69adf22f4e24f3677208adb715c578266842e6e6a3cc77483f48dd999ede/ruff-0.15.0-py3-none-win32.whl", hash = "sha256:238a717ef803e501b6d51e0bdd0d2c6e8513fe9eec14002445134d3907cd46c3", size = 10465945, upload-time = "2026-02-03T17:53:12.591Z" }, + { url = "https://files.pythonhosted.org/packages/51/ad/f813b6e2c97e9b4598be25e94a9147b9af7e60523b0cb5d94d307c15229d/ruff-0.15.0-py3-none-win_amd64.whl", hash = "sha256:dd5e4d3301dc01de614da3cdffc33d4b1b96fb89e45721f1598e5532ccf78b18", size = 11564657, upload-time = "2026-02-03T17:52:51.893Z" }, + { url = "https://files.pythonhosted.org/packages/f6/b0/2d823f6e77ebe560f4e397d078487e8d52c1516b331e3521bc75db4272ca/ruff-0.15.0-py3-none-win_arm64.whl", hash = "sha256:c480d632cc0ca3f0727acac8b7d053542d9e114a462a145d0b00e7cd658c515a", size = 10865753, upload-time = "2026-02-03T17:53:03.014Z" }, +] + +[[package]] +name = "shellingham" +version = "1.5.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310, upload-time = "2023-10-24T04:13:40.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" }, +] + +[[package]] +name = "smmap" +version = "5.0.2" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/44/cd/a040c4b3119bbe532e5b0732286f805445375489fceaec1f48306068ee3b/smmap-5.0.2.tar.gz", hash = "sha256:26ea65a03958fa0c8a1c7e8c7a58fdc77221b8910f6be2131affade476898ad5", size = 22329, upload-time = "2025-01-02T07:14:40.909Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/be/d09147ad1ec7934636ad912901c5fd7667e1c858e19d355237db0d0cd5e4/smmap-5.0.2-py3-none-any.whl", hash = "sha256:b30115f0def7d7531d22a0fb6502488d879e75b260a9db4d0819cfb25403af5e", size = 24303, upload-time = "2025-01-02T07:14:38.724Z" }, +] + +[[package]] +name = "typer" +version = "0.23.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-doc" }, + { name = "click" }, + { name = "rich" }, + { name = "shellingham" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7e/e6/44e073787aa57cd71c151f44855232feb0f748428fd5242d7366e3c4ae8b/typer-0.23.0.tar.gz", hash = "sha256:d8378833e47ada5d3d093fa20c4c63427cc4e27127f6b349a6c359463087d8cc", size = 120181, upload-time = "2026-02-11T15:22:18.637Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7a/ed/d6fca788b51d0d4640c4bc82d0e85bad4b49809bca36bf4af01b4dcb66a7/typer-0.23.0-py3-none-any.whl", hash = "sha256:79f4bc262b6c37872091072a3cb7cb6d7d79ee98c0c658b4364bdcde3c42c913", size = 56668, upload-time = "2026-02-11T15:22:21.075Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] diff --git a/xtest/sdk/go/cli.sh b/xtest/sdk/go/cli.sh index 9790a74c..c060e6f9 100755 --- a/xtest/sdk/go/cli.sh +++ b/xtest/sdk/go/cli.sh @@ -22,7 +22,12 @@ SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd) cmd=("$SCRIPT_DIR"/otdfctl) if [ ! -f "$SCRIPT_DIR"/otdfctl ]; then - cmd=(go run "github.com/opentdf/otdfctl@latest") + if [ -f "$SCRIPT_DIR/.version" ]; then + OTDFCTL_VERSION=$(tr -d '[:space:]' < "$SCRIPT_DIR/.version") + cmd=(go run "github.com/opentdf/otdfctl@${OTDFCTL_VERSION}") + else + cmd=(go run "github.com/opentdf/otdfctl@latest") + fi fi if [ "$1" == "supports" ]; then diff --git a/xtest/sdk/go/otdfctl.sh b/xtest/sdk/go/otdfctl.sh index 1fbfbea7..27ec2f3b 100755 --- a/xtest/sdk/go/otdfctl.sh +++ b/xtest/sdk/go/otdfctl.sh @@ -17,7 +17,12 @@ source "$XTEST_DIR/test.env" cmd=("$SCRIPT_DIR"/otdfctl) if [ ! -f "$SCRIPT_DIR"/otdfctl ]; then - cmd=(go run github.com/opentdf/otdfctl@latest) + if [ -f "$SCRIPT_DIR/.version" ]; then + OTDFCTL_VERSION=$(tr -d '[:space:]' < "$SCRIPT_DIR/.version") + cmd=(go run "github.com/opentdf/otdfctl@${OTDFCTL_VERSION}") + else + cmd=(go run "github.com/opentdf/otdfctl@latest") + fi fi cmd+=(--json) diff --git a/xtest/sdk/java/cli.sh b/xtest/sdk/java/cli.sh index 0153932a..b874f152 100755 --- a/xtest/sdk/java/cli.sh +++ b/xtest/sdk/java/cli.sh @@ -23,130 +23,130 @@ SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd) XTEST_DIR="$SCRIPT_DIR" while [ ! 
-f "$XTEST_DIR/test.env" ] && [ "$(basename "$XTEST_DIR")" != "xtest" ]; do - XTEST_DIR=$(dirname "$XTEST_DIR") + XTEST_DIR=$(dirname "$XTEST_DIR") done if [ -f "$XTEST_DIR/test.env" ]; then - # shellcheck disable=SC1091 - source "$XTEST_DIR/test.env" + # shellcheck disable=SC1091 + source "$XTEST_DIR/test.env" else - echo "test.env not found, stopping at xtest directory." - exit 1 + echo "test.env not found, stopping at xtest directory." + exit 1 fi if [ "$1" == "supports" ]; then - case "$2" in - autoconfigure | ns_grants) - exit 0 - ;; - assertions) - java -jar "$SCRIPT_DIR"/cmdline.jar help encrypt | grep with-assertions - exit $? - ;; - assertion_verification) - java -jar "$SCRIPT_DIR"/cmdline.jar help decrypt | grep with-assertion-verification-keys - exit $? - ;; - kasallowlist) - java -jar "$SCRIPT_DIR"/cmdline.jar help decrypt | grep kas-allowlist - exit $? - ;; - ecwrap) - if java -jar "$SCRIPT_DIR"/cmdline.jar help encrypt | grep encap-key; then - # versions 0.7.6 and earlier used an older value for EC HKDF salt; check for 0.7.7 or later - java -jar "$SCRIPT_DIR"/cmdline.jar --version | jq -re .version | awk -F. '{ if ($1 > 0 || ($1 == 0 && $2 > 7) || ($1 == 0 && $2 == 7 && $3 >= 7)) exit 0; else exit 1; }' - exit $? - else - echo "ecwrap not supported" - exit 1 - fi - ;; - - hexless) - set -o pipefail - java -jar "$SCRIPT_DIR"/cmdline.jar --version | jq -re .tdfSpecVersion | awk -F. '{ if ($1 > 4 || ($1 == 4 && $2 > 2) || ($1 == 4 && $2 == 3 && $3 >= 0)) exit 0; else exit 1; }' - exit $? - ;; - - hexaflexible) - java -jar "$SCRIPT_DIR"/cmdline.jar help encrypt | grep with-target-mode - exit $? - ;; - - *) - echo "Unknown feature: $2" - exit 2 - ;; - esac + case "$2" in + autoconfigure | ns_grants) + exit 0 + ;; + assertions) + java -jar "$SCRIPT_DIR"/cmdline.jar help encrypt | grep with-assertions + exit $? + ;; + assertion_verification) + java -jar "$SCRIPT_DIR"/cmdline.jar help decrypt | grep with-assertion-verification-keys + exit $? 
+ ;; + kasallowlist) + java -jar "$SCRIPT_DIR"/cmdline.jar help decrypt | grep kas-allowlist + exit $? + ;; + ecwrap) + if java -jar "$SCRIPT_DIR"/cmdline.jar help encrypt | grep encap-key; then + # versions 0.7.6 and earlier used an older value for EC HKDF salt; check for 0.7.7 or later + java -jar "$SCRIPT_DIR"/cmdline.jar --version | jq -re .version | awk -F. '{ if ($1 > 0 || ($1 == 0 && $2 > 7) || ($1 == 0 && $2 == 7 && $3 >= 7)) exit 0; else exit 1; }' + exit $? + else + echo "ecwrap not supported" + exit 1 + fi + ;; + + hexless) + set -o pipefail + java -jar "$SCRIPT_DIR"/cmdline.jar --version | jq -re .tdfSpecVersion | awk -F. '{ if ($1 > 4 || ($1 == 4 && $2 > 2) || ($1 == 4 && $2 == 3 && $3 >= 0)) exit 0; else exit 1; }' + exit $? + ;; + + hexaflexible) + java -jar "$SCRIPT_DIR"/cmdline.jar help encrypt | grep with-target-mode + exit $? + ;; + + *) + echo "Unknown feature: $2" + exit 2 + ;; + esac fi args=( - "--client-id=$CLIENTID" - "--client-secret=$CLIENTSECRET" - "--plaintext" + "--client-id=$CLIENTID" + "--client-secret=$CLIENTSECRET" + "--plaintext" ) # when we added support for KAS allowlist, we changed the platform endpoint format to require scheme if java -jar "$SCRIPT_DIR"/cmdline.jar help decrypt | grep kas-allowlist; then - args+=("--platform-endpoint=$PLATFORMURL") + args+=("--platform-endpoint=$PLATFORMURL") else - args+=("--platform-endpoint=$PLATFORMENDPOINT") + args+=("--platform-endpoint=$PLATFORMENDPOINT") fi args+=("$1") if [ "$1" == "encrypt" ]; then - args+=("--kas-url=$KASURL") + args+=("--kas-url=$KASURL") - if [ "$XT_WITH_ECDSA_BINDING" == "true" ]; then - args+=(--ecdsa-binding) - fi + if [ "$XT_WITH_ECDSA_BINDING" == "true" ]; then + args+=(--ecdsa-binding) + fi - if [ "$XT_WITH_ECWRAP" == 'true' ]; then - args+=(--encap-key-type="ec:secp256r1") - fi + if [ "$XT_WITH_ECWRAP" == 'true' ]; then + args+=(--encap-key-type="ec:secp256r1") + fi - if [ "$XT_WITH_PLAINTEXT_POLICY" == "true" ]; then - args+=(--policy-type="plaintext") - 
fi + if [ "$XT_WITH_PLAINTEXT_POLICY" == "true" ]; then + args+=(--policy-type="plaintext") + fi else - if [ "$XT_WITH_ECWRAP" == 'true' ]; then - args+=(--rewrap-key-type="ec:secp256r1") - fi + if [ "$XT_WITH_ECWRAP" == 'true' ]; then + args+=(--rewrap-key-type="ec:secp256r1") + fi fi if [ "$1" == "decrypt" ]; then - if [ -n "$XT_WITH_KAS_ALLOW_LIST" ]; then - args+=(--kas-allowlist="$XT_WITH_KAS_ALLOW_LIST") - fi + if [ -n "$XT_WITH_KAS_ALLOW_LIST" ]; then + args+=(--kas-allowlist="$XT_WITH_KAS_ALLOW_LIST") + fi - if [ "$XT_WITH_IGNORE_KAS_ALLOWLIST" == "true" ]; then - args+=(--ignore-kas-allowlist=true) - fi + if [ "$XT_WITH_IGNORE_KAS_ALLOWLIST" == "true" ]; then + args+=(--ignore-kas-allowlist=true) + fi fi if [ -n "$XT_WITH_MIME_TYPE" ]; then - args+=(--mime-type "$XT_WITH_MIME_TYPE") + args+=(--mime-type "$XT_WITH_MIME_TYPE") fi if [ -n "$XT_WITH_ATTRIBUTES" ]; then - args+=(--attr "$XT_WITH_ATTRIBUTES") + args+=(--attr "$XT_WITH_ATTRIBUTES") fi if [ -n "$XT_WITH_ASSERTIONS" ]; then - args+=(--with-assertions "$XT_WITH_ASSERTIONS") + args+=(--with-assertions "$XT_WITH_ASSERTIONS") fi if [ -n "$XT_WITH_ASSERTION_VERIFICATION_KEYS" ]; then - args+=(--with-assertion-verification-keys "$XT_WITH_ASSERTION_VERIFICATION_KEYS") + args+=(--with-assertion-verification-keys "$XT_WITH_ASSERTION_VERIFICATION_KEYS") fi if [ "$XT_WITH_VERIFY_ASSERTIONS" == 'false' ]; then - args+=(--with-assertion-verification-disabled) + args+=(--with-assertion-verification-disabled) fi if [ -n "$XT_WITH_TARGET_MODE" ]; then - args+=(--with-target-mode "$XT_WITH_TARGET_MODE") + args+=(--with-target-mode "$XT_WITH_TARGET_MODE") fi echo java -jar "$SCRIPT_DIR"/cmdline.jar "${args[@]}" --file="$2" ">" "$3" diff --git a/xtest/sdk/js/cli.sh b/xtest/sdk/js/cli.sh index ffe6f915..463d6438 100755 --- a/xtest/sdk/js/cli.sh +++ b/xtest/sdk/js/cli.sh @@ -51,7 +51,7 @@ if [ "$1" == "supports" ]; then ecwrap) if npx $CTL help | grep encapKeyType; then # Claims to support ecwrap, but maybe with old 
salt? Look up version - npx $CTL --version | jq -re '.["@opentdf/sdk"]' | awk -F. '{ if ($1 > 2) exit 0; else exit 1; }' + npx $CTL --version | jq -re '.["@opentdf/sdk"]' | awk -F. '{ if ($1 > 0 || ($1 == 0 && $2 > 4)) exit 0; else exit 1; }' exit $? else echo "ecwrap not supported" diff --git a/xtest/sdk/scripts/checkout-all.sh b/xtest/sdk/scripts/checkout-all.sh index e8c69ffb..ea644bd3 100755 --- a/xtest/sdk/scripts/checkout-all.sh +++ b/xtest/sdk/scripts/checkout-all.sh @@ -1,12 +1,13 @@ #!/bin/bash -# Checks out the latest `main` branch of each of the sdks under test -# and builds them. +# Backward-compatible wrapper. Use `otdf-sdk-mgr checkout --all` instead. SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd) +PROJECT_DIR="$SCRIPT_DIR/../../../../otdf-sdk-mgr" -for sdk in go java js; do - if ! "$SCRIPT_DIR/checkout-sdk-branch.sh" "$sdk" main; then - echo "Failed to checkout $sdk main branch" - exit 1 - fi -done +if command -v uv &>/dev/null && [ -f "$PROJECT_DIR/pyproject.toml" ]; then + exec uv run --project "$PROJECT_DIR" otdf-sdk-mgr checkout --all +else + for sdk in go java js; do + "$SCRIPT_DIR/checkout-sdk-branch.sh" "$sdk" main || exit 1 + done +fi diff --git a/xtest/sdk/scripts/checkout-sdk-branch.sh b/xtest/sdk/scripts/checkout-sdk-branch.sh index a941125d..ef662496 100755 --- a/xtest/sdk/scripts/checkout-sdk-branch.sh +++ b/xtest/sdk/scripts/checkout-sdk-branch.sh @@ -1,71 +1,20 @@ #!/bin/bash -# Refreshes to the latest sdk at branch in the appropriate folder. +# Backward-compatible wrapper. Use `otdf-sdk-mgr checkout` instead. 
# # Usage: ./checkout-sdk-branch.sh [sdk language] [branch] # Example: ./checkout-sdk-branch.sh js main -# Resolve script directory SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd) -XTEST_DIR=$(cd -- "$SCRIPT_DIR/../../" &>/dev/null && pwd) +PROJECT_DIR="$SCRIPT_DIR/../../../../otdf-sdk-mgr" -# Parse arguments -LANGUAGE=${1:-js} -BRANCH=${2:-main} - -# Replace slashes in branch name with double dashes for local naming -LOCAL_NAME=${BRANCH//\//--} - -# Strip well known prefixes for monorepo output -if [[ $LOCAL_NAME == sdk--* ]]; then - LOCAL_NAME=${LOCAL_NAME#sdk--} -fi - -case "$LANGUAGE" in - js) - BARE_REPO_PATH="$XTEST_DIR/sdk/js/src/web-sdk.git" - WORKTREE_PATH="$XTEST_DIR/sdk/js/src/$LOCAL_NAME" - REPO_URL="https://github.com/opentdf/web-sdk" - ;; - java) - BARE_REPO_PATH="$XTEST_DIR/sdk/java/src/java-sdk.git" - WORKTREE_PATH="$XTEST_DIR/sdk/java/src/$LOCAL_NAME" - REPO_URL="https://github.com/opentdf/java-sdk" - ;; - go) - BARE_REPO_PATH="$XTEST_DIR/sdk/go/src/otdfctl.git" - WORKTREE_PATH="$XTEST_DIR/sdk/go/src/$LOCAL_NAME" - REPO_URL="https://github.com/opentdf/otdfctl" - ;; - *) - echo "Error: Unsupported language '$LANGUAGE'. Supported values are 'js', 'java', or 'go'." >&2 - exit 1 - ;; -esac - -# Function to execute a command and handle errors -run_command() { - "$@" - local status=$? - if [[ $status -ne 0 ]]; then - echo "Error: Command '$*' failed." >&2 - exit $status - fi -} - -# Clone the repository as bare if it doesn't exist -if [[ ! -d $BARE_REPO_PATH ]]; then - echo "Cloning $REPO_URL as a bare repository into $BARE_REPO_PATH..." - run_command git clone --bare "$REPO_URL" "$BARE_REPO_PATH" -else - echo "Bare repository already exists at $BARE_REPO_PATH. Fetching updates..." - run_command git --git-dir="$BARE_REPO_PATH" fetch --all -fi - -# Check if the worktree for the specified branch exists -if [[ -d $WORKTREE_PATH ]]; then - echo "Worktree for branch '$BRANCH' already exists at $WORKTREE_PATH. Updating..." 
- run_command git --git-dir="$BARE_REPO_PATH" --work-tree="$WORKTREE_PATH" pull origin "$BRANCH" +if command -v uv &>/dev/null && [ -f "$PROJECT_DIR/pyproject.toml" ]; then + exec uv run --project "$PROJECT_DIR" otdf-sdk-mgr checkout "${1:-js}" "${2:-main}" else - echo "Setting up worktree for branch '$BRANCH' at $WORKTREE_PATH..." - run_command git --git-dir="$BARE_REPO_PATH" worktree add "$WORKTREE_PATH" "$BRANCH" + # Fallback: direct Python import (works in CI without uv) + exec python3 -c " +import sys +sys.path.insert(0, '$PROJECT_DIR/src') +from otdf_sdk_mgr.checkout import checkout_sdk_branch +checkout_sdk_branch('${1:-js}', '${2:-main}') +" fi diff --git a/xtest/sdk/scripts/cleanup-all.sh b/xtest/sdk/scripts/cleanup-all.sh index 4027bcb9..fa126a3f 100755 --- a/xtest/sdk/scripts/cleanup-all.sh +++ b/xtest/sdk/scripts/cleanup-all.sh @@ -1,17 +1,22 @@ #!/bin/bash -# Removes the checked out branches of each of the sdks under test +# Backward-compatible wrapper. Use `otdf-sdk-mgr clean` instead. 
SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd) +PROJECT_DIR="$SCRIPT_DIR/../../../../otdf-sdk-mgr" -for sdk in go java js; do - rm -rf "$SCRIPT_DIR/../$sdk/dist" - for branch in "$SCRIPT_DIR/../${sdk}/src/"*; do - # Check if the path ends with .git - if [[ $branch == *.git ]]; then - continue - fi - if [ -d "$branch" ]; then - rm -rf "$branch" - fi +if command -v uv &>/dev/null && [ -f "$PROJECT_DIR/pyproject.toml" ]; then + exec uv run --project "$PROJECT_DIR" otdf-sdk-mgr clean +else + # Fallback: inline cleanup matching original behavior + for sdk in go java js; do + rm -rf "$SCRIPT_DIR/../$sdk/dist" + for branch in "$SCRIPT_DIR/../${sdk}/src/"*; do + if [[ $branch == *.git ]]; then + continue + fi + if [ -d "$branch" ]; then + rm -rf "$branch" + fi + done done -done +fi diff --git a/xtest/sdk/scripts/list-versions.py b/xtest/sdk/scripts/list-versions.py new file mode 100755 index 00000000..5eb5c998 --- /dev/null +++ b/xtest/sdk/scripts/list-versions.py @@ -0,0 +1,139 @@ +#!/usr/bin/env python3 +"""Backward-compatible wrapper. Use `otdf-sdk-mgr versions list` instead. + +Also re-exports key functions for any code that imports this module directly. 
+""" + +import sys +from pathlib import Path + +# tests/otdf-sdk-mgr/src/ is three levels up from xtest/sdk/scripts/ +sys.path.insert( + 0, + str(Path(__file__).resolve().parent.parent.parent.parent / "otdf-sdk-mgr" / "src"), +) + +# Backward-compat: list-versions.py main() with argparse +import argparse # noqa: E402 +import json # noqa: E402 +from typing import Any # noqa: E402 + +from otdf_sdk_mgr.registry import ( # noqa: E402, F401 + apply_filters, + list_go_versions, + list_java_github_releases, + list_java_maven_versions, + list_js_versions, +) +from otdf_sdk_mgr.semver import ( # noqa: E402, F401 + is_stable, + parse_semver, + semver_sort_key, +) + + +def print_table(entries: list[dict[str, Any]]) -> None: + if not entries: + print("(no results)") + return + cols = { + "sdk": 6, + "version": 20, + "source": 16, + "stable": 7, + "has_cli": 8, + "install_method": 60, + } + header = " ".join(k.upper().ljust(v) for k, v in cols.items()) + print(header) + print("-" * len(header)) + for entry in entries: + row = [] + for key, width in cols.items(): + val = entry.get(key, "") + if isinstance(val, bool): + val = "yes" if val else "no" + row.append(str(val)[:width].ljust(width)) + print(" ".join(row)) + + +def main() -> None: + parser = argparse.ArgumentParser( + description="List available released versions of OpenTDF SDK CLIs." 
+ ) + parser.add_argument( + "sdk", + nargs="?", + default="all", + choices=["go", "js", "java", "all"], + help="SDK to query (default: all)", + ) + parser.add_argument( + "--stable", + action="store_true", + help="Only show stable (non-prerelease) versions", + ) + parser.add_argument( + "--latest", + type=int, + default=None, + metavar="N", + help="Show only the N most recent versions per source", + ) + parser.add_argument( + "--releases", + action="store_true", + help="Include GitHub Releases info for Java (slower)", + ) + output_group = parser.add_mutually_exclusive_group() + output_group.add_argument( + "--json", + action="store_true", + default=True, + dest="output_json", + help="JSON output (default)", + ) + output_group.add_argument( + "--table", + action="store_true", + help="Human-readable table output", + ) + args = parser.parse_args() + + sdks = ["go", "js", "java"] if args.sdk == "all" else [args.sdk] + all_entries: list[dict[str, Any]] = [] + + for sdk in sdks: + if sdk == "go": + entries = list_go_versions() + all_entries.extend( + apply_filters(entries, stable_only=args.stable, latest_n=args.latest) + ) + elif sdk == "js": + entries = list_js_versions() + all_entries.extend( + apply_filters(entries, stable_only=args.stable, latest_n=args.latest) + ) + elif sdk == "java": + maven_entries = list_java_maven_versions() + all_entries.extend( + apply_filters( + maven_entries, stable_only=args.stable, latest_n=args.latest + ) + ) + if args.releases: + gh_entries = list_java_github_releases() + all_entries.extend( + apply_filters( + gh_entries, stable_only=args.stable, latest_n=args.latest + ) + ) + + if args.table: + print_table(all_entries) + else: + print(json.dumps(all_entries, indent=2)) + + +if __name__ == "__main__": + main() diff --git a/xtest/sdk/scripts/post-checkout-java.sh b/xtest/sdk/scripts/post-checkout-java.sh index 0336602e..8402e46e 100755 --- a/xtest/sdk/scripts/post-checkout-java.sh +++ b/xtest/sdk/scripts/post-checkout-java.sh @@ -1,93 
+1,17 @@ #!/bin/bash +# Backward-compatible wrapper. Use `otdf-sdk-mgr java-fixup` instead. -# Post checkout cleanups for java -# Currently, this inserts the missing `platform.branch` property into the pom.xml files -# on older branches that do not have it defined. - -# Base directory for the script SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd) -BASE_DIR="$SCRIPT_DIR/../java/src" +PROJECT_DIR="$SCRIPT_DIR/../../../../otdf-sdk-mgr" -# Detect the operating system to use the correct sed syntax -if [[ "$(uname)" == "Darwin" ]]; then - SED_CMD="sed -i ''" +if command -v uv &>/dev/null && [ -f "$PROJECT_DIR/pyproject.toml" ]; then + exec uv run --project "$PROJECT_DIR" otdf-sdk-mgr java-fixup else - SED_CMD="sed -i" + # Fallback: direct Python import + exec python3 -c " +import sys +sys.path.insert(0, '$PROJECT_DIR/src') +from otdf_sdk_mgr.java_fixup import post_checkout_java_fixup +post_checkout_java_fixup() +" fi - -# Map Java SDK version to compatible platform protocol branch -# Must match the mappings in resolve-version.py -get_platform_branch() { - local version="$1" - case "$version" in - 0.7.8|0.7.7) echo "protocol/go/v0.2.29" ;; - 0.7.6) echo "protocol/go/v0.2.25" ;; - 0.7.5|0.7.4) echo "protocol/go/v0.2.18" ;; - 0.7.3|0.7.2) echo "protocol/go/v0.2.17" ;; - 0.6.1|0.6.0) echo "protocol/go/v0.2.14" ;; - 0.5.0) echo "protocol/go/v0.2.13" ;; - 0.4.0|0.3.0|0.2.0) echo "protocol/go/v0.2.10" ;; - 0.1.0) echo "protocol/go/v0.2.3" ;; - *) echo "main" ;; # Default to main for unknown/newer versions - esac - return 0 -} - -# Loop through all subdirectories in the base directory -find "$BASE_DIR" -mindepth 1 -maxdepth 1 -type d -not -name "*.git" | while read -r SRC_DIR; do - POM_FILE="$SRC_DIR/sdk/pom.xml" - - # Skip if path or file does not exist - if [[ ! -f $POM_FILE ]]; then - echo "No pom.xml file found in $SRC_DIR, skipping." 
- continue - fi - - # Extract version from directory name (e.g., "v0.7.5" -> "0.7.5", "main" -> "main") - DIR_NAME=$(basename "$SRC_DIR") - VERSION="${DIR_NAME#v}" # Remove leading 'v' if present - PLATFORM_BRANCH=$(get_platform_branch "$VERSION") - - # Check if the correct platform.branch is already set - if grep -q "$PLATFORM_BRANCH" "$POM_FILE"; then - echo "platform.branch already set to $PLATFORM_BRANCH in $POM_FILE, skipping." - continue - fi - - # If we don't have a specific mapping for this version (defaults to "main"), - # check if the pom.xml already has a valid protocol/go branch set - don't overwrite it - if [[ "$PLATFORM_BRANCH" == "main" ]]; then - if grep -q "protocol/go/" "$POM_FILE"; then - EXISTING_BRANCH=$(grep -o "[^<]*" "$POM_FILE" | sed 's/<[^>]*>//g') - echo "platform.branch already set to $EXISTING_BRANCH in $POM_FILE (no mapping for version $VERSION), skipping." - continue - fi - fi - - echo "Updating $POM_FILE (version=$VERSION, platform.branch=$PLATFORM_BRANCH)..." 
- - # Check if platform.branch property exists (possibly with wrong value) - if grep -q "" "$POM_FILE"; then - # Replace existing platform.branch value with the correct one - $SED_CMD "s|[^<]*|$PLATFORM_BRANCH|g" "$POM_FILE" - echo "Updated existing platform.branch to $PLATFORM_BRANCH in $POM_FILE" - else - # Add the platform.branch property to the section - $SED_CMD "//a \\ - $PLATFORM_BRANCH" "$POM_FILE" - - # Only replace branch=main if the property now exists (sed above may have failed silently if no section) - if grep -q "" "$POM_FILE"; then - # Replace hardcoded branch=main with branch=${platform.branch} in the maven-antrun-plugin configuration - # shellcheck disable=SC2016 # Literal $; it is for a variable expansion in the maven file - $SED_CMD 's/branch=main/branch=${platform.branch}/g' "$POM_FILE" - echo "Added platform.branch=$PLATFORM_BRANCH and updated branch references in $POM_FILE" - else - # No section exists, directly replace branch=main with the actual branch value - $SED_CMD "s|branch=main|branch=$PLATFORM_BRANCH|g" "$POM_FILE" - echo "No section, directly replaced branch=main with branch=$PLATFORM_BRANCH in $POM_FILE" - fi - fi -done - -echo "Update complete." diff --git a/xtest/sdk/scripts/requirements.txt b/xtest/sdk/scripts/requirements.txt index 529b57ea..02905791 100644 --- a/xtest/sdk/scripts/requirements.txt +++ b/xtest/sdk/scripts/requirements.txt @@ -1 +1,3 @@ +# Minimal dependencies for backward-compatible wrappers. +# The canonical source is tests/otdf-sdk-mgr/pyproject.toml. 
GitPython==3.1.46 diff --git a/xtest/sdk/scripts/resolve-version.py b/xtest/sdk/scripts/resolve-version.py index ca6655c1..d380b9ca 100755 --- a/xtest/sdk/scripts/resolve-version.py +++ b/xtest/sdk/scripts/resolve-version.py @@ -1,374 +1,25 @@ #!/usr/bin/env python3 -# Use: python3 resolve-version.py -# -# Tag can be: -# main: the main branch -# latest: the latest release of the app (last tag) -# lts: one of a list of hard-coded 'supported' versions -# : a git SHA -# v0.1.2: a git tag that is a semantic version -# refs/pull/1234: a pull request ref -# -# The script will resolve the tags to their git SHAs and return it and other metadata in a JSON formatted list of objects. -# Fields of the object will be: -# sdk: the SDK name -# alias: the tag that was requested -# head: true if the tag is a head of a live branch -# tag: the resolved tag or branch name, if found -# sha: the current git SHA of the tag -# err: an error message if the tag could not be resolved, or resolved to multiple items -# pr: if set, the pr number associated with the tag -# release: if set, the release page for the tag -# -# The script will also check for duplicate SHAs and remove them from the output. -# -# Sample Input: -# -# python3 resolve-version.py go 0.15.0 latest decaf01 unreleased-name -# -# Sample Output: -# ```json -# [ -# { -# "sdk": "go", -# "alias": "0.15.0", -# "env": "ADDITIONAL_OPTION=per build metadata", -# "release": "v0.15.0", -# "sha": "a1b2c3d4e5f6g7h8i9j0k1l2m3n4o5p6q7r8s9t0", -# "tag": "v0.15.0" -# }, -# { -# "sdk": "go", -# "alias": "latest", -# "release": "v0.15.1", -# "sha": "c3d4e5f6g7h8i9j0k1l2m3n4o5p6q7r8s9t0a1b2", -# "tag": "v0.15.1" -# }, -# { -# "sdk": "go", -# "alias": "decaf01", -# "head": true, -# "pr": "1234", -# "sha": "decaf016g7h8i9j0k1l2m3n4o5p6q7r8s9t0a1b2", -# "tag": "refs/pull/1234/head" -# }, -# { -# "sdk": "go", -# "err": "not found", -# "tag": "unreleased-name" -# } -# ] -# ``` +"""Backward-compatible wrapper. 
Use `otdf-sdk-mgr versions resolve` instead.""" -import json -import re import sys -from typing import NotRequired, TypedDict, TypeGuard -from urllib.parse import quote - -from git import Git - - -class ResolveSuccess(TypedDict): - sdk: str # The SDK name - alias: str # The tag that was requested - env: NotRequired[str] # Additional options for the SDK - head: NotRequired[bool] # True if the tag is a head of a live branch - pr: NotRequired[str] # The pull request number associated with the tag - release: NotRequired[str] # The release name for the tag - sha: str # The current git SHA of the tag - tag: str # The resolved tag name - - -class ResolveError(TypedDict): - sdk: str # The SDK name - alias: str # The tag that was requested - err: str # The error message - - -ResolveResult = ResolveSuccess | ResolveError - - -def is_resolve_error(val: ResolveResult) -> TypeGuard[ResolveError]: - """Check if the given value is a ResolveError type.""" - return "err" in val - - -def is_resolve_success(val: ResolveResult) -> TypeGuard[ResolveSuccess]: - """Check if the given value is a ResolveSuccess type.""" - return "err" not in val and "sha" in val and "tag" in val - - -sdk_urls = { - "go": "https://github.com/opentdf/otdfctl.git", - "java": "https://github.com/opentdf/java-sdk.git", - "js": "https://github.com/opentdf/web-sdk.git", - "platform": "https://github.com/opentdf/platform.git", -} - -lts_versions = { - "go": "0.24.0", - "java": "0.9.0", - "js": "0.4.0", - "platform": "0.9.0", -} - - -merge_queue_regex = r"^refs/heads/gh-readonly-queue/(?P[^/]+)/pr-(?P\d+)-(?P[a-f0-9]{40})$" - -sha_regex = r"^[a-f0-9]{7,40}$" - - -def lookup_additional_options(sdk: str, version: str) -> str | None: - if sdk != "java": - return None - if version.startswith("v"): - version = version[1:] - match version: - case "0.7.8" | "0.7.7": - return "PLATFORM_BRANCH=protocol/go/v0.2.29" - case "0.7.6": - return "PLATFORM_BRANCH=protocol/go/v0.2.25" - case "0.7.5" | "0.7.4": - return 
"PLATFORM_BRANCH=protocol/go/v0.2.18" - case "0.7.3" | "0.7.2": - return "PLATFORM_BRANCH=protocol/go/v0.2.17" - case "0.6.1" | "0.6.0": - return "PLATFORM_BRANCH=protocol/go/v0.2.14" - case "0.5.0": - return "PLATFORM_BRANCH=protocol/go/v0.2.13" - case "0.4.0" | "0.3.0" | "0.2.0": - return "PLATFORM_BRANCH=protocol/go/v0.2.10" - case "0.1.0": - return "PLATFORM_BRANCH=protocol/go/v0.2.3" - case _: - return None - - -def resolve(sdk: str, version: str, infix: None | str) -> ResolveResult: - sdk_url = sdk_urls[sdk] - try: - repo = Git() - if version == "main" or version == "refs/heads/main": - all_heads = [ - r.split("\t") for r in repo.ls_remote(sdk_url, heads=True).split("\n") - ] - sha, _ = [tag for tag in all_heads if "refs/heads/main" in tag][0] - return { - "sdk": sdk, - "alias": version, - "head": True, - "sha": sha, - "tag": "main", - } - - if re.match(sha_regex, version): - ls_remote = [r.split("\t") for r in repo.ls_remote(sdk_url).split("\n")] - matching_tags = [ - (sha, tag) for (sha, tag) in ls_remote if sha.startswith(version) - ] - if not matching_tags: - # Not a head; maybe another commit has pushed to this branch since the job started - return { - "sdk": sdk, - "alias": version[:7], - "sha": version, - "tag": version, - } - if len(matching_tags) > 1: - # If multiple tags point to the same SHA, check for pull requests - # and return the first one. 
- for sha, tag in matching_tags: - if tag.startswith("refs/pull/"): - pr_number = tag.split("/")[2] - return { - "sdk": sdk, - "alias": version, - "head": True, - "sha": sha, - "tag": f"pull-{pr_number}", - } - # No pull request, probably a feature branch or release branch - for sha, tag in matching_tags: - mq_match = re.match(merge_queue_regex, tag) - if mq_match: - to_branch = mq_match.group("branch") - pr_number = mq_match.group("pr_number") - if to_branch and pr_number: - return { - "sdk": sdk, - "alias": version, - "head": True, - "pr": pr_number, - "sha": sha, - "tag": f"mq-{to_branch}-{pr_number}", - } - suffix = tag.split("refs/heads/gh-readonly-queue/")[-1] - flattag = "mq--" + suffix.replace("/", "--") - return { - "sdk": sdk, - "alias": version, - "head": True, - "sha": sha, - "tag": flattag, - } - head = False - if tag.startswith("refs/heads/"): - head = True - tag = tag.split("refs/heads/")[-1] - flattag = tag.replace("/", "--") - return { - "sdk": sdk, - "alias": version, - "head": head, - "sha": sha, - "tag": flattag, - } - - return { - "sdk": sdk, - "alias": version, - "err": f"SHA {version} points to multiple tags, unable to differentiate: {', '.join(tag for _, tag in matching_tags)}", - } - (sha, tag) = matching_tags[0] - if tag.startswith("refs/tags/"): - tag = tag.split("refs/tags/")[-1] - if infix: - tag = tag.split(f"{infix}/")[-1] - return { - "sdk": sdk, - "alias": version, - "sha": sha, - "tag": tag, - } - - if version.startswith("refs/pull/"): - merge_heads = [ - r.split("\t") - for r in repo.ls_remote(sdk_url).split("\n") - if r.endswith(version) - ] - pr_number = version.split("/")[2] - if not merge_heads: - return { - "sdk": sdk, - "alias": version, - "err": f"pull request {pr_number} not found in {sdk_url}", - } - sha, _ = merge_heads[0] - return { - "sdk": sdk, - "alias": version, - "head": True, - "pr": pr_number, - "sha": sha, - "tag": f"pull-{pr_number}", - } - - remote_tags = [r.split("\t") for r in 
repo.ls_remote(sdk_url).split("\n")] - all_listed_tags = [ - (sha, tag.split("refs/tags/")[-1]) - for (sha, tag) in remote_tags - if "refs/tags/" in tag - ] - - all_listed_branches = { - tag.split("refs/heads/")[-1]: sha - for (sha, tag) in remote_tags - if tag.startswith("refs/heads/") - } - - if version in all_listed_branches: - sha = all_listed_branches[version] - return { - "sdk": sdk, - "alias": version, - "head": True, - "sha": sha, - "tag": version, - } - - if infix and version.startswith(f"{infix}/"): - version = version.split(f"{infix}/")[-1] - - listed_tags = all_listed_tags - if infix: - listed_tags = [ - (sha, tag.split(f"{infix}/")[-1]) - for (sha, tag) in listed_tags - if f"{infix}/" in tag - ] - semver_regex = r"v?\d+\.\d+\.\d+$" - listed_tags = [ - (sha, tag) for (sha, tag) in listed_tags if re.search(semver_regex, tag) - ] - listed_tags.sort(key=lambda item: list(map(int, item[1].strip("v").split(".")))) - alias = version - matching_tags = [] - if version == "latest": - matching_tags = listed_tags[-1:] - else: - if version == "lts": - version = lts_versions[sdk] - matching_tags = [ - (sha, tag) - for (sha, tag) in listed_tags - if tag in [version, f"v{version}"] - ] - if not matching_tags: - raise ValueError(f"Tag [{version}] not found in [{sdk_url}]") - sha, tag = matching_tags[-1] - release = tag - if infix: - release = f"{infix}/{release}" - release = quote(release, safe="-_.~") - return { - "sdk": sdk, - "alias": alias, - "release": release, - "sha": sha, - "tag": tag, - } - except Exception as e: - return { - "sdk": sdk, - "alias": version, - "err": f"Error resolving version {version} for {sdk}: {e}", - } - - -def main(): - if len(sys.argv) < 3: - print("Usage: python resolve_version.py ", file=sys.stderr) - sys.exit(1) - - sdk = sys.argv[1] - versions = sys.argv[2:] - - if sdk not in sdk_urls: - print(f"Unknown SDK: {sdk}", file=sys.stderr) - sys.exit(2) - infix: None | str = None - if sdk == "js": - infix = "sdk" - if sdk == "platform": - 
infix = "service" - - results: list[ResolveResult] = [] - shas: set[str] = set() - for version in versions: - v = resolve(sdk, version, infix) - if is_resolve_success(v): - env = lookup_additional_options(sdk, v["tag"]) - if env: - v["env"] = env - if v["sha"] in shas: - continue - shas.add(v["sha"]) - results.append(v) - - print(json.dumps(results)) - +from pathlib import Path + +# tests/otdf-sdk-mgr/src/ is three levels up from xtest/sdk/scripts/ +sys.path.insert( + 0, + str(Path(__file__).resolve().parent.parent.parent.parent / "otdf-sdk-mgr" / "src"), +) + +# Re-export types and constants for any code that imports this module directly +from otdf_sdk_mgr.config import LTS_VERSIONS as lts_versions # noqa: E402, F401, N812 +from otdf_sdk_mgr.resolve import ( # noqa: E402, F401 + ResolveError, + ResolveResult, + ResolveSuccess, + is_resolve_error, + is_resolve_success, + main, +) if __name__ == "__main__": main() diff --git a/xtest/setup-cli-tool/action.yaml b/xtest/setup-cli-tool/action.yaml index 24fa04e5..840842e3 100644 --- a/xtest/setup-cli-tool/action.yaml +++ b/xtest/setup-cli-tool/action.yaml @@ -76,9 +76,33 @@ runs: env: version_info: ${{ inputs.version-info }} + - name: install released versions + shell: bash + run: | + SDK_MGR_DIR="$(cd "${{ inputs.path }}/../.." && pwd)/otdf-sdk-mgr" + for row in $(echo "${version_info}" | jq -c '.[]'); do + tag=$(echo "$row" | jq -r '.tag') + head=$(echo "$row" | jq -r '.head // false') + release=$(echo "$row" | jq -r '.release // empty') + if [[ "$head" != "true" && -n "$release" ]]; then + echo "Installing ${{ inputs.sdk }} $tag from registry (release: $release)" + if ! 
uv run --project "$SDK_MGR_DIR" otdf-sdk-mgr install artifact \ + --sdk "${{ inputs.sdk }}" --version "$release" \ + --dist-name "$tag"; then + echo " Warning: Artifact installation failed for ${{ inputs.sdk }} $tag" + echo " Will fall back to building from source" + echo "BUILD_FROM_SOURCE_$tag=true" >> "$GITHUB_ENV" + fi + fi + done + env: + version_info: ${{ inputs.version-info }} + - name: checkout version a uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - if: steps.resolve.outputs.version-a != '' + if: >- + steps.resolve.outputs.version-a != '' + && fromJson(steps.resolve.outputs.version-a).head == true with: path: ${{ inputs.path }}/${{ inputs.sdk }}/src/${{ fromJson(steps.resolve.outputs.version-a).tag }} persist-credentials: false @@ -87,7 +111,9 @@ runs: - name: checkout version b uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - if: steps.resolve.outputs.version-b != '' + if: >- + steps.resolve.outputs.version-b != '' + && fromJson(steps.resolve.outputs.version-b).head == true with: path: ${{ inputs.path }}/${{ inputs.sdk }}/src/${{ fromJson(steps.resolve.outputs.version-b).tag }} persist-credentials: false @@ -96,7 +122,9 @@ runs: - name: checkout version c uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - if: steps.resolve.outputs.version-c != '' + if: >- + steps.resolve.outputs.version-c != '' + && fromJson(steps.resolve.outputs.version-c).head == true with: path: ${{ inputs.path }}/${{ inputs.sdk }}/src/${{ fromJson(steps.resolve.outputs.version-c).tag }} persist-credentials: false @@ -105,7 +133,9 @@ runs: - name: checkout version d uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - if: steps.resolve.outputs.version-d != '' + if: >- + steps.resolve.outputs.version-d != '' + && fromJson(steps.resolve.outputs.version-d).head == true with: path: ${{ inputs.path }}/${{ inputs.sdk }}/src/${{ fromJson(steps.resolve.outputs.version-d).tag }} 
persist-credentials: false @@ -115,7 +145,8 @@ runs: - name: post checkout cleanups if: inputs.sdk == 'java' run: | - ${{ inputs.path }}/scripts/post-checkout-java.sh + SDK_MGR_DIR="$(cd "${{ inputs.path }}/../.." && pwd)/otdf-sdk-mgr" + uv run --project "$SDK_MGR_DIR" otdf-sdk-mgr java-fixup shell: bash - name: save env from version-info @@ -131,4 +162,3 @@ runs: done env: version_info: ${{ inputs.version-info }} -