diff --git a/.github/workflows/dev.yml b/.github/workflows/dev.yml
new file mode 100644
index 0000000..b00c680
--- /dev/null
+++ b/.github/workflows/dev.yml
@@ -0,0 +1,31 @@
+name: dev
+on:
+  workflow_dispatch:
+  push:
+    paths:
+      - bin/**
+      - glrd/**
+      - tests/**
+      - poetry.lock
+      - pyproject.toml
+jobs:
+  test:
+    name: test
+    runs-on: "ubuntu-latest"
+    defaults:
+      run:
+        shell: bash
+    steps:
+      - uses: actions/checkout@v4
+      - name: Install Poetry
+        uses: snok/install-poetry@v1
+      - name: Set up Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: 3.13
+      - name: Install GLRD and dependencies
+        run: poetry install --with dev
+      - name: Run linter
+        run: make lint
+      - name: Run tests
+        run: make test
diff --git a/.github/workflows/image-dev.yml b/.github/workflows/image-dev.yml
index 205ae26..21e1713 100644
--- a/.github/workflows/image-dev.yml
+++ b/.github/workflows/image-dev.yml
@@ -3,10 +3,11 @@ on:
   workflow_dispatch:
   push:
     tags-ignore:
-      - 'v[0-9]+.[0-9]+.[0-9]+'
+      - "v[0-9]+.[0-9]+.[0-9]+"
     paths:
       - bin/**
       - glrd/**
+      - tests/**
      - Containerfile
      - poetry.lock
      - pyproject.toml
@@ -20,7 +21,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        arch: [ amd64, arm64 ]
+        arch: [amd64, arm64]
     steps:
       - uses: actions/checkout@v4
         with:
@@ -41,10 +42,10 @@ jobs:
           name: build-${{ matrix.arch }}
           path: /tmp/${{ matrix.arch }}-oci.tar
           if-no-files-found: error
-          retention-days: 1
+          retention-days: 1
   push:
     name: push
-    runs-on: 'ubuntu-latest'
+    runs-on: "ubuntu-latest"
     defaults:
       run:
         shell: bash
@@ -56,12 +57,12 @@ jobs:
         uses: actions/download-artifact@v4
         with:
           name: build-amd64
-          path: /tmp
+          path: /tmp
       - name: download build-arm64
         uses: actions/download-artifact@v4
         with:
           name: build-arm64
-          path: /tmp
+          path: /tmp
       - name: podman login
         run: |
           podman login -u token -p ${{ github.token }} ghcr.io
diff --git a/.gitignore b/.gitignore
index 139db12..39614ed 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,11 +1,10 @@
 *.egg-info
 **/__pycache__
-.artifacts_cache.json
-.artifacts_index.json
 .envrc
 .venv
 _build
 dist/
+glrd/artifacts-cache.json
 shell.nix
 releases.json
 releases.yaml
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..d5fcd96
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,43 @@
+# GLRD Makefile
+
+.PHONY: help install install-dev test test-unit test-integration test-all lint format clean
+
+help: ## Show this help message
+	@echo "Available targets:"
+	@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "  \033[36m%-20s\033[0m %s\n", $$1, $$2}'
+
+install: ## Install dependencies
+	poetry install
+
+install-dev: ## Install development dependencies
+	poetry install --with dev
+
+test: ## Run all tests
+	poetry run pytest
+
+test-unit: ## Run unit tests only
+	poetry run pytest -m unit
+
+test-integration: ## Run integration tests only
+	poetry run pytest -m integration
+
+test-all: ## Run all tests with coverage
+	poetry run pytest --cov=glrd --cov-report=html --cov-report=term
+
+lint: ## Run linting
+	poetry run flake8 --max-line-length 110 glrd/ tests/
+	poetry run black --check glrd/ tests/
+	poetry run autopep8 --diff --max-line-length 110 -r glrd/ tests/
+
+format: ## Format code
+	poetry run black glrd/ tests/
+
+clean: ## Clean up temporary files
+	find . -type f -name "*.pyc" -delete
+	find . -type d -name "__pycache__" -delete
+	find . 
-type d -name "*.egg-info" -exec rm -rf {} +
+	rm -rf build/
+	rm -rf dist/
+	rm -rf htmlcov/
+	rm -rf .coverage
+	rm -rf .pytest_cache/
diff --git a/README.md b/README.md
index 3624d6a..b34ff65 100644
--- a/README.md
+++ b/README.md
@@ -25,7 +25,7 @@ For a general overview about Garden Linux releases and their lifecycle, have a l
 
 ## Overview
 
-The GLRD provides a structured way to store and access release data for Garden Linux, including different release types such as stable, patch, nightly, and development releases. It uses JSON and YAML formats to store release information and supports integration with AWS S3 for storage to host release data..
+The GLRD provides a structured way to store and access release data for Garden Linux, including different release types such as major, minor, nightly, and development releases. It uses JSON and YAML formats to store release information and supports integration with AWS S3 to host the release data.
 
 ![Overview](assets/overview.png)
 
@@ -58,6 +58,7 @@ git clone https://github.com/gardenlinux/glrd.git
 cd glrd
 poetry install
 ```
+
 
 ### Run in container
 
@@ -91,12 +92,12 @@ podman run -it --rm -e AWS_ACCESS_KEY_ID -e AWS_SECRET_ACCESS_KEY -e AWS_SESSION
 Details
 
 ```
-      - name: Get latest GL patch version
+      - name: Get latest GL minor version
         id: gl_version_latest
         uses: gardenlinux/glrd@main
         with:
-          cmd: glrd --no-header --type patch --latest --fields Version
-      - name: Use latest GL patch version
+          cmd: glrd --no-header --type minor --latest --fields Version
+      - name: Use latest GL minor version
         run: echo ${{ steps.gl_version_latest.outputs.result }}
 
       - name: Get latest GL nightly version
@@ -119,16 +120,16 @@ You can manually query the release JSON data by getting them from our production
 
 Query the release type that you want:
 
-- `releases-stable.json`
-- `releases-patch.json`
+- `releases-major.json`
+- `releases-minor.json`
 - `releases-nightly.json`
 - `releases-dev.json`
 
-To e.g. query the stable releases:
+To 
query the major releases: ```bash -❯ curl -s https://gardenlinux-glrd.s3.eu-central-1.amazonaws.com/releases-stable.json -{"releases":[{"name":"stable-27","type":"stable","version":{"major":27},"lifecycle":{"released":{"isodate":"2020-06-09","timestamp":1591694693},"extended":{"isodate":"2020-12-09","timestamp":1607472000},"eol":{"isodate":"2021-03-09","timestamp":1615248000}}},{"name":"stable-184","type":"stable","version":{"major":184},"lifecycle":{"released":{"isodate":"2020-10-29","timestamp":1603984625},"extended":{"isodate":"2020-04-29","timestamp":1588118400},"eol":{"isodate":"2021-07-29","timestamp":1627516800}}},{"name":"stable-318","type":"stable","version":{"major":318},"lifecycle":{"released":{"isodate":"2021-04-28","timestamp":1619614135},"extended":{"isodate":"2021-10-28","timestamp":1635379200},"eol":{"isodate":"2023-01-28","timestamp":1674864000}}},{"name":"stable-576","type":"stable","version":{"major":576},"lifecycle":{"released":{"isodate":"2021-11-17","timestamp":1637142852},"extended":{"isodate":"2023-05-17","timestamp":1684281600},"eol":{"isodate":"2023-08-17","timestamp":1692230400}}},{"name":"stable-934","type":"stable","version":{"major":934},"lifecycle":{"released":{"isodate":"2023-06-05","timestamp":1685968163},"extended":{"isodate":"2023-12-05","timestamp":1701734400},"eol":{"isodate":"2024-03-05","timestamp":1709596800}}},{"name":"stable-1312","type":"stable","version":{"major":1312},"lifecycle":{"released":{"isodate":"2023-11-16","timestamp":1700136050},"extended":{"isodate":"2024-05-03","timestamp":1714694400},"eol":{"isodate":"2024-08-03","timestamp":1722643200}}},{"name":"stable-1443","type":"stable","version":{"major":1443},"lifecycle":{"released":{"isodate":"2024-03-13","timestamp":1710341636},"extended":{"isodate":"2024-09-13","timestamp":1726185600},"eol":{"isodate":"2025-01-13","timestamp":1736726400}}},{"name":"stable-1592","type":"stable","version":{"major":1592},"lifecycle":{"released":{"isodate":"2024-08-12","timestamp":1723457202},"extended":{"isodate":"2025-05-12","timestamp":1747008000},"eol":{"isodate":"2025-08-12","timestamp":1754956800}}}]} +❯ curl -s https://gardenlinux-glrd.s3.eu-central-1.amazonaws.com/releases-major.json 
+{"releases":[{"name":"major-27","type":"major","version":{"major":27},"lifecycle":{"released":{"isodate":"2020-06-09","timestamp":1591694693},"extended":{"isodate":"2020-12-09","timestamp":1607472000},"eol":{"isodate":"2021-03-09","timestamp":1615248000}}},{"name":"major-184","type":"major","version":{"major":184},"lifecycle":{"released":{"isodate":"2020-10-29","timestamp":1603984625},"extended":{"isodate":"2020-04-29","timestamp":1588118400},"eol":{"isodate":"2021-07-29","timestamp":1627516800}}},{"name":"major-318","type":"major","version":{"major":318},"lifecycle":{"released":{"isodate":"2021-04-28","timestamp":1619614135},"extended":{"isodate":"2021-10-28","timestamp":1635379200},"eol":{"isodate":"2023-01-28","timestamp":1674864000}}},{"name":"major-576","type":"major","version":{"major":576},"lifecycle":{"released":{"isodate":"2021-11-17","timestamp":1637142852},"extended":{"isodate":"2023-05-17","timestamp":1684281600},"eol":{"isodate":"2023-08-17","timestamp":1692230400}}},{"name":"major-934","type":"major","version":{"major":934},"lifecycle":{"released":{"isodate":"2023-06-05","timestamp":1685968163},"extended":{"isodate":"2023-12-05","timestamp":1701734400},"eol":{"isodate":"2024-03-05","timestamp":1709596800}}},{"name":"major-1312","type":"major","version":{"major":1312},"lifecycle":{"released":{"isodate":"2023-11-16","timestamp":1700136050},"extended":{"isodate":"2024-05-03","timestamp":1714694400},"eol":{"isodate":"2024-08-03","timestamp":1722643200}}},{"name":"major-1443","type":"major","version":{"major":1443},"lifecycle":{"released":{"isodate":"2024-03-13","timestamp":1710341636},"extended":{"isodate":"2024-09-13","timestamp":1726185600},"eol":{"isodate":"2025-01-13","timestamp":1736726400}}},{"name":"major-1592","type":"major","version":{"major":1592},"lifecycle":{"released":{"isodate":"2024-08-12","timestamp":1723457202},"extended":{"isodate":"2025-05-12","timestamp":1747008000},"eol":{"isodate":"2025-08-12","timestamp":1754956800}}}]} ``` @@ -162,7 +163,7 @@ options: Specify if the input type (default: url). --input-url INPUT_URL Input URL to the releases data. Defaults to gardenlinux-glrd S3 URL. - --no-input-split Do not split Input into stable+patch and nightly. No additional input-files *-nightly and *-dev will be parsed. + --no-input-split Do not split Input into major+minor and nightly. No additional input-files *-nightly and *-dev will be parsed. --output-format {json,yaml,markdown,mermaid_gantt,shell} Output format: json, yaml, markdown, mermaid_gantt, shell (default). --output-description OUTPUT_DESCRIPTION @@ -170,8 +171,8 @@ options: --active Show only active releases. --archived Show only archived releases. --latest Show the latest active major.minor release. - --type TYPE Filter by release types (comma-separated list, default: stable,patch). E.g., --type stable,patch,nightly,dev,next - --version VERSION Filter by a specific version (major or major.minor). E.g., --version 1312 or --version 1312.0 + --type TYPE Filter by release types (comma-separated list, default: major,minor). E.g., --type major,minor,nightly,dev,next + --version VERSION Filter by a specific version (major, major.minor, or major.minor.patch). E.g., --version 1312, --version 1312.0, or --version 2000.0.0 --fields FIELDS Comma-separated list of fields to output. 
Possible fields: Name,Version,Type,GitCommit,GitCommitShort,ReleaseDate,ReleaseTime,ExtendedMaintenance,EndOfMaintenance,Flavors,OCI,AttributesSourceRepo (default: Name,Version,Type,GitCommitShort,ReleaseDate,ExtendedMaintenance,EndOfMaintenance) --no-header Omit the header in shell output. -V show program's version number and exit @@ -185,7 +186,7 @@ options: # default shell output ❯ glrd --latest Name Version Type GitCommitShort ReleaseDate ExtendedMaintenance EndOfMaintenance -patch-1592.6 1592.6 patch cb05e11f 2025-02-19 N/A 2025-08-12 +minor-1592.6 1592.6 minor cb05e11f 2025-02-19 N/A 2025-08-12 ``` #### Get only version field @@ -205,11 +206,12 @@ patch-1592.6 1592.6 patch cb05e11f 2025-02-19 N/A { "releases": [ { - "name": "patch-1592.6", - "type": "patch", + "name": "minor-1592.6", + "type": "minor", "version": { "major": 1592, "minor": 6 + // Note: patch field is only present for versions ≥ 2000.0.0 }, "lifecycle": { "released": { @@ -398,29 +400,33 @@ patch-1592.6 1592.6 patch cb05e11f 2025-02-19 N/A ] } ``` + #### Get json output and filter for version + ``` ❯ glrd --latest --output-format json | jq -r '.releases[] | "\(.version.major).\(.version.minor)"' 1592.6 ``` +**Note**: For versions ≥ 2000.0.0, you can also filter by patch version: `jq -r '.releases[] | "\(.version.major).\(.version.minor).\(.version.patch)"'` + ### Get all active and supported Garden Linux Versions ``` ❯ glrd --active -Name Version Type Git Commit Release date Extended maintenance End of maintenance -stable-1443 1443 stable N/A 2024-03-13 2024-09-13 2025-01-13 -patch-1443.15 1443.15 patch 5d33a69 2024-10-10 N/A 2025-01-13 -stable-1592 1592 stable N/A 2024-08-12 2025-05-12 2025-08-12 -patch-1592.1 1592.1 patch ec945aa 2024-08-22 N/A 2025-08-12 +Name Version Type Git Commit Release date Extended maintenance End of maintenance +major-1443 1443 major N/A 2024-03-13 2024-09-13 2025-01-13 +minor-1443.15 1443.15 minor 5d33a69 2024-10-10 N/A 2025-01-13 +major-1592 1592 major N/A 2024-08-12 2025-05-12 2025-08-12 +minor-1592.1 1592.1 minor ec945aa 2024-08-22 N/A 2025-08-12 ``` ### Create [Mermaid Gantt Chart](https://mermaid.js.org/syntax/gantt.html) for active releases ``` -❯ glrd --active --type next,stable --output-format mermaid_gantt --output-description "Garden Linux active Releases" +❯ glrd --active --type next,major --output-format mermaid_gantt --output-description "Garden Linux active Releases" gantt title Garden Linux active Releases axisFormat %m.%y @@ -441,7 +447,7 @@ gantt Standard maintenance: task, 2024-12-01, 6M Extended maintenance: milestone, 2025-06-01, 0m Extended maintenance: task, 2025-06-01, 3M - End of maintenance: milestone, 2025-09-01, 0m + End of maintenance: milestone, 2025-09-01, 0m ``` ## glrd-manage @@ -475,9 +481,9 @@ options: Prefix for S3 bucket objects. Defaults to empty string. --delete DELETE Delete a release by name (format: type-major.minor). Requires --s3-update. --create-initial-releases CREATE_INITIAL_RELEASES - Comma-separated list of initial releases to retrieve and generate: 'stable,patch,nightly'. - --create CREATE Create a release for this type using the current timestamp and git information (choose one of: stable,patch,nightly,dev,next)'. - --version VERSION Manually specify the version (format: major.minor). + Comma-separated list of initial releases to retrieve and generate: 'major,minor,nightly'. + --create CREATE Create a release for this type using the current timestamp and git information (choose one of: major,minor,nightly,dev,next)'. 
+ --version VERSION Manually specify the version (format: major.minor for versions < 2000.0.0, major.minor.patch for versions ≥ 2000.0.0). --commit COMMIT Manually specify the git commit hash (40 characters). --lifecycle-released-isodatetime LIFECYCLE_RELEASED_ISODATETIME Manually specify the release date and time in ISO format (YYYY-MM-DDTHH:MM:SS). @@ -488,7 +494,7 @@ options: --no-query Do not query and use existing releases using glrd command. Be careful, this can delete your releases. --input-stdin Process a single input from stdin (JSON data). --input Process input from --input-file. - --no-output-split Do not split Output into stable+patch and nightly. Additional output-files *-nightly and *-dev will not be created. + --no-output-split Do not split Output into major+minor and nightly. Additional output-files *-nightly and *-dev will not be created. --s3-create-bucket Create an S3 bucket. --s3-update Update (merge) the generated files with S3. --output-all Download and write all release files found in S3 to local disk @@ -506,15 +512,15 @@ Without passing `--s3-update`, no actual update will be made and changes can saf This will generate the following initial release data ... -- stable releases -- patch releases +- major releases +- minor releases - nightly releases - releases from `releases-input.yaml` (contain manual lifecycle fields) ... and upload it to the default S3 bucket (if `--s3-update` is passed). ``` -❯ glrd-manage --create-initial-releases stable,patch,nightly --input +❯ glrd-manage --create-initial-releases major,minor,nightly --input ``` ### Generate/Update an arbitrary release from JSON/YAML data @@ -525,8 +531,8 @@ This will generate/update a release from JSON data and upload it to the default ❯ echo '{ "releases": [ { - "name": "patch-1592.1", - "type": "patch", + "name": "minor-1592.1", + "type": "minor", "version": { "major": 1592, "minor": 1 @@ -567,8 +573,8 @@ Another approach is writing release YAML data to an input file and use this file ``` ❯ cat releases-input.yaml releases: - - name: patch-1592.1 - type: patch + - name: minor-1592.1 + type: minor version: major: 1592 minor: 1 @@ -595,31 +601,31 @@ releases: ❯ glrd-manage --input --input-file releases-input.yaml ``` -### Create or update a stable release +### Create or update a major release -https://github.com/gardenlinux/glrd?tab=readme-ov-file#default-stable-dates -When creating a new stable release, [Default Stable dates](#default-stable-dates) can be automatically set for you. In addition to that, you can also overwrite the dates by hand. +https://github.com/gardenlinux/glrd?tab=readme-ov-file#default-major-dates +When creating a new major release, [Default Major dates](#default-major-dates) can be automatically set for you. In addition to that, you can also overwrite the dates by hand. ``` # use default dates -❯ glrd-manage --create stable --version 1312 +❯ glrd-manage --create major --version 1312 # overwrite default dates -❯ glrd-manage --create stable --version 1312 --date-time-released 2023-11-16T00:00:00 --date-time-extended 2024-05-03T00:00:00 --date-time-eol 2024-08-03T00:00:00 +❯ glrd-manage --create major --version 1312 --date-time-released 2023-11-16T00:00:00 --date-time-extended 2024-05-03T00:00:00 --date-time-eol 2024-08-03T00:00:00 ``` -### Create or update a patch release +### Create or update a minor release -When creating a new patch release, the previous patch release of the same major release gets automatically updated. 
See [Lifecycle Dependencies](#stable-and-patch-releases) for details.
+When creating a new minor release, the previous minor release of the same major release gets automatically updated. See [Lifecycle Dependencies](#major-and-minor-releases) for details.
 
 ```
-# create new patch
-❯ glrd-manage --create patch --version 1312.7
+# create new minor
+❯ glrd-manage --create minor --version 1312.7
 ```
 
 ### Create a new nightly release
 
-Without any additional parameters, the current timestamp and git information will be used to create releases. For patch, nightly and dev releases, the next free minor version is automatically chosen.
+Without any additional parameters, the current timestamp and git information will be used to create releases. For minor, nightly and dev releases, the next free minor version is automatically chosen.
 
 ```
 ❯ glrd-manage --create nightly
@@ -627,16 +633,23 @@ Without any additional parameters, the current timestamp and git information wil
 
 ## Release Schema
 
-The Garden Linux Release Database (GLRD) uses structured JSON schemas to represent different types of releases: **stable**, **patch**, **nightly**, and **development** releases. Each release type has specific fields that capture essential information about the release.
+The Garden Linux Release Database (GLRD) uses structured JSON schemas to represent different types of releases: **major**, **minor**, **nightly**, and **development** releases. Each release type has specific fields that capture essential information about the release.
+
+### Versioning Scheme
+
+Garden Linux [introduced semantic versioning](https://github.com/gardenlinux/gardenlinux/issues/3069) in [TODO!!! name commit !!!]. GLRD supports both versioning schemes based on the major version number:
+
+- **v1: Versions < 2000.0.0**: Use the `major.minor` format (e.g., `27.0`, `1592.6`)
+- **v2: Versions ≥ 2000.0.0**: Use the `major.minor.patch` format (e.g., `2000.0.0`, `2222.1.5`)
 
-### Stable Releases
+### Major Releases
 
-[Stable releases](https://github.com/gardenlinux/gardenlinux/blob/main/docs/00_introduction/release.md#stable-releases) are major releases that are supported over an extended period of time.
+[Major releases](https://github.com/gardenlinux/gardenlinux/blob/main/docs/00_introduction/release.md#major-releases) are releases that are supported over an extended period of time.
 
 #### Schema Fields
 
-- **`name`**: A string representing the release name (e.g., `stable-1312`).
-- **`type`**: `stable`.
+- **`name`**: A string representing the release name (e.g., `major-1312`).
+- **`type`**: `major`.
 - **`version`**:
   - **`major`**: An integer indicating the major version number (e.g. `1312`).
 - **`lifecycle`**:
   - **`released`**:
     - **`isodate`**: The release date in ISO format.
     - **`timestamp`**: UNIX timestamp for the release date.
   - **`extended`**:
     - **`isodate`**: Extended maintenance date in ISO format.
     - **`timestamp`**: UNIX timestamp for the extended maintenance date.
   - **`eol`**:
     - **`isodate`**: End-of-life date in ISO format.
     - **`timestamp`**: UNIX timestamp for the end-of-life date.
 
-### Patch Releases
+### Minor Releases
 
-[Patch Releases](https://github.com/gardenlinux/gardenlinux/blob/main/docs/00_introduction/release.md#patches) are updates delivered during the standard and extended mainteance periods of [Stable releases](https://github.com/gardenlinux/gardenlinux/blob/main/docs/00_introduction/release.md#stable-releases).
+[Minor Releases](https://github.com/gardenlinux/gardenlinux/blob/main/docs/00_introduction/release.md#patches) are updates delivered during the standard and extended maintenance periods of [Major releases](https://github.com/gardenlinux/gardenlinux/blob/main/docs/00_introduction/release.md#major-releases).
 
 #### Schema Fields
 
-- **`name`**: A string representing the release name (e.g., `patch-1312.1`).
-- **`type`**: `patch`.
+- **`name`**: A string representing the release name (e.g., `minor-1312.1` for v1 versions, `minor-2000.0.0` for v2 versions).
+- **`type`**: `minor`.
 - **`version`**:
   - **`major`**: An integer indicating the major version number (e.g. `1312`).
   - **`minor`**: An integer indicating the minor version number (e.g. `1`).
+  - **`patch`**: An integer indicating the patch version number (only present for versions ≥ 2000.0.0, e.g. `0`).
 - **`lifecycle`**:
   - **`released`**:
     - **`isodate`**: The release date in ISO format.
@@ -683,11 +697,12 @@ The Garden Linux Release Database (GLRD) uses structured JSON schemas to represe
 
 #### Schema Fields
 
-- **`name`**: A string representing the release name (e.g., `nightly-1312.0`).
+- **`name`**: A string representing the release name (e.g., `nightly-1312.0` for v1 versions, `nightly-2000.0.0` for v2 versions).
 - **`type`**: `nightly`.
 - **`version`**:
   - **`major`**: An integer indicating the major version number.
   - **`minor`**: An integer indicating the minor version number.
+  - **`patch`**: An integer indicating the patch version number (only present for versions ≥ 2000.0.0, e.g. `0`).
 - **`lifecycle`**:
   - **`released`**:
     - **`isodate`**: The release date in ISO format.
@@ -701,15 +716,16 @@ The Garden Linux Release Database (GLRD) uses structured JSON schemas to represe
 
 ### Development Releases
 
-[Development releases](TODO: define and link) are used for testing and development purposes, representing the latest changes that may not yet be included in a stable or patch release. These can be manually created by developers.
+[Development releases](TODO: define and link) are used for testing and development purposes, representing the latest changes that may not yet be included in a major or minor release. These can be manually created by developers.
 
 #### Schema Fields
 
-- **`name`**: A string representing the release name (e.g., `dev-1312.0`).
+- **`name`**: A string representing the release name (e.g., `dev-1312.0` for v1 versions, `dev-2000.0.0` for v2 versions).
 - **`type`**: `dev`.
 - **`version`**:
   - **`major`**: An integer indicating the major version number.
   - **`minor`**: An integer indicating the minor version number.
+  - **`patch`**: An integer indicating the patch version number (only present for versions ≥ 2000.0.0, e.g. `0`).
 - **`lifecycle`**:
   - **`released`**:
     - **`isodate`**: The release date in ISO format.
@@ -746,7 +762,7 @@ The Garden Linux Release Database (GLRD) uses structured JSON schemas to represe
 
 The lifecycle fields in the release schemas help track the release dates, extended maintenance dates and end-of-life (EOL) dates for each release.
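
The `patch` field rule used throughout these schemas follows directly from the versioning scheme above. A minimal sketch of that mapping (illustration only; it mirrors the intent of the `validate_input_version_format()` helper this change adds to `glrd/manage.py`, not the shipped code):

```python
# Illustration: map a version string onto the schema's "version" object,
# applying the v1/v2 split at major version 2000.
def version_fields(version: str) -> dict:
    parts = [int(p) for p in version.split(".")]
    major = parts[0]
    if major < 2000:
        if len(parts) != 2:
            raise ValueError(f"{version}: versions < 2000.0.0 use major.minor")
        return {"major": major, "minor": parts[1]}  # v1: no patch field
    if len(parts) != 3:
        raise ValueError(f"{version}: versions >= 2000.0.0 use major.minor.patch")
    return {"major": major, "minor": parts[1], "patch": parts[2]}  # v2


print(version_fields("1592.6"))    # {'major': 1592, 'minor': 6}
print(version_fields("2000.0.0"))  # {'major': 2000, 'minor': 0, 'patch': 0}
```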
-#### Default Stable dates
+#### Default Major dates
 
 The defaults for `extended` and `eol` dates are based on the [Garden Linux Release Plan Overview](https://github.com/gardenlinux/gardenlinux/blob/main/docs/00_introduction/release.md) and defined to be:
 
@@ -757,41 +773,41 @@ The defaults for `extended` and `eol` dates are based on the [Garden Linux Relea
 
 For example:
 
-- **Stable Release `stable-1443`**:
+- **Major Release `major-1443`**:
   - `released`: 2024-03-13
   - `extended`: 2024-09-13
   - `eol`: 2025-01-13
 
-#### Stable and Patch releases
+#### Major and Minor releases
 
-There is a dependency between the `lifecycle` fields of **stable** and **patch** releases:
+There is a dependency between the `lifecycle` fields of **major** and **minor** releases:
 
-- **EOL of Patch Releases**: The `eol` date of a patch release is set to the `released` date of the next patch release. If there is no subsequent patch release, the `eol` date is aligned with the `eol` date of the corresponding stable release.
-- **EOL of Latest Patch Release**: The latest patch release's `eol` date matches the `eol` date of the stable release.
-- **EOL of Stable Releases**: The `eol` date for a stable release marks the end of support for that major version.
+- **EOL of Minor Releases**: The `eol` date of a minor release is set to the `released` date of the next minor release. If there is no subsequent minor release, the `eol` date is aligned with the `eol` date of the corresponding major release.
+- **EOL of Latest Minor Release**: The latest minor release's `eol` date matches the `eol` date of the major release.
+- **EOL of Major Releases**: The `eol` date for a major release marks the end of support for that major version.
 
 This ensures that all minor updates within a major release adhere to the same overall support timeline.
 
 For example:
 
-- **Stable Release `stable-1312`**:
+- **Major Release `major-1312`**:
   - `released`: 2023-11-16
   - `extended`: 2024-05-03
   - `eol`: 2024-08-03
-- **Patch Release `patch-1312.1`**:
+- **Minor Release `minor-1312.1`**:
   - `released`: 2023-11-23
-  - `eol`: 2024-01-15 (next patch release date)
-- **Patch Release `patch-1312.2`**:
+  - `eol`: 2024-01-15 (next minor release date)
+- **Minor Release `minor-1312.2`**:
   - `released`: 2024-01-15
-  - `eol`: 2024-02-14 (next patch release date)
+  - `eol`: 2024-02-14 (next minor release date)
 - ...
-- **Patch Release `patch-1312.7`**:
+- **Minor Release `minor-1312.7`**:
   - `released`: 2024-07-03
-  - `eol`: 2024-08-03 (inherits from stable release eol)
+  - `eol`: 2024-08-03 (inherits from major release eol)
 
-In this example, the `eol` of `patch-1312.1` is set to the `released` date of `patch-1312.2`, and the `eol` of the latest patch release (`patch-1312.7`) is set to the `eol` of the stable release (`patch-1312`).
+In this example, the `eol` of `minor-1312.1` is set to the `released` date of `minor-1312.2`, and the `eol` of the latest minor release (`minor-1312.7`) is set to the `eol` of the major release (`major-1312`).
 
-Please note that the `extended` lifecycle field is not taken into account for patch releases. This is simply an administrative date that has no technical implications.
+Please note that the `extended` lifecycle field is not taken into account for minor releases. This is simply an administrative date that has no technical implications.
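
`glrd-manage` applies this chaining automatically when a new minor release is created. As a rough sketch of the rule itself (hypothetical helper for illustration, not glrd's implementation):

```python
# Each minor release's eol becomes the next minor's release date; the last
# minor inherits the major release's eol (illustrative only).
def chain_minor_eols(minor_releases: list[dict], major_eol_isodate: str) -> None:
    if not minor_releases:
        return
    minor_releases.sort(key=lambda r: r["version"]["minor"])
    for current, following in zip(minor_releases, minor_releases[1:]):
        current["lifecycle"]["eol"] = {
            "isodate": following["lifecycle"]["released"]["isodate"]
        }
    minor_releases[-1]["lifecycle"]["eol"] = {"isodate": major_eol_isodate}


minors = [
    {"version": {"minor": 1}, "lifecycle": {"released": {"isodate": "2023-11-23"}}},
    {"version": {"minor": 2}, "lifecycle": {"released": {"isodate": "2024-01-15"}}},
]
chain_minor_eols(minors, "2024-08-03")
# minor 1 -> eol 2024-01-15, minor 2 (latest) -> eol 2024-08-03
```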
#### Nightly an Dev dates diff --git a/assets/overview.excalidraw b/assets/overview.excalidraw index e40931a..0ec7f37 100644 --- a/assets/overview.excalidraw +++ b/assets/overview.excalidraw @@ -681,11 +681,11 @@ "locked": false, "fontSize": 20, "fontFamily": 5, - "text": "releases-stable.json", + "text": "releases-major.json", "textAlign": "center", "verticalAlign": "middle", "containerId": "Ja2YwuSfTNbOWarmcKJvD", - "originalText": "releases-stable.json", + "originalText": "releases-major.json", "autoResize": true, "lineHeight": 1.25 }, @@ -866,11 +866,11 @@ "locked": false, "fontSize": 20, "fontFamily": 5, - "text": "stable", + "text": "major", "textAlign": "center", "verticalAlign": "middle", "containerId": "l3gC5-Rw97FRtN-srCDfo", - "originalText": "stable", + "originalText": "major", "autoResize": true, "lineHeight": 1.25 }, @@ -1019,11 +1019,11 @@ "locked": false, "fontSize": 16, "fontFamily": 8, - "text": "❯ glrd --type stable --version 1312 --output-format yaml\n- name: stable-1312\n type: stable\n version:\n major: 1312\n lifecycle:\n released:\n isodate: '2023-11-16'\n timestamp: 1700136050\n extended:\n isodate: '2024-05-03'\n timestamp: 1714687200\n eol:\n isodate: '2024-08-03'\n timestamp: 1722636000\n\n❯ glrd --type patch --version 1312.0 --output-format yaml\n- name: patch-1312.0\n type: patch\n version:\n major: 1312\n minor: 0\n lifecycle:\n released:\n isodate: '2023-11-16'\n timestamp: 1700089200\n eol:\n isodate: '2023-11-23'\n timestamp: 1700694000\n git:\n commit: 40b9db2c54c9cac78bcf62a96f5386ac33c1bdab\n commit_short: 40b9db2\n github:\n release: https://github.com/gardenlinux/gardenlinux/releases/tag/1312.0\n\n", + "text": "❯ glrd --type major --version 1312 --output-format yaml\n- name: major-1312\n type: major\n version:\n major: 1312\n lifecycle:\n released:\n isodate: '2023-11-16'\n timestamp: 1700136050\n extended:\n isodate: '2024-05-03'\n timestamp: 1714687200\n eol:\n isodate: '2024-08-03'\n timestamp: 1722636000\n\n❯ glrd --type minor --version 1312.0 --output-format yaml\n- name: minor-1312.0\n type: minor\n version:\n major: 1312\n minor: 0\n lifecycle:\n released:\n isodate: '2023-11-16'\n timestamp: 1700089200\n eol:\n isodate: '2023-11-23'\n timestamp: 1700694000\n git:\n commit: 40b9db2c54c9cac78bcf62a96f5386ac33c1bdab\n commit_short: 40b9db2\n github:\n release: https://github.com/gardenlinux/gardenlinux/releases/tag/1312.0\n\n", "textAlign": "left", "verticalAlign": "top", "containerId": null, - "originalText": "❯ glrd --type stable --version 1312 --output-format yaml\n- name: stable-1312\n type: stable\n version:\n major: 1312\n lifecycle:\n released:\n isodate: '2023-11-16'\n timestamp: 1700136050\n extended:\n isodate: '2024-05-03'\n timestamp: 1714687200\n eol:\n isodate: '2024-08-03'\n timestamp: 1722636000\n\n❯ glrd --type patch --version 1312.0 --output-format yaml\n- name: patch-1312.0\n type: patch\n version:\n major: 1312\n minor: 0\n lifecycle:\n released:\n isodate: '2023-11-16'\n timestamp: 1700089200\n eol:\n isodate: '2023-11-23'\n timestamp: 1700694000\n git:\n commit: 40b9db2c54c9cac78bcf62a96f5386ac33c1bdab\n commit_short: 40b9db2\n github:\n release: https://github.com/gardenlinux/gardenlinux/releases/tag/1312.0\n\n", + "originalText": "❯ glrd --type major --version 1312 --output-format yaml\n- name: major-1312\n type: major\n version:\n major: 1312\n lifecycle:\n released:\n isodate: '2023-11-16'\n timestamp: 1700136050\n extended:\n isodate: '2024-05-03'\n timestamp: 1714687200\n eol:\n isodate: '2024-08-03'\n timestamp: 
1722636000\n\n❯ glrd --type minor --version 1312.0 --output-format yaml\n- name: minor-1312.0\n type: minor\n version:\n major: 1312\n minor: 0\n lifecycle:\n released:\n isodate: '2023-11-16'\n timestamp: 1700089200\n eol:\n isodate: '2023-11-23'\n timestamp: 1700694000\n git:\n commit: 40b9db2c54c9cac78bcf62a96f5386ac33c1bdab\n commit_short: 40b9db2\n github:\n release: https://github.com/gardenlinux/gardenlinux/releases/tag/1312.0\n\n", "autoResize": true, "lineHeight": 1.25 }, @@ -1158,11 +1158,11 @@ "locked": false, "fontSize": 20, "fontFamily": 5, - "text": "releases-patch.json", + "text": "releases-minor.json", "textAlign": "center", "verticalAlign": "middle", "containerId": "6NPB-pPlaLq_OppZlAFUO", - "originalText": "releases-patch.json", + "originalText": "releases-minor.json", "autoResize": true, "lineHeight": 1.25 }, @@ -1195,11 +1195,11 @@ "locked": false, "fontSize": 20, "fontFamily": 5, - "text": "patch", + "text": "minor", "textAlign": "center", "verticalAlign": "middle", "containerId": "pFYx8a7oj8CsnNJHBJ2Qg", - "originalText": "patch", + "originalText": "minor", "autoResize": true, "lineHeight": 1.25 }, @@ -1584,11 +1584,11 @@ "locked": false, "fontSize": 16, "fontFamily": 8, - "text": "❯ glrd-manage --s3-update \\\n --create stable \\\n --version 1312\nINFO: stable-1312 - release will be created.\n\n❯ glrd-manage --s3-update \\\n --create patch \\\n --version 1312.0\nINFO: patch-1312.0 - release will be created.", + "text": "❯ glrd-manage --s3-update \\\n --create major \\\n --version 1312\nINFO: major-1312 - release will be created.\n\n❯ glrd-manage --s3-update \\\n --create minor \\\n --version 1312.0\nINFO: minor-1312.0 - release will be created.", "textAlign": "left", "verticalAlign": "top", "containerId": null, - "originalText": "❯ glrd-manage --s3-update \\\n --create stable \\\n --version 1312\nINFO: stable-1312 - release will be created.\n\n❯ glrd-manage --s3-update \\\n --create patch \\\n --version 1312.0\nINFO: patch-1312.0 - release will be created.", + "originalText": "❯ glrd-manage --s3-update \\\n --create major \\\n --version 1312\nINFO: major-1312 - release will be created.\n\n❯ glrd-manage --s3-update \\\n --create minor \\\n --version 1312.0\nINFO: minor-1312.0 - release will be created.", "autoResize": true, "lineHeight": 1.25 } diff --git a/assets/overview.png b/assets/overview.png index 521ddc8..55a3646 100644 Binary files a/assets/overview.png and b/assets/overview.png differ diff --git a/glrd/manage.py b/glrd/manage.py index aadeb3b..7155451 100755 --- a/glrd/manage.py +++ b/glrd/manage.py @@ -1,16 +1,12 @@ import argparse -import atexit -import base64 import fnmatch import json import logging import os -import re import shutil import subprocess import sys import tempfile -import time from datetime import datetime, timedelta import boto3 @@ -21,321 +17,247 @@ from deepdiff import DeepDiff from jsonschema import validate, ValidationError +from glrd.schema_v1 import SCHEMA_V1 +from glrd.schema_v2 import SCHEMA_V2 + from glrd.query import load_all_releases -from glrd.util import * -from python_gardenlinux_lib.flavors.parse_flavors import * -from python_gardenlinux_lib.s3.s3 import * +from glrd.util import ( + DEFAULTS, + ERROR_CODES, + extract_version_data, + isodate_to_timestamp, + timestamp_to_isodate, + get_version, + NoAliasDumper, + get_flavors_from_git, + get_s3_artifacts_data, + get_flavors_from_s3_artifacts, +) # silence boto3 logging boto3.set_stream_logger(name="botocore.credentials", level=logging.ERROR) -# JSON schema for stable, patch, 
and nightly releases -SCHEMAS = { - "next": { - "type": "object", - "properties": { - "name": {"type": "string"}, - "type": {"enum": ["next"]}, - "version": { - "type": "object", - "properties": { "major": {"enum": ["next"]}}, - "required": ["major"] - }, - "lifecycle": { - "type": "object", - "properties": { - "released": {"type": "object", "properties": { - "isodate": {"type": "string", "format": "date"}, - "timestamp": {"type": "integer"}}, - "required": ["isodate", "timestamp"]}, - "extended": { - "type": "object", - "properties": { - "isodate": {"type": ["string"], "format": "date"}, - "timestamp": {"type": ["integer"]} - } - }, - "eol": { - "type": "object", - "properties": { - "isodate": {"type": ["string"], "format": "date"}, - "timestamp": {"type": ["integer"]} - } - } - }, - "required": ["released", "extended", "eol"] - } - }, - "required": ["name", "type", "version", "lifecycle"] - }, - "stable": { - "type": "object", - "properties": { - "name": {"type": "string"}, - "type": {"enum": ["stable"]}, - "version": { - "type": "object", - "properties": {"major": {"type": "integer"}}, - "required": ["major"] - }, - "lifecycle": { - "type": "object", - "properties": { - "released": {"type": "object", "properties": { - "isodate": {"type": "string", "format": "date"}, - "timestamp": {"type": "integer"}}, - "required": ["isodate", "timestamp"]}, - "extended": { - "type": "object", - "properties": { - "isodate": {"type": ["string"], "format": "date"}, - "timestamp": {"type": ["integer"]} - } - }, - "eol": { - "type": "object", - "properties": { - "isodate": {"type": ["string"], "format": "date"}, - "timestamp": {"type": ["integer"]} - } - } - }, - "required": ["released", "extended", "eol"] - } - }, - "required": ["name", "type", "version", "lifecycle"] - }, - "patch": { - "type": "object", - "properties": { - "name": {"type": "string"}, - "type": {"enum": ["patch"]}, - "version": { - "type": "object", - "properties": { - "major": {"type": "integer"}, - "minor": {"type": "integer"} - }, - "required": ["major", "minor"] - }, - "lifecycle": { - "type": "object", - "properties": { - "released": {"type": "object", "properties": { - "isodate": {"type": "string", "format": "date"}, - "timestamp": {"type": "integer"}}, - "required": ["isodate", "timestamp"]}, - "eol": { - "type": "object", - "properties": { - "isodate": {"type": ["string"], "format": "date"}, - "timestamp": {"type": ["integer"]} - } - } - }, - "required": ["released", "eol"] - }, - "git": { - "type": "object", - "properties": { - "commit": {"type": "string", "pattern": "^[0-9a-f]{40}$"}, - "commit_short": {"type": "string", "pattern": "^[0-9a-f]{7,8}$"} - }, - "required": ["commit", "commit_short"] - }, - "github": { - "type": "object", - "properties": {"release": {"type": "string", "format": "uri"}}, - "required": ["release"] - }, - "flavors": { - "type": "array", - "items": {"type": "string"} - }, - "attributes": { - "type": "object", - "properties": { - "source_repo": { - "type": "boolean", - "default": True - } - }, - "required": ["source_repo"] - } - }, - "required": ["name", "type", "version", "lifecycle", "git", "github"] - }, - "nightly": { - "type": "object", - "properties": { - "name": {"type": "string"}, - "type": {"enum": ["nightly"]}, - "version": { - "type": "object", - "properties": {"major": {"type": "integer"}, "minor": {"type": "integer"}}, - "required": ["major", "minor"] - }, - "lifecycle": { - "type": "object", - "properties": { - "released": {"type": "object", "properties": { - "isodate": {"type": "string", 
"format": "date"}, - "timestamp": {"type": "integer"}}, - "required": ["isodate", "timestamp"]} - }, - "required": ["released"] - }, - "git": { - "type": "object", - "properties": { - "commit": {"type": "string", "pattern": "^[0-9a-f]{40}$"}, - "commit_short": {"type": "string", "pattern": "^[0-9a-f]{7,8}$"} - }, - "required": ["commit", "commit_short"] - }, - "flavors": { - "type": "array", - "items": {"type": "string"} - }, - "attributes": { - "type": "object", - "properties": { - "source_repo": { - "type": "boolean", - "default": True - } - }, - "required": ["source_repo"] - } - }, - "required": ["name", "type", "version", "lifecycle", "git"] - }, - "dev": { - "type": "object", - "properties": { - "name": {"type": "string"}, - "type": {"enum": ["dev"]}, - "version": { - "type": "object", - "properties": {"major": {"type": "integer"}, "minor": {"type": "integer"}}, - "required": ["major", "minor"] - }, - "lifecycle": { - "type": "object", - "properties": { - "released": {"type": "object", "properties": { - "isodate": {"type": "string", "format": "date"}, - "timestamp": {"type": "integer"}}, - "required": ["isodate", "timestamp"]} - }, - "required": ["released"] - }, - "git": { - "type": "object", - "properties": { - "commit": {"type": "string", "pattern": "^[0-9a-f]{40}$"}, - "commit_short": {"type": "string", "pattern": "^[0-9a-f]{7,8}$"} - }, - "required": ["commit", "commit_short"] - }, - "flavors": { - "type": "array", - "items": {"type": "string"} - }, - "attributes": { - "type": "object", - "properties": { - "source_repo": { - "type": "boolean", - "default": True - } - }, - "required": ["source_repo"] - } - }, - "required": ["name", "type", "version", "lifecycle", "git"] - } -} + +def validate_input_version_format(version, release_type): + """ + Validate that the input version format matches schema requirements. + + Args: + version: Version from user input + + Returns: + tuple: (is_valid, error_message) + """ + + version_parts = version.split(".") + + if release_type in ["major", "next"]: + # major and next don't use major.minor.patch + return True, None + + if len(version_parts) == 2: + major = int(version_parts[0]) + # Check if this version requires v2 schema (with patch field) + if major >= 2000: + return ( + False, + f"Version {'.'.join(version_parts)} requires v2 schema " + f"but missing patch version. Use format: major.minor.patch", + ) + return True, None + elif len(version_parts) == 3: + major = int(version_parts[0]) + # Check if this version should use v1 schema (without patch field) + if major < 2000: + return ( + False, + f"Version {'.'.join(version_parts)} uses v1 schema but " + f"includes patch version. Use format: major.minor", + ) + return True, None + else: + return ( + False, + "Invalid version format. Expected major.minor or " "major.minor.patch", + ) + + +def get_schema_for_release(release): + """ + Get the appropriate schema version for a release based on its version number. 
+ + Args: + release: Release dictionary containing type and version information + + Returns: + Schema dictionary appropriate for the release version + """ + release_type = release.get("type") + version = release.get("version", {}) + + # For major and next releases, always use v2 schema + # (they don't have major.minor.patch version numbers >= 2000) + if release_type in ["major", "next"]: + return SCHEMA_V2[release_type] + + # For minor, nightly, and dev releases, determine schema version based + # on version number + if release_type in ["minor", "nightly", "dev"]: + major = version.get("major", 0) + # minor = version.get("minor", 0) # unused + # patch = version.get("patch", 0) # unused + + # Use v2 schema (with patch field) for versions >= 2000.0.0 + if major >= 2000: + return SCHEMA_V2[release_type] + else: + return SCHEMA_V1[release_type] + + return None + # Global variable to store the path of the cloned gardenlinux repository (cached) repo_clone_path = None + def cleanup_temp_repo(): - """Cleanup function to delete the temporary directory at the end of the script.""" - global repo_clone_path + """Cleanup function to delete the temporary directory at the end of the + script.""" + # global repo_clone_path # unused if repo_clone_path and os.path.exists(repo_clone_path): shutil.rmtree(repo_clone_path) + def glrd_query_type(args, release_type): """Retrieve releases of a specific type.""" - releases = load_all_releases(release_type, DEFAULTS['QUERY_INPUT_TYPE'], - DEFAULTS['QUERY_INPUT_URL'], - DEFAULTS['QUERY_INPUT_FILE_PREFIX'], - DEFAULTS['QUERY_INPUT_FORMAT']) - if not releases: - logging.error(f"Error retrieving releases: {result.stderr}") - sys.exit(ERROR_CODES["query_error"]) - return releases + try: + releases = load_all_releases( + release_type, + DEFAULTS["QUERY_INPUT_TYPE"], + DEFAULTS["QUERY_INPUT_URL"], + DEFAULTS["QUERY_INPUT_FILE_PREFIX"], + DEFAULTS["QUERY_INPUT_FORMAT"], + ) + if not releases: + logging.warning( + f"No releases found for type '{release_type}', returning empty list" + ) + return [] + return releases + except SystemExit: + # If load_all_releases exits due to S3 access issues (like in tests), + # return empty list instead of propagating the exit + logging.warning( + f"Could not retrieve releases for type '{release_type}', returning empty list" + ) + return [] + def get_github_releases(): """Fetch releases from the GitHub API using the 'gh' command.""" - command = ["gh", "api", "--paginate", f"/repos/{DEFAULTS['GL_REPO_OWNER']}/{DEFAULTS['GL_REPO_NAME']}/releases"] - result = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) + command = [ + "gh", + "api", + "--paginate", + f"/repos/{DEFAULTS['GL_REPO_OWNER']}/" f"{DEFAULTS['GL_REPO_NAME']}/releases", + ] + result = subprocess.run( + command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True + ) if result.returncode != 0: logging.error(f"Error fetching GitHub releases: {result.stderr}") sys.exit(ERROR_CODES["subprocess_output_error"]) return json.loads(result.stdout) + def get_git_commit_from_tag(tag): """ Fetch the git commit hash for a given tag using the GitHub API. 
""" try: # Use the GitHub API to get the commit hash from the tag name - command = ["gh", "api", f"/repos/{DEFAULTS['GL_REPO_OWNER']}/{DEFAULTS['GL_REPO_NAME']}/git/refs/tags/{tag}", "--jq", ".object.sha"] - result = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) + command = [ + "gh", + "api", + f"/repos/{DEFAULTS['GL_REPO_OWNER']}/" + f"{DEFAULTS['GL_REPO_NAME']}/git/refs/tags/{tag}", + "--jq", + ".object.sha", + ] + result = subprocess.run( + command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True + ) if result.returncode != 0: logging.error(f"Error fetching git commit for tag: {tag}, {result.stderr}") sys.exit(ERROR_CODES["subprocess_output_missing"]) commit = result.stdout.strip() - return commit, commit[:8] # Return full commit and shortened version + return ( + commit, + commit[:8], + ) # Return full commit and shortened version except Exception as e: logging.error(f"Error fetching git commit for tag {tag}: {e}") sys.exit(ERROR_CODES["subprocess_output_error"]) + def ensure_isodate_and_timestamp(lifecycle): """ - Ensure both isodate and timestamp are set for all lifecycle fields (released, extended, eol). - If only one is present, the other is computed. + Ensure both isodate and timestamp are set for all lifecycle fields + (released, extended, eol). If only one is present, the other is + computed. """ - for key in ['released', 'extended', 'eol']: + for key in ["released", "extended", "eol"]: if key in lifecycle and lifecycle[key]: entry = lifecycle[key] # Ensure if 'isodate' exists, 'timestamp' is computed - if 'isodate' in entry and entry['isodate'] and not entry.get('timestamp'): - entry['timestamp'] = isodate_to_timestamp(entry['isodate']) + if "isodate" in entry and entry["isodate"] and not entry.get("timestamp"): + entry["timestamp"] = isodate_to_timestamp(entry["isodate"]) # Ensure if 'timestamp' exists, 'isodate' is computed - elif 'timestamp' in entry and entry['timestamp'] and not entry.get('isodate'): - entry['isodate'] = timestamp_to_isodate(entry['timestamp']) + elif ( + "timestamp" in entry and entry["timestamp"] and not entry.get("isodate") + ): + entry["isodate"] = timestamp_to_isodate(entry["timestamp"]) + -def get_git_commit_at_time(date, time="06:00", branch="main", remote_repo=DEFAULTS['GL_REPO_URL']): - """Fetch the git commit that was at a specific date and time in the main branch, using a temporary cached git clone.""" +def get_git_commit_at_time( + date, time="06:00", branch="main", remote_repo=DEFAULTS["GL_REPO_URL"] +): + """Fetch the git commit that was at a specific date and time in the + main branch, using a temporary cached git clone.""" global repo_clone_path - # Convert the input date and time to the target timezone (UTC) and then to UTC - target_time = datetime.strptime(f"{date} {time}", "%Y-%m-%d %H:%M").astimezone(pytz.timezone('UTC')) - target_time_utc = target_time.astimezone(pytz.UTC).strftime('%Y-%m-%dT%H:%M:%SZ') + # Convert the input date and time to the target timezone (UTC) and + # then to UTC + target_time = datetime.strptime(f"{date} {time}", "%Y-%m-%d %H:%M").astimezone( + pytz.timezone("UTC") + ) + target_time_utc = target_time.astimezone(pytz.UTC).strftime("%Y-%m-%dT%H:%M:%SZ") - # If the repository hasn't been cloned yet, clone it to a dynamically created temp directory + # If the repository hasn't been cloned yet, clone it to a dynamically + # created temp directory if not repo_clone_path: # Create a temporary directory for cloning the repository temp_dir = 
tempfile.mkdtemp(prefix="glrd_temp_repo_") # Perform the shallow clone - clone_command = ["git", "clone", "--depth", "1", "--branch", branch, remote_repo, temp_dir] - clone_result = subprocess.run(clone_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) + clone_command = [ + "git", + "clone", + "--depth", + "1", + "--branch", + branch, + remote_repo, + temp_dir, + ] + clone_result = subprocess.run( + clone_command, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + text=True, + ) if clone_result.returncode != 0: logging.error(f"Error cloning remote repository: {clone_result.stderr}") @@ -343,21 +265,45 @@ def get_git_commit_at_time(date, time="06:00", branch="main", remote_repo=DEFAUL # Fetch full history to enable searching through commits by time fetch_command = ["git", "fetch", "--unshallow"] - fetch_result = subprocess.run(fetch_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, cwd=temp_dir) + fetch_result = subprocess.run( + fetch_command, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + text=True, + cwd=temp_dir, + ) if fetch_result.returncode != 0: - logging.error(f"Error fetching full history from remote repository: {fetch_result.stderr}") + logging.error( + f"Error fetching full history from remote repository: {fetch_result.stderr}" + ) sys.exit(ERROR_CODES["subprocess_output_error"]) # Cache the clone path to reuse it later repo_clone_path = temp_dir # Use `git rev-list` to find the commit before the specified time - rev_list_command = ["git", "rev-list", "-n", "1", "--before", target_time_utc, branch] - rev_list_result = subprocess.run(rev_list_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, cwd=repo_clone_path) + rev_list_command = [ + "git", + "rev-list", + "-n", + "1", + "--before", + target_time_utc, + branch, + ] + rev_list_result = subprocess.run( + rev_list_command, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + text=True, + cwd=repo_clone_path, + ) if rev_list_result.returncode != 0: - logging.error(f"Error fetching git commit for {date} at {time}: {rev_list_result.stderr}") + logging.error( + f"Error fetching git commit for {date} at {time}: {rev_list_result.stderr}" + ) sys.exit(ERROR_CODES["subprocess_output_error"]) commit = rev_list_result.stdout.strip() @@ -371,57 +317,85 @@ def get_git_commit_at_time(date, time="06:00", branch="main", remote_repo=DEFAUL return commit, commit[:8] + def get_garden_version_for_date(release_type, date, existing_releases): """ - Create major and minor version based on the Garden Linux base_date. + Create major.minor.patch version based on the Garden Linux base_date. Logic is taken from `gardenlinux/bin/garden-version`. Major: days since base date. Minor: Next available minor version based on existing releases. + Patch: Next available patch version based on existing releases. 
""" # Calculate major version base_date = datetime(2020, 3, 31, tzinfo=pytz.UTC) major = (date - base_date).days - if release_type == 'next': + if release_type == "next": minor = 0 - elif release_type == 'stable': + patch = 0 + elif release_type == "major": minor = 0 + patch = 0 else: - # Collect existing minor versions for the given major version and release type + # Collect existing minor versions for the given major version and + # release type existing_minor_versions = [ - release['version'].get('minor', -1) + release["version"].get("minor", -1) for release in existing_releases - if release['type'] == release_type and release['version']['major'] == major + if ( + release["type"] == release_type and release["version"]["major"] == major + ) + ] + existing_patch_versions = [ + release["version"].get("patch", -1) + for release in existing_releases + if ( + release["type"] == release_type + and release["version"]["major"] == major + and release["version"]["minor"] == minor + ) ] - logging.debug(f"Existing minor versions for major {major}: {existing_minor_versions}") + logging.debug( + f"Existing patch versions for major {major} and minor " + f"{minor}: {existing_patch_versions}" + ) if existing_minor_versions: minor = max(existing_minor_versions) + 1 else: minor = 0 - logging.debug(f"New {release_type} version for {date} is {major}.{minor}") + if existing_patch_versions: + patch = max(existing_patch_versions) + 1 + else: + patch = 0 + + logging.debug( + f"New {release_type} version for {date} is " f"{major}.{minor}.{patch}" + ) + + return major, minor, patch - return major, minor def create_initial_releases(releases): - """Generate initial stable and patch releases.""" - release_data_stable = [] - release_data_patch = [] + """Generate initial major and minor releases.""" + release_data_major = [] + release_data_minor = [] latest_minor_versions = {} + latest_patch_versions = {} - releases.sort(key=lambda r: extract_version_data(r['tag_name'])) + releases.sort(key=lambda r: extract_version_data(r["tag_name"])) for release in releases: - tag_name = release.get('tag_name') - major, minor = extract_version_data(tag_name) + tag_name = release.get("tag_name") + major, minor, patch = extract_version_data(tag_name) if major is None: continue - # Determine release type: "patch" if minor exists, otherwise "stable" - release_type = "patch" if minor is not None else "stable" + # Determine release type: "minor" if minor exists, otherwise "major" + release_type = "minor" if minor is not None and patch is not None else "major" release_info = { "name": f"{release_type}-{tag_name}", @@ -429,77 +403,100 @@ def create_initial_releases(releases): "version": {"major": major}, "lifecycle": { "released": { - "isodate": release['published_at'][:10], - "timestamp": isodate_to_timestamp(release['published_at']) + "isodate": release["published_at"][:10], + "timestamp": isodate_to_timestamp(release["published_at"]), }, - "eol": { - "isodate": None, - "timestamp": None - } - } + "eol": {"isodate": None, "timestamp": None}, + }, } - if release_type == "stable": - release_data_stable.append(release_info) - logging.debug(f"Initial stable release '{release_info['name']}' created.") + if release_type == "major": + release_data_major.append(release_info) + logging.debug(f"Initial major release '{release_info['name']}' created.") else: - # For patch releases, add git and github data - if release_type == "patch": + # For minor releases, add git and github data + if release_type == "minor": commit, commit_short = 
get_git_commit_from_tag(tag_name) - release_info['version']['minor'] = minor - release_info['git'] = { + release_info["version"]["minor"] = minor + release_info["version"]["patch"] = patch + release_info["git"] = { "commit": commit, - "commit_short": commit_short - } - release_info['github'] = { - "release": release['html_url'] + "commit_short": commit_short, } - release_data_patch.append(release_info) - logging.debug(f"Initial patch release '{release_info['name']}' created.") - - if major not in latest_minor_versions or (minor is not None and minor > latest_minor_versions[major]['minor']): + release_info["github"] = {"release": release["html_url"]} + release_data_minor.append(release_info) + logging.debug( + f"Initial minor release '{release_info['name']}' created." + ) + + if major not in latest_minor_versions or ( + (minor is not None and minor > latest_minor_versions[major]["minor"]) + and (patch is not None and patch > latest_minor_versions[major]["patch"]) + ): latest_minor_versions[major] = { - 'index': len(release_data_patch if release_type == "patch" else release_data_stable) - 1, - 'minor': minor + "index": len( + release_data_minor + if release_type == "minor" + else release_data_major + ) + - 1, + "minor": minor, + "patch": patch, } - return release_data_stable, release_data_patch, latest_minor_versions + return ( + release_data_major, + release_data_minor, + latest_minor_versions, + latest_patch_versions, + ) -def create_initial_nightly_releases(stable_releases): - """Generate initial nightly releases from the earliest stable release.""" + +def create_initial_nightly_releases(major_releases): + """Generate initial nightly releases from the earliest major release.""" release_data = [] release_type = "nightly" # Set the default start date to 2020-06-09 06:00 UTC start_date_default = datetime(2020, 6, 9, 6, 0, 0, tzinfo=pytz.UTC) - if stable_releases: - # Get the earliest stable release timestamp - first_stable_release = min(stable_releases, key=lambda r: r['lifecycle']['released']['timestamp']) + if major_releases: + # Get the earliest major release timestamp + first_major_release = min( + major_releases, + key=lambda r: r["lifecycle"]["released"]["timestamp"], + ) # Convert the timestamp to a datetime object and set the time to 06:00 UTC - start_date = datetime.utcfromtimestamp(first_stable_release['lifecycle']['released']['timestamp']).replace(hour=7, minute=0, second=0, tzinfo=pytz.UTC) + start_date = datetime.utcfromtimestamp( + first_major_release["lifecycle"]["released"]["timestamp"] + ).replace(hour=7, minute=0, second=0, tzinfo=pytz.UTC) else: - logging.info("No stable releases found in the generated data. Using default start date.") - # Use the default start date if no stable releases are available + logging.info( + "No major releases found in the generated data. Using default start date." 
+        )
+        # Use the default start date if no major releases are available
         start_date = start_date_default

     # Ensure current_date is set to 06:00 UTC as well
-    tz = pytz.timezone('UTC')
-    current_date = datetime.now(tz).replace(hour=6, minute=0, second=0, microsecond=0)
+    tz = pytz.timezone("UTC")
+    current_date = datetime.now(tz).replace(hour=6, minute=0, second=0, microsecond=0)

     date = start_date
     while date <= current_date:
-        major, minor = get_garden_version_for_date(release_type, date, [])
-        commit, commit_short = get_git_commit_at_time(date.strftime('%Y-%m-%d'))
-        nightly_name = f"nightly-{major}.{minor}"
+        major, minor, patch = get_garden_version_for_date(release_type, date, [])
+        commit, commit_short = get_git_commit_at_time(date.strftime("%Y-%m-%d"))
+        nightly_name = f"nightly-{major}.{minor}.{patch}"

         release_info = {
             "name": nightly_name,
             "type": "nightly",
-            "version": {"major": major, "minor": minor},
+            "version": {"major": major, "minor": minor, "patch": patch},
             "lifecycle": {
-                "released": {"isodate": date.strftime('%Y-%m-%d'), "timestamp": int(date.timestamp())}
+                "released": {
+                    "isodate": date.strftime("%Y-%m-%d"),
+                    "timestamp": int(date.timestamp()),
+                }
             },
-            "git": {"commit": commit, "commit_short": commit_short}
+            "git": {"commit": commit, "commit_short": commit_short},
         }
         release_data.append(release_info)
         logging.debug(f"Initial nightly release '{release_info['name']}' created.")
@@ -507,41 +504,51 @@ def create_initial_nightly_releases(stable_releases):

     return release_data

+
 def create_single_release(release_type, args, existing_releases):
     """Create a single release of the specified type."""
-    if release_type not in DEFAULTS['RELEASE_TYPES']:
+    if release_type not in DEFAULTS["RELEASE_TYPES"]:
         logging.error(f"Invalid release type: {release_type}")
         sys.exit(ERROR_CODES["parameter_missing"])

     # Check if a manual lifecycle-released-isodatetime is provided, otherwise use the current date
     if args.lifecycle_released_isodatetime:
         try:
-            release_date = datetime.strptime(args.lifecycle_released_isodatetime, "%Y-%m-%dT%H:%M:%S").replace(tzinfo=pytz.UTC)
+            release_date = datetime.strptime(
+                args.lifecycle_released_isodatetime, "%Y-%m-%dT%H:%M:%S"
+            ).replace(tzinfo=pytz.UTC)
         except ValueError:
-            logging.error("Error: Invalid --date-time-release format. Use ISO format: YYYY-MM-DDTHH:MM:SS")
+            logging.error(
+                "Error: Invalid --lifecycle-released-isodatetime format. "
+                "Use ISO format: YYYY-MM-DDTHH:MM:SS"
+            )
             sys.exit(ERROR_CODES["validation_error"])
     else:
-        tz = pytz.timezone('UTC')
+        tz = pytz.timezone("UTC")
         release_date = tz.localize(datetime.now())

-    lifecycle_released_isodate = release_date.strftime('%Y-%m-%d')
+    lifecycle_released_isodate = release_date.strftime("%Y-%m-%d")
     lifecycle_released_timestamp = int(release_date.timestamp())

     if args.lifecycle_extended_isodatetime:
         try:
-            extended_date = datetime.strptime(args.lifecycle_extended_isodatetime, "%Y-%m-%dT%H:%M:%S").replace(tzinfo=pytz.UTC)
-            lifecycle_extended_isodate = extended_date.strftime('%Y-%m-%d')
+            extended_date = datetime.strptime(
+                args.lifecycle_extended_isodatetime, "%Y-%m-%dT%H:%M:%S"
+            ).replace(tzinfo=pytz.UTC)
+            lifecycle_extended_isodate = extended_date.strftime("%Y-%m-%d")
             lifecycle_extended_timestamp = int(extended_date.timestamp())
         except ValueError:
-            logging.error("Error: Invalid --lifecycle-extended-isodatetime format. Use ISO format: YYYY-MM-DDTHH:MM:SS")
+            logging.error(
+                "Error: Invalid --lifecycle-extended-isodatetime format. 
" + "Use ISO format: YYYY-MM-DDTHH:MM:SS" + ) sys.exit(ERROR_CODES["validation_error"]) else: - # for stable - default extended maintenance date is release date + 6 months - if release_type == "stable": + # for major - default extended maintenance date is release date + 6 months + if release_type == "major": extended_date = release_date + relativedelta(months=6) - lifecycle_extended_isodate = extended_date.strftime('%Y-%m-%d') + lifecycle_extended_isodate = extended_date.strftime("%Y-%m-%d") lifecycle_extended_timestamp = int(extended_date.timestamp()) - # patch releases will use set_latest_minor_eol_to_major() to set lifecycle fields + # minor releases will use set_latest_minor_eol_to_major() to set lifecycle fields # other release types to not have extended lifecycle fields else: lifecycle_extended_isodate = None @@ -549,19 +556,24 @@ def create_single_release(release_type, args, existing_releases): if args.lifecycle_eol_isodatetime: try: - eol_date = datetime.strptime(args.lifecycle_eol_isodatetime, "%Y-%m-%dT%H:%M:%S").replace(tzinfo=pytz.UTC) - lifecycle_eol_isodate = eol_date.strftime('%Y-%m-%d') + eol_date = datetime.strptime( + args.lifecycle_eol_isodatetime, "%Y-%m-%dT%H:%M:%S" + ).replace(tzinfo=pytz.UTC) + lifecycle_eol_isodate = eol_date.strftime("%Y-%m-%d") lifecycle_eol_timestamp = int(eol_date.timestamp()) except ValueError: - logging.error("Error: Invalid --lifecycle-eol-isodatetime format. Use ISO format: YYYY-MM-DDTHH:MM:SS") + logging.error( + "Error: Invalid --lifecycle-eol-isodatetime format. " + "Use ISO format: YYYY-MM-DDTHH:MM:SS" + ) sys.exit(ERROR_CODES["validation_error"]) else: - # for stable - default eol date is release date + 9 months - if release_type == "stable": + # for major - default eol date is release date + 9 months + if release_type == "major": eol_date = release_date + relativedelta(months=9) - lifecycle_eol_isodate = eol_date.strftime('%Y-%m-%d') + lifecycle_eol_isodate = eol_date.strftime("%Y-%m-%d") lifecycle_eol_timestamp = int(eol_date.timestamp()) - # patch releases will use set_latest_minor_eol_to_major() to set lifecycle fields + # minor releases will use set_latest_minor_eol_to_major() to set lifecycle fields # other release types to not have extended lifecycle fields else: lifecycle_eol_isodate = None @@ -578,127 +590,180 @@ def create_single_release(release_type, args, existing_releases): commit, commit_short = get_git_commit_at_time(lifecycle_released_isodate) # Check if a manual version is provided, otherwise use garden version for the date - if args.create == 'next': - major = 'next' + if args.create == "next": + major = "next" minor = None - elif args.version and args.create == 'stable': - # For 'stable' releases, version should not contain '.' - if '.' in args.version: - logging.error("Error: Invalid --version format for stable release. Use format: major (integer without '.')") + patch = None + elif args.version and args.create == "major": + # For 'major' releases, version should not contain '.' + if "." in args.version: + logging.error( + "Error: Invalid --version format for major release. " + "Use format: major (integer without '.')" + ) sys.exit(ERROR_CODES["validation_error"]) try: major = int(args.version) minor = None + patch = None except ValueError: - logging.error("Error: Invalid --version format. Major version must be an integer.") + logging.error( + "Error: Invalid --version format. Major version must be an integer." 
+ ) sys.exit(ERROR_CODES["validation_error"]) - elif args.version and args.create != 'stable': - # For other releases, version should be 'major.minor' + elif args.version and args.create != "major": try: - major, minor = map(int, args.version.split('.')) + is_valid, error_message = validate_input_version_format( + args.version, args.create + ) + if not is_valid: + logging.error(f"Error: {error_message}") + sys.exit(ERROR_CODES["validation_error"]) + + version_parts = args.version.split(".") + major, minor = map(int, version_parts[:2]) + if len(version_parts) == 2: + patch = 0 + else: + patch = int(version_parts[2]) + except ValueError: - logging.error("Error: Invalid --version format. Use format: major.minor") + logging.error( + "Error: Invalid --version format. Use format: " + "major.minor (for versions < 2000.0.0) or " + "major.minor.patch (for versions >= 2000.0.0)" + ) sys.exit(ERROR_CODES["validation_error"]) else: - major, minor = get_garden_version_for_date(release_type, release_date, existing_releases) + major, minor, patch = get_garden_version_for_date( + release_type, release_date, existing_releases + ) # Create version object - version = {'major': major, 'minor': minor} + version = {"major": major, "minor": minor, "patch": patch} - # First try to get flavors from flavors.yaml - flavors = parse_flavors_commit(commit, version=version, query_s3=False, logger=logging.getLogger()) + flavors = get_flavors_from_git(commit) # Only if no flavors found in flavors.yaml, try S3 if not flavors: logging.info("No flavors found in flavors.yaml, checking S3 artifacts...") # Get artifacts data from S3 with caching - artifacts_data = get_s3_artifacts( - DEFAULTS['ARTIFACTS_S3_BUCKET_NAME'], - DEFAULTS['ARTIFACTS_S3_PREFIX'], - logger=logging.getLogger() + # Get S3 artifacts using gardenlinux library + artifacts_data = get_s3_artifacts_data( + DEFAULTS["ARTIFACTS_S3_BUCKET_NAME"], + DEFAULTS["ARTIFACTS_S3_PREFIX"], + DEFAULTS["ARTIFACTS_S3_CACHE_FILE"], ) if artifacts_data: - flavors = parse_flavors_commit( - commit, - version=version, - query_s3=True, - s3_objects=artifacts_data, - logger=logging.getLogger() - ) + flavors = get_flavors_from_s3_artifacts(artifacts_data, version, commit) else: logging.warning("No artifacts data available from S3") if not flavors: - logging.info(f"No flavors found anywhere for version {version} (commit {commit_short})") + logging.info( + f"No flavors found anywhere for version {version} (commit {commit_short})" + ) # Create release data release = {} - release['type'] = f"{release_type}" - release['version'] = {} - release['version']['major'] = major - release['lifecycle'] = {} - release['lifecycle']['released'] = {} - release['lifecycle']['released']['isodate'] = lifecycle_released_isodate - release['lifecycle']['released']['timestamp'] = lifecycle_released_timestamp - - if release_type in ['dev', 'nightly']: - release['name'] = f"{release_type}-{major}.{minor}" - release['version']['minor'] = minor - release['git'] = {} - release['git']['commit'] = commit - release['git']['commit_short'] = commit_short - release['flavors'] = flavors - release['attributes'] = {} - release['attributes']['source_repo'] = True + release["type"] = f"{release_type}" + release["version"] = {} + release["version"]["major"] = major + release["lifecycle"] = {} + release["lifecycle"]["released"] = {} + release["lifecycle"]["released"]["isodate"] = lifecycle_released_isodate + release["lifecycle"]["released"]["timestamp"] = lifecycle_released_timestamp + + if release_type in ["dev", "nightly"]: 
+ # Create name and version based on schema version + if major >= 2000: + # v2 schema: include patch version + release["name"] = f"{release_type}-{major}.{minor}.{patch}" + release["version"]["minor"] = minor + release["version"]["patch"] = patch + else: + # v1 schema: exclude patch version + release["name"] = f"{release_type}-{major}.{minor}" + release["version"]["minor"] = minor + # Don't set patch for v1 schema releases + + release["git"] = {} + release["git"]["commit"] = commit + release["git"]["commit_short"] = commit_short + release["flavors"] = flavors + release["attributes"] = {} + release["attributes"]["source_repo"] = True elif release_type == "next": - release['name'] = f"{release_type}" - release['lifecycle']['extended'] = {} - release['lifecycle']['extended']['isodate'] = lifecycle_extended_isodate - release['lifecycle']['extended']['timestamp'] = lifecycle_extended_timestamp - release['lifecycle']['eol'] = {} - release['lifecycle']['eol']['isodate'] = lifecycle_eol_isodate - release['lifecycle']['eol']['timestamp'] = lifecycle_eol_timestamp - elif release_type == "stable": - release['name'] = f"{release_type}-{major}" - release['lifecycle']['extended'] = {} - release['lifecycle']['extended']['isodate'] = lifecycle_extended_isodate - release['lifecycle']['extended']['timestamp'] = lifecycle_extended_timestamp - release['lifecycle']['eol'] = {} - release['lifecycle']['eol']['isodate'] = lifecycle_eol_isodate - release['lifecycle']['eol']['timestamp'] = lifecycle_eol_timestamp - elif release_type == "patch": - release['name'] = f"{release_type}-{major}.{minor}" - release['version']['minor'] = minor - release['lifecycle']['eol'] = {} - release['lifecycle']['eol']['isodate'] = lifecycle_eol_isodate - release['lifecycle']['eol']['timestamp'] = lifecycle_eol_timestamp - release['git'] = {} - release['git']['commit'] = commit - release['git']['commit_short'] = commit_short - release['github'] = {} - release['github']['release'] = f"https://github.com/gardenlinux/gardenlinux/releases/tag/{major}.{minor}" - release['flavors'] = flavors - release['attributes'] = {} - release['attributes']['source_repo'] = True + release["name"] = f"{release_type}" + release["lifecycle"]["extended"] = {} + release["lifecycle"]["extended"]["isodate"] = lifecycle_extended_isodate + release["lifecycle"]["extended"]["timestamp"] = lifecycle_extended_timestamp + release["lifecycle"]["eol"] = {} + release["lifecycle"]["eol"]["isodate"] = lifecycle_eol_isodate + release["lifecycle"]["eol"]["timestamp"] = lifecycle_eol_timestamp + elif release_type == "major": + release["name"] = f"{release_type}-{major}" + release["lifecycle"]["extended"] = {} + release["lifecycle"]["extended"]["isodate"] = lifecycle_extended_isodate + release["lifecycle"]["extended"]["timestamp"] = lifecycle_extended_timestamp + release["lifecycle"]["eol"] = {} + release["lifecycle"]["eol"]["isodate"] = lifecycle_eol_isodate + release["lifecycle"]["eol"]["timestamp"] = lifecycle_eol_timestamp + elif release_type == "minor": + # Create name and version based on schema version + if major >= 2000: + # v2 schema: include patch version + release["name"] = f"{release_type}-{major}.{minor}.{patch}" + release["version"]["minor"] = minor + release["version"]["patch"] = patch + github_version = f"{major}.{minor}.{patch}" + else: + # v1 schema: exclude patch version + release["name"] = f"{release_type}-{major}.{minor}" + release["version"]["minor"] = minor + # Don't set patch for v1 schema releases + github_version = f"{major}.{minor}" + + 
release["lifecycle"]["eol"] = {} + release["lifecycle"]["eol"]["isodate"] = lifecycle_eol_isodate + release["lifecycle"]["eol"]["timestamp"] = lifecycle_eol_timestamp + release["git"] = {} + release["git"]["commit"] = commit + release["git"]["commit_short"] = commit_short + release["github"] = {} + release["github"][ + "release" + ] = f"https://github.com/gardenlinux/gardenlinux/releases/tag/{github_version}" + release["flavors"] = flavors + release["attributes"] = {} + release["attributes"]["source_repo"] = True logging.debug(f"Release '{release['name']}' created.") return release -def delete_release(args, next_releases, stable_releases, patch_releases, nightly_releases, dev_releases): + +def delete_release( + args, + next_releases, + major_releases, + minor_releases, + nightly_releases, + dev_releases, +): """Delete a release by name from the appropriate release list.""" - release_type, major, minor = parse_release_name(args.delete) + release_type, major, minor, patch = parse_release_name(args.delete) # Select the appropriate list based on release_type - if release_type == 'next': + if release_type == "next": release_list = next_releases - elif release_type == 'stable': - release_list = stable_releases - elif release_type == 'patch': - release_list = patch_releases - elif release_type == 'nightly': + elif release_type == "major": + release_list = major_releases + elif release_type == "minor": + release_list = minor_releases + elif release_type == "nightly": release_list = nightly_releases - elif release_type == 'dev': + elif release_type == "dev": release_list = dev_releases else: logging.error(f"Error: Unknown release type '{release_type}' in release name.") @@ -707,7 +772,7 @@ def delete_release(args, next_releases, stable_releases, patch_releases, nightly # Find and remove the release release_found = False for release in release_list: - if release['name'] == args.delete: + if release["name"] == args.delete: release_found = True release_list.remove(release) @@ -717,65 +782,80 @@ def delete_release(args, next_releases, stable_releases, patch_releases, nightly logging.debug(f"Release '{args.delete}' will be deleted.") + def merge_input_data(existing_releases, new_releases): """Merge two lists of releases, updating existing releases with new releases.""" # Create a dictionary of releases by name from existing_releases - releases_by_name = {release['name']: release for release in existing_releases} + releases_by_name = {release["name"]: release for release in existing_releases} # Update or add releases from new_data for new_release in new_releases: - releases_by_name[new_release['name']] = new_release + releases_by_name[new_release["name"]] = new_release # Return the merged list of releases return list(releases_by_name.values()) -def set_latest_minor_eol_to_major(stable_releases, patch_releases): + +def set_latest_minor_eol_to_major(major_releases, minor_releases): """Set the EOL of each minor version to the next higher minor version, - and the EOL of the latest minor version to match the stable release.""" + and the EOL of the latest minor version to match the major release.""" releases_by_major = {} # Group releases by major version - for release in patch_releases: - major = release['version']['major'] - minor = release.get('version', {}).get('minor') + for release in minor_releases: + major = release["version"]["major"] + # minor = release.get("version", {}).get("minor") # unused + # patch = release.get("version", {}).get("patch") # unused releases_by_major.setdefault(major, 
[]).append(release)

     # For each major version, sort the minor releases and set the EOL
     for major, minor_releases in releases_by_major.items():
         # Sort the minor releases by the 'minor' version number
-        minor_releases.sort(key=lambda r: r.get('version', {}).get('minor', 0))
+        minor_releases.sort(
+            key=lambda r: (
+                r.get("version", {}).get("minor", 0),
+                r.get("version", {}).get("patch", 0),
+            )
+        )

-        # Find the corresponding stable release for this major version
-        stable_release = next((r for r in stable_releases if r['version']['major'] == major), None)
+        # Find the corresponding major release for this major version
+        major_release = next(
+            (r for r in major_releases if r["version"]["major"] == major),
+            None,
+        )

         # Loop through all minor releases
         for i, release in enumerate(minor_releases):
-            # If it's the last minor release, set its EOL to the stable release's EOL
+            # If it's the last minor release, set its EOL to the major release's EOL
             if i == len(minor_releases) - 1:
-                if stable_release:
-                    release['lifecycle']['eol'] = stable_release['lifecycle']['eol']
+                if major_release:
+                    release["lifecycle"]["eol"] = major_release["lifecycle"]["eol"]
                 else:
-                    logging.warning(f"No stable release found for major version {major}, skipping EOL update.")
+                    logging.warning(
+                        f"No major release found for major version {major}, skipping EOL update."
+                    )
             else:
                 # Set the EOL to the "released" date of the next minor release
                 next_release = minor_releases[i + 1]
-                release['lifecycle']['eol'] = next_release['lifecycle']['released']
+                release["lifecycle"]["eol"] = next_release["lifecycle"]["released"]

+
 def load_input(filename):
     """Load manual input from a file if it exists."""
     try:
-        input_data = yaml.safe_load(open(filename, 'r'))
+        input_data = yaml.safe_load(open(filename, "r"))

-        merged_releases = input_data.get('releases', [])
-        if len(merged_releases) == 0:
-            logging.error(f"Error, no releases found in JSON from file")
+        merged_releases = input_data.get("releases", [])
+        if len(merged_releases) == 0:
+            logging.error("Error: no releases found in input file")
             sys.exit(ERROR_CODES["input_parameter_missing"])

-        next_releases = [r for r in merged_releases if r['type'] == 'next']
-        stable_releases = [r for r in merged_releases if r['type'] == 'stable']
-        patch_releases = [r for r in merged_releases if r['type'] == 'patch']
-        nightly_releases = [r for r in merged_releases if r['type'] == 'nightly']
-        dev_releases = [r for r in merged_releases if r['type'] == 'dev']
-        return next_releases, stable_releases, patch_releases, nightly_releases, dev_releases
+        next_releases = [r for r in merged_releases if r["type"] == "next"]
+        major_releases = [r for r in merged_releases if r["type"] == "major"]
+        minor_releases = [r for r in merged_releases if r["type"] == "minor"]
+        nightly_releases = [r for r in merged_releases if r["type"] == "nightly"]
+        dev_releases = [r for r in merged_releases if r["type"] == "dev"]
+        return (
+            next_releases,
+            major_releases,
+            minor_releases,
+            nightly_releases,
+            dev_releases,
+        )
     except json.JSONDecodeError as e:
         logging.error(f"Error parsing JSON from file: {str(e)}")
         sys.exit(ERROR_CODES["validation_error"])
@@ -783,6 +863,7 @@ def load_input(filename):
         logging.error(f"Error reading input from file: {str(e)}")
         sys.exit(ERROR_CODES["input_parameter_error"])

+
 def load_input_stdin():
     """Load input from stdin as JSON data."""
     try:
@@ -791,19 +872,30 @@ def load_input_stdin():

         logging.debug(f"Input data from stdin: {input_data}")

-        merged_releases = input_data.get('releases', [])
+        merged_releases = input_data.get("releases", [])
         if 
len(merged_releases) == 0: - logging.error(f"Error, no releases found in JSON from stdin") + logging.error("Error, no releases found in JSON from stdin") sys.exit(ERROR_CODES["input_parameter_missing"]) - next_releases = [r for r in merged_releases if r['type'] == 'next'] - stable_releases = [r for r in merged_releases if r['type'] == 'stable'] - patch_releases = [r for r in merged_releases if r['type'] == 'patch'] - nightly_releases = [r for r in merged_releases if r['type'] == 'nightly'] - dev_releases = [r for r in merged_releases if r['type'] == 'dev'] - - logging.debug(f"Parsed releases from stdin - next: {len(next_releases)}, stable: {len(stable_releases)}, patch: {len(patch_releases)}, nightly: {len(nightly_releases)}, dev: {len(dev_releases)}") + next_releases = [r for r in merged_releases if r["type"] == "next"] + major_releases = [r for r in merged_releases if r["type"] == "major"] + minor_releases = [r for r in merged_releases if r["type"] == "minor"] + nightly_releases = [r for r in merged_releases if r["type"] == "nightly"] + dev_releases = [r for r in merged_releases if r["type"] == "dev"] + + logging.debug( + f"Parsed releases from stdin - " + f"next: {len(next_releases)}, major: {len(major_releases)}, " + f"minor: {len(minor_releases)}, nightly: {len(nightly_releases)}, " + f"dev: {len(dev_releases)}" + ) - return next_releases, stable_releases, patch_releases, nightly_releases, dev_releases + return ( + next_releases, + major_releases, + minor_releases, + nightly_releases, + dev_releases, + ) except json.JSONDecodeError as e: logging.error(f"Error parsing JSON from stdin: {str(e)}") sys.exit(ERROR_CODES["validation_error"]) @@ -811,37 +903,52 @@ def load_input_stdin(): logging.error(f"Error reading input from stdin: {str(e)}") sys.exit(ERROR_CODES["input_parameter_error"]) + def parse_release_name(release_name): - """Parse the release name in the format 'type-major.minor' or 'type-major'.""" - valid_types = ['next', 'stable', 'patch', 'nightly', 'dev'] - type_and_version = release_name.split('-', 1) + """Parse the release name in the format 'type-major.minor.patch' or + 'type-major.minor' or 'type-major'.""" + valid_types = ["next", "major", "minor", "nightly", "dev"] + type_and_version = release_name.split("-", 1) if len(type_and_version) != 2: - logging.error("Error: Invalid release name format. Expected 'type-major.minor' or 'type-major'") + logging.error( + "Error: Invalid release name format. Expected " + "'type-major.minor.patch' or 'type-major.minor' or 'type-major'" + ) sys.exit(ERROR_CODES["validation_error"]) release_type = type_and_version[0] if release_type not in valid_types: - logging.error(f"Error: Invalid release type '{release_type}'. Must be one of {', '.join(valid_types)}.") + logging.error( + f"Error: Invalid release type '{release_type}'. " + f"Must be one of {', '.join(valid_types)}." 
+ ) sys.exit(ERROR_CODES["validation_error"]) version = type_and_version[1] - version_parts = version.split('.') + version_parts = version.split(".") try: - if len(version_parts) == 2: + if len(version_parts) == 3: + major = int(version_parts[0]) + minor = int(version_parts[1]) + patch = int(version_parts[2]) + elif len(version_parts) == 2: major = int(version_parts[0]) minor = int(version_parts[1]) + patch = None elif len(version_parts) == 1: major = int(version_parts[0]) minor = None + patch = None else: logging.error("Error: Invalid version format in release name.") sys.exit(ERROR_CODES["validation_error"]) except ValueError: - logging.error("Error: Major and minor versions must be integers.") + logging.error("Error: Major, minor and patch versions must be integers.") sys.exit(ERROR_CODES["validation_error"]) - return release_type, major, minor + return release_type, major, minor, patch + def validate_release_data(release, errors): """Validate release data using the appropriate JSON schema.""" - schema = SCHEMAS.get(release['type']) + schema = get_schema_for_release(release) if not schema: error_message = f"Unknown release type: {release['type']}" logging.error(error_message) @@ -852,12 +959,16 @@ def validate_release_data(release, errors): return True except ValidationError as e: # Construct the field path that caused the validation error - field_path = '.'.join([str(p) for p in e.absolute_path]) - error_message = f"Validation error for release '{release['name']}' at '{field_path}': {e.message}" + field_path = ".".join([str(p) for p in e.absolute_path]) + error_message = ( + f"Validation error for release '{release['name']}' " + f"at '{field_path}': {e.message}" + ) logging.error(error_message) errors.append(error_message) return False + def validate_all_releases(releases): """Validate all releases and exit if any validation errors are found.""" errors = [] @@ -867,10 +978,11 @@ def validate_all_releases(releases): logging.error(f"Validation failed for {len(errors)} release(s). 
Exiting.")
         sys.exit(ERROR_CODES["validation_error"])

+
 def diff_releases(existing_merged_releases, merged_releases):
     """Show which releases will be created, deleted, or updated."""
-    existing_releases_by_name = {r['name']: r for r in existing_merged_releases}
-    new_releases_by_name = {r['name']: r for r in merged_releases}
+    existing_releases_by_name = {r["name"]: r for r in existing_merged_releases}
+    new_releases_by_name = {r["name"]: r for r in merged_releases}

     existing_merged_release_names = set(existing_releases_by_name.keys())
     merged_release_names = set(new_releases_by_name.keys())
@@ -895,27 +1007,47 @@ def diff_releases(existing_merged_releases, merged_releases):
         if diff:
             logging.info(f"{release_name} - release will be updated.")
             for change_type, changes in diff.items():
-                if change_type == 'values_changed':
+                if change_type == "values_changed":
                     for path, change in changes.items():
                         formatted_path = path.replace("root", "")
-                        logging.info(f"{release_name} - {change_type}: {formatted_path} changed from '{change['old_value']}' to '{change['new_value']}'")
-                elif change_type == 'type_changes':
+                        logging.info(
+                            f"{release_name} - {change_type}: {formatted_path} "
+                            f"changed from '{change['old_value']}' to '{change['new_value']}'"
+                        )
+                elif change_type == "type_changes":
                     for path, change in changes.items():
                         formatted_path = path.replace("root", "")
-                        logging.info(f"{release_name} - {change_type}: {formatted_path} type changed from '{change['old_type']}' to '{change['new_type']}'")
-                elif change_type in ['dictionary_item_added', 'dictionary_item_removed', 'iterable_item_added', 'iterable_item_removed']:
+                        logging.info(
+                            f"{release_name} - {change_type}: {formatted_path} "
+                            f"type changed from '{change['old_type']}' to '{change['new_type']}'"
+                        )
+                elif change_type in [
+                    "dictionary_item_added",
+                    "dictionary_item_removed",
+                    "iterable_item_added",
+                    "iterable_item_removed",
+                ]:
                     changes_list = sorted(list(changes))
                     for change in changes_list:
                         formatted_change = change.replace("root", "")
+                        logging.info(
+                            f"{release_name} - {change_type}: {formatted_change}"
+                        )
+

 def save_output_file(data, filename, format="yaml"):
     """Save the data to a file in the specified format."""
-    with open(filename, 'w') as file:
-        if format == 'yaml':
-            yaml.dump(data, file, default_flow_style=False, sort_keys=False, Dumper=NoAliasDumper)
+    with open(filename, "w") as file:
+        if format == "yaml":
+            yaml.dump(
+                data,
+                file,
+                default_flow_style=False,
+                sort_keys=False,
+                Dumper=NoAliasDumper,
+            )
         else:
             # Optimize JSON by removing unnecessary spaces
-            json.dump(data, file, separators=(',', ':'), ensure_ascii=False)
+            json.dump(data, file, separators=(",", ":"), ensure_ascii=False)
+

 def create_s3_bucket(args, bucket_name=None, region=None):
     """Create an S3 bucket for storing releases data."""
@@ -924,31 +1056,42 @@ def create_s3_bucket(args, bucket_name=None, region=None):
     if not region:
         region = args.s3_bucket_region
     try:
-        s3_client = boto3.client('s3', region_name=region)
-        location = {'LocationConstraint': region}
+        s3_client = boto3.client("s3", region_name=region)
+        location = {"LocationConstraint": region}
         s3_client.create_bucket(Bucket=bucket_name, CreateBucketConfiguration=location)
         logging.info(f"Bucket '{bucket_name}' created successfully.")
         s3_client.put_bucket_tagging(
             Bucket=bucket_name,
             Tagging={
-                'TagSet': [
-                    {'Key': 'sec-by-def-public-storage-exception', 'Value': 'enabled'},
-                    {'Key': 'sec-by-def-objectversioning-exception', 'Value': 'enabled'},
-                    {'Key': 'sec-by-def-encrypt-storage-exception', 
'Value': 'enabled'} + "TagSet": [ + { + "Key": "sec-by-def-public-storage-exception", + "Value": "enabled", + }, + { + "Key": "sec-by-def-objectversioning-exception", + "Value": "enabled", + }, + { + "Key": "sec-by-def-encrypt-storage-exception", + "Value": "enabled", + }, ] - } + }, ) logging.info(f"Tags added to bucket '{bucket_name}'.") s3_client.put_public_access_block( Bucket=bucket_name, PublicAccessBlockConfiguration={ - 'BlockPublicAcls': False, - 'IgnorePublicAcls': False, - 'BlockPublicPolicy': False, - 'RestrictPublicBuckets': False - } + "BlockPublicAcls": False, + "IgnorePublicAcls": False, + "BlockPublicPolicy": False, + "RestrictPublicBuckets": False, + }, + ) + logging.info( + f"Public access block settings disabled for bucket '{bucket_name}'." ) - logging.info(f"Public access block settings disabled for bucket '{bucket_name}'.") bucket_policy = { "Version": "2012-10-17", "Statement": [ @@ -958,7 +1101,7 @@ def create_s3_bucket(args, bucket_name=None, region=None): "Effect": "Allow", "Principal": "*", "Action": "s3:GetObject", - "Resource": f"arn:aws:s3:::{bucket_name}/*" + "Resource": f"arn:aws:s3:::{bucket_name}/*", }, # Deny non-SSL access { @@ -968,25 +1111,26 @@ def create_s3_bucket(args, bucket_name=None, region=None): "Action": "s3:*", "Resource": [ "arn:aws:s3:::gardenlinux-glrd", - "arn:aws:s3:::gardenlinux-glrd/*" + "arn:aws:s3:::gardenlinux-glrd/*", ], - "Condition": { - "Bool": { - "aws:SecureTransport": "false" - } - } - } - ] + "Condition": {"Bool": {"aws:SecureTransport": "false"}}, + }, + ], } - s3_client.put_bucket_policy(Bucket=bucket_name, Policy=json.dumps(bucket_policy)) - logging.info(f"Bucket '{bucket_name}' made public and denied non-SSL access with a bucket policy.") + s3_client.put_bucket_policy( + Bucket=bucket_name, Policy=json.dumps(bucket_policy) + ) + logging.info( + f"Bucket '{bucket_name}' made public and denied non-SSL access with a bucket policy." + ) except ClientError as e: logging.error(f"Error creating bucket: {e}") sys.exit(ERROR_CODES["s3_output_error"]) + def upload_to_s3(file_path, bucket_name, bucket_key): """Upload a file to an S3 bucket.""" - s3_client = boto3.client('s3') + s3_client = boto3.client("s3") try: s3_client.upload_file(file_path, bucket_name, bucket_key) logging.debug(f"Uploaded '{file_path}' to 's3://{bucket_name}/{bucket_key}'.") @@ -994,40 +1138,56 @@ def upload_to_s3(file_path, bucket_name, bucket_key): logging.error(f"Error uploading {file_path} to S3: {e}") sys.exit(ERROR_CODES["s3_output_error"]) + def download_from_s3(bucket_name, bucket_key, local_file): """Download a file from an S3 bucket to a local file.""" - s3_client = boto3.client('s3') + s3_client = boto3.client("s3") try: s3_client.download_file(bucket_name, bucket_key, local_file) - logging.debug(f"Downloaded 's3://{bucket_name}/{bucket_key}' to '{local_file}'.") + logging.debug( + f"Downloaded 's3://{bucket_name}/{bucket_key}' to '{local_file}'." + ) except ClientError as e: - if e.response['Error']['Code'] == '404': - logging.warning(f"No existing file found at 's3://{bucket_name}/{bucket_key}', starting with a fresh file.") + if e.response["Error"]["Code"] == "404": + logging.warning( + f"No existing file found at 's3://{bucket_name}/{bucket_key}', " + f"starting with a fresh file." 
+ ) return None # No existing file, so we return None logging.error(f"Error downloading from S3: {e}") sys.exit(ERROR_CODES["s3_output_error"]) + def merge_existing_s3_data(bucket_name, bucket_key, local_file, new_data): """Download, merge, and return the merged data using a temporary file.""" # Use a temporary file that will be automatically deleted when closed - with tempfile.NamedTemporaryFile(delete=True, mode='w+') as temp_file: + with tempfile.NamedTemporaryFile(delete=True, mode="w+") as temp_file: # Download existing releases.json from S3 if it exists download_from_s3(bucket_name, bucket_key, temp_file.name) # Load existing data if the file was successfully downloaded try: temp_file.seek(0) # Go to the start of the file to read the contents - with open(temp_file.name, 'r') as f: + with open(temp_file.name, "r") as f: file_contents = f.read() # Read file contents as a string existing_data = json.loads(file_contents) # Load JSON from string # Ensure we're working with a list - existing_releases = existing_data if isinstance(existing_data, list) else existing_data.get('releases', []) + existing_releases = ( + existing_data + if isinstance(existing_data, list) + else existing_data.get("releases", []) + ) except (json.JSONDecodeError, FileNotFoundError): - logging.warning("Could not decode the existing JSON from S3 or no file exists. Starting with a fresh file.") + logging.warning( + "Could not decode the existing JSON from S3 or no file " + "exists. Starting with a fresh file." + ) existing_releases = [] # Ensure new_data is treated as a list - new_releases = new_data if isinstance(new_data, list) else new_data.get('releases', []) + new_releases = ( + new_data if isinstance(new_data, list) else new_data.get("releases", []) + ) # Use the merge function to merge new and existing releases merged_releases = merge_input_data(existing_releases, new_releases) @@ -1035,51 +1195,55 @@ def merge_existing_s3_data(bucket_name, bucket_key, local_file, new_data): # Return the merged data as a list return merged_releases + def download_all_s3_files(bucket_name, bucket_prefix): """Download all release files from S3 bucket.""" - s3_client = get_s3_client() + s3_client = boto3.client("s3") try: # List all objects in the bucket with the given prefix - paginator = s3_client.get_paginator('list_objects_v2') + paginator = s3_client.get_paginator("list_objects_v2") found_files = False logging.info(f"Looking for files in s3://{bucket_name}/{bucket_prefix}") for page in paginator.paginate(Bucket=bucket_name, Prefix=bucket_prefix): - if 'Contents' in page: + if "Contents" in page: found_files = True - for obj in page['Contents']: - key = obj['Key'] - if key.endswith('.json'): + for obj in page["Contents"]: + key = obj["Key"] + if key.endswith(".json"): local_file = os.path.basename(key) download_from_s3(bucket_name, key, local_file) if not found_files: - logging.warning(f"No release files found in s3://{bucket_name}/{bucket_prefix}") + logging.warning( + f"No release files found in s3://{bucket_name}/{bucket_prefix}" + ) # Create empty files for each release type - for release_type in DEFAULTS['RELEASE_TYPES']: + for release_type in DEFAULTS["RELEASE_TYPES"]: filename = f"releases-{release_type}.json" - save_output_file({'releases': []}, filename, 'json') + save_output_file({"releases": []}, filename, "json") except Exception as e: logging.error(f"Error downloading files from S3: {e}") + def upload_all_local_files(bucket_name, bucket_prefix): """Upload all local release files to S3.""" - s3_client = 
boto3.client('s3') + s3_client = boto3.client("s3") try: # First find all matching local files matching_files = [] - for file in os.listdir('.'): - for release_type in DEFAULTS['RELEASE_TYPES']: + for file in os.listdir("."): + for release_type in DEFAULTS["RELEASE_TYPES"]: filename = f"releases-{release_type}.json" if fnmatch.fnmatch(file, filename): matching_files.append(file) if not matching_files: - logging.warning(f"No release files found to upload") + logging.warning("No release files found to upload") return # Show what will be uploaded and ask for confirmation @@ -1088,8 +1252,10 @@ def upload_all_local_files(bucket_name, bucket_prefix): bucket_key = f"{bucket_prefix}{file}" print(f" {file} -> s3://{bucket_name}/{bucket_key}") - response = input("\nDo you really want to upload these files to S3? [y/N] ").lower() - if response != 'y': + response = input( + "\nDo you really want to upload these files to S3? [y/N] " + ).lower() + if response != "y": print("Upload cancelled.") return @@ -1109,6 +1275,7 @@ def upload_all_local_files(bucket_name, bucket_prefix): logging.error(f"Error accessing S3: {e}") sys.exit(ERROR_CODES["s3_error"]) + def handle_releases(args): """Handle the creation and deletion of initial or single releases.""" if args.input_all: @@ -1120,41 +1287,63 @@ def handle_releases(args): return if not args.s3_update: - logging.warning(f"'--s3-update' was not passed, skipping S3 update.") + logging.warning("'--s3-update' was not passed, skipping S3 update.") - create_initial_stable, create_initial_patch, create_initial_nightly = False, False, False - next_releases, stable_releases, patch_releases, nightly_releases, dev_releases = [], [], [], [], [] + create_initial_major, create_initial_minor, create_initial_nightly = ( + False, + False, + False, + ) + ( + next_releases, + major_releases, + minor_releases, + nightly_releases, + dev_releases, + ) = ( + [], + [], + [], + [], + [], + ) if args.create_initial_releases: - create_initial_list = args.create_initial_releases.split(',') - create_initial_stable = 'stable' in create_initial_list - create_initial_patch = 'patch' in create_initial_list - create_initial_nightly = 'nightly' in create_initial_list + create_initial_list = args.create_initial_releases.split(",") + create_initial_major = "major" in create_initial_list + create_initial_minor = "minor" in create_initial_list + create_initial_nightly = "nightly" in create_initial_list - # Variables to store inputs, dev/stable/patch releases and nightly releases + # Variables to store inputs, dev/major/minor releases and nightly releases existing_next_releases = [] - existing_stable_releases = [] - existing_patch_releases = [] + existing_major_releases = [] + existing_minor_releases = [] existing_nightly_releases = [] existing_dev_releases = [] existing_merged_releases = [] next_releases = [] - stable_releases = [] - patch_releases = [] + major_releases = [] + minor_releases = [] nightly_releases = [] dev_releases = [] merged_releases = [] if not args.no_query: - # Execute glrd command to fill stable, patch, nightly, and dev releases + # Execute glrd command to fill major, minor, nightly, and dev releases existing_next_releases = glrd_query_type(args, "next") - existing_stable_releases = glrd_query_type(args, "stable") - existing_patch_releases = glrd_query_type(args, "patch") + existing_major_releases = glrd_query_type(args, "major") + existing_minor_releases = glrd_query_type(args, "minor") existing_nightly_releases = glrd_query_type(args, "nightly") - existing_merged_releases 
= existing_next_releases + existing_stable_releases + existing_patch_releases + existing_nightly_releases + existing_dev_releases + existing_merged_releases = ( + existing_next_releases + + existing_major_releases + + existing_minor_releases + + existing_nightly_releases + + existing_dev_releases + ) next_releases.extend(existing_next_releases) - stable_releases.extend(existing_stable_releases) - patch_releases.extend(existing_patch_releases) + major_releases.extend(existing_major_releases) + minor_releases.extend(existing_minor_releases) nightly_releases.extend(existing_nightly_releases) dev_releases.extend(existing_dev_releases) @@ -1162,173 +1351,331 @@ def handle_releases(args): if args.no_query: logging.error("Error: '--delete' cannot run with '--no-query'.") sys.exit(ERROR_CODES["parameter_missing"]) - delete_release(args, next_releases, stable_releases, patch_releases, nightly_releases, dev_releases) + delete_release( + args, + next_releases, + major_releases, + minor_releases, + nightly_releases, + dev_releases, + ) else: - if create_initial_stable or create_initial_patch: + if create_initial_major or create_initial_minor: github_releases = get_github_releases() - stable_releases, patch_releases, latest_minor_versions = create_initial_releases(github_releases) + ( + major_releases, + minor_releases, + latest_minor_versions, + latest_patch_versions, + ) = create_initial_releases(github_releases) # Add stdin input or file input data if provided (existing releases will be overwritten) if args.input_stdin or args.input: if args.input_stdin: - input_next, input_stable, input_patch, input_nightly, input_dev = load_input_stdin() + ( + input_next, + input_major, + input_minor, + input_nightly, + input_dev, + ) = load_input_stdin() elif args.input: - input_next, input_stable, input_patch, input_nightly, input_dev = load_input(args.input_file) + ( + input_next, + input_major, + input_minor, + input_nightly, + input_dev, + ) = load_input(args.input_file) next_releases = merge_input_data(next_releases, input_next) - stable_releases = merge_input_data(stable_releases, input_stable) - patch_releases = merge_input_data(patch_releases, input_patch) + major_releases = merge_input_data(major_releases, input_major) + minor_releases = merge_input_data(minor_releases, input_minor) nightly_releases = merge_input_data(nightly_releases, input_nightly) dev_releases = merge_input_data(dev_releases, input_dev) - # we define stable releases in input file, therefore this has to be run past defining inputs - # Create initial nightly releases if requested (needs stable releases) + # we define major releases in input file, therefore this has to be run past defining inputs + # Create initial nightly releases if requested (needs major releases) if create_initial_nightly: - nightly_releases = create_initial_nightly_releases(stable_releases) + nightly_releases = create_initial_nightly_releases(major_releases) # Create a next release if requested - if args.create == 'next': - release = create_single_release('next', args, next_releases) + if args.create == "next": + release = create_single_release("next", args, next_releases) next_releases = merge_input_data(next_releases, [release]) - # Create a stable release if requested - if args.create == 'stable': - release = create_single_release('stable', args, stable_releases) - stable_releases = merge_input_data(stable_releases, [release]) + # Create a major release if requested + if args.create == "major": + release = create_single_release("major", args, major_releases) + 
major_releases = merge_input_data(major_releases, [release]) - # Create a patch release if requested - if args.create == 'patch': - release = create_single_release('patch', args, patch_releases) - patch_releases = merge_input_data(patch_releases, [release]) + # Create a minor release if requested + if args.create == "minor": + release = create_single_release("minor", args, minor_releases) + minor_releases = merge_input_data(minor_releases, [release]) # Create a nightly release if requested - if args.create == 'nightly': - release = create_single_release('nightly', args, nightly_releases) + if args.create == "nightly": + release = create_single_release("nightly", args, nightly_releases) nightly_releases = merge_input_data(nightly_releases, [release]) # Create a development release if requested - if args.create == 'dev': - release = create_single_release('dev', args, dev_releases) + if args.create == "dev": + release = create_single_release("dev", args, dev_releases) dev_releases = merge_input_data(dev_releases, [release]) - # Set EOL for patch releases based on latest minor versions - set_latest_minor_eol_to_major(stable_releases, patch_releases) + # Set EOL for minor releases based on latest minor versions + set_latest_minor_eol_to_major(major_releases, minor_releases) # Merge all releases into a single list - merged_releases = next_releases + stable_releases + patch_releases + nightly_releases + dev_releases + merged_releases = ( + next_releases + + major_releases + + minor_releases + + nightly_releases + + dev_releases + ) # Ensure timestamps for all releases for release in merged_releases: - ensure_isodate_and_timestamp(release['lifecycle']) + ensure_isodate_and_timestamp(release["lifecycle"]) # Validate all releases validate_all_releases(merged_releases) # split all releases again - next_releases = [r for r in merged_releases if r['type'] == 'next'] - stable_releases = [r for r in merged_releases if r['type'] == 'stable'] - patch_releases = [r for r in merged_releases if r['type'] == 'patch'] - nightly_releases = [r for r in merged_releases if r['type'] == 'nightly'] - dev_releases = [r for r in merged_releases if r['type'] == 'dev'] + next_releases = [r for r in merged_releases if r["type"] == "next"] + major_releases = [r for r in merged_releases if r["type"] == "major"] + minor_releases = [r for r in merged_releases if r["type"] == "minor"] + nightly_releases = [r for r in merged_releases if r["type"] == "nightly"] + dev_releases = [r for r in merged_releases if r["type"] == "dev"] diff_releases(existing_merged_releases, merged_releases) store_releases(args, merged_releases) + def store_releases(args, merged_releases): """Store releases in splitted or not splitted output.""" if args.no_output_split: handle_output(args, args.s3_bucket_name, args.s3_bucket_prefix, merged_releases) else: - handle_splitted_output(args, args.s3_bucket_name, args.s3_bucket_prefix, merged_releases) + handle_splitted_output( + args, args.s3_bucket_name, args.s3_bucket_prefix, merged_releases + ) + def handle_splitted_output(args, bucket_name, bucket_prefix, releases): - """Handle output of splitted releases (next, stable, patch, nightly, dev) to disk and S3.""" - for release_type in DEFAULTS['RELEASE_TYPES']: - releases_filtered = [r for r in releases if r['type'] == release_type] + """Handle output of splitted releases (next, major, minor, nightly, dev) to disk and S3.""" + for release_type in DEFAULTS["RELEASE_TYPES"]: + releases_filtered = [r for r in releases if r["type"] == release_type] if 
releases_filtered: # Always use json format for S3 storage s3_output_file = f"{args.output_file_prefix}-{release_type}.json" # Use requested format for local file - local_output_file = f"{args.output_file_prefix}-{release_type}.{args.output_format}" + local_output_file = ( + f"{args.output_file_prefix}-{release_type}.{args.output_format}" + ) # Save local file in requested format - save_output_file({'releases': releases_filtered}, filename=local_output_file, format=args.output_format) + save_output_file( + {"releases": releases_filtered}, + filename=local_output_file, + format=args.output_format, + ) # Handle S3 upload if the argument is provided if args.s3_update: # For S3, always use JSON format - save_output_file({'releases': releases_filtered}, filename=s3_output_file, format='json') - releases_filtered = merge_existing_s3_data(bucket_name, f"{bucket_prefix}{s3_output_file}", s3_output_file, releases_filtered) - upload_to_s3(s3_output_file, bucket_name, f"{bucket_prefix}{s3_output_file}") + save_output_file( + {"releases": releases_filtered}, + filename=s3_output_file, + format="json", + ) + releases_filtered = merge_existing_s3_data( + bucket_name, + f"{bucket_prefix}{s3_output_file}", + s3_output_file, + releases_filtered, + ) + upload_to_s3( + s3_output_file, + bucket_name, + f"{bucket_prefix}{s3_output_file}", + ) # Clean up temporary JSON file if local format is different - if args.output_format != 'json': + if args.output_format != "json": try: os.remove(s3_output_file) except OSError: pass + def handle_output(args, bucket_name, bucket_prefix, releases): """Handle output of not splitted releases to disk and S3.""" output_file = f"{args.output_file_prefix}.{args.output_format}" - save_output_file({'releases': releases}, filename=output_file, format=args.output_format) + save_output_file( + {"releases": releases}, filename=output_file, format=args.output_format + ) logging.debug(f"Release data saved to '{output_file}'.") # Handle S3 upload if the argument is provided if args.s3_update: - merged_releases = merge_existing_s3_data(bucket_name, f"{bucket_prefix}{os.path.basename(output_file)}", output_file, releases) - upload_to_s3(output_file, bucket_name, f"{bucket_prefix}{os.path.basename(output_file)}") + # merged_releases = merge_existing_s3_data( # unused + merge_existing_s3_data( + bucket_name, + f"{bucket_prefix}{os.path.basename(output_file)}", + output_file, + releases, + ) + upload_to_s3( + output_file, + bucket_name, + f"{bucket_prefix}{os.path.basename(output_file)}", + ) + def parse_arguments(): parser = argparse.ArgumentParser(description="Manage Garden Linux releases data.") - parser.add_argument('--log-level', type=str, - choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'], - default='INFO', help="Set the logging level") - - parser.add_argument('--input-file', type=str, default=DEFAULTS['MANAGE_INPUT_FILE'], - help="The name of the input file (default: releases-input.yaml).") - - parser.add_argument('--output-format', type=str, choices=['yaml', 'json'], - default=DEFAULTS['MANAGE_OUTPUT_FORMAT'], - help="Output format: yaml or json (default: yaml).") - - parser.add_argument('--output-file-prefix', type=str, - default=DEFAULTS['MANAGE_OUTPUT_FILE_PREFIX'], - help="The prefix for output files (default: releases).") - - parser.add_argument('--s3-bucket-name', type=str, - default=DEFAULTS['GLRD_S3_BUCKET_NAME'], - help="Name of S3 bucket. 
Defaults to 'gardenlinux-glrd'.") - - parser.add_argument('--s3-bucket-region', type=str, - default=DEFAULTS['GLRD_S3_BUCKET_REGION'], - help="Region for S3 bucket. Defaults to 'eu-central-1'.") - - parser.add_argument('--s3-bucket-prefix', type=str, - default=DEFAULTS['GLRD_S3_BUCKET_PREFIX'], - help="Prefix for S3 bucket objects. Defaults to empty string.") - - parser.add_argument('--delete', type=str, help="Delete a release by name (format: type-major.minor). Requires --s3-update.") - parser.add_argument('--create-initial-releases', type=str, help="Comma-separated list of initial releases to retrieve and generate: 'stable,patch,nightly'.") - parser.add_argument('--create', type=str, help="Create a release for this type using the current timestamp and git information (choose one of: stable,patch,nightly,dev,next)'.") - parser.add_argument('--version', type=str, help="Manually specify the version (format: major.minor).") - parser.add_argument('--commit', type=str, help="Manually specify the git commit hash (40 characters).") - parser.add_argument('--lifecycle-released-isodatetime', type=str, help="Manually specify the release date and time in ISO format (YYYY-MM-DDTHH:MM:SS).") - parser.add_argument('--lifecycle-extended-isodatetime', type=str, help="Manually specify the extended maintenance date and time in ISO format (YYYY-MM-DDTHH:MM:SS).") - parser.add_argument('--lifecycle-eol-isodatetime', type=str, help="Manually specify the EOL date and time in ISO format (YYYY-MM-DDTHH:MM:SS).") - parser.add_argument('--no-query', action='store_true', help="Do not query and use existing releases using glrd command. Be careful, this can delete your releases.") - parser.add_argument('--input-stdin', action='store_true', help="Process a single input from stdin (JSON data).") - parser.add_argument('--input', action='store_true', help="Process input from --input-file.") - parser.add_argument('--no-output-split', action='store_true', help="Do not split Output into stable+patch and nightly. Additional output-files *-nightly and *-dev will not be created.") - parser.add_argument('--s3-create-bucket', action='store_true', help="Create an S3 bucket.") - parser.add_argument('--s3-update', action='store_true', help="Update (merge) the generated files with S3.") - parser.add_argument('--output-all', action='store_true', - help="Download and write all release files found in S3 to local disk") - parser.add_argument('--input-all', action='store_true', - help="Upload all local release files to S3") - parser.add_argument('-V', action='version', version=f'%(prog)s {get_version()}') + parser.add_argument( + "--log-level", + type=str, + choices=["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"], + default="INFO", + help="Set the logging level", + ) + + parser.add_argument( + "--input-file", + type=str, + default=DEFAULTS["MANAGE_INPUT_FILE"], + help="The name of the input file (default: releases-input.yaml).", + ) + + parser.add_argument( + "--output-format", + type=str, + choices=["yaml", "json"], + default=DEFAULTS["MANAGE_OUTPUT_FORMAT"], + help="Output format: yaml or json (default: yaml).", + ) + + parser.add_argument( + "--output-file-prefix", + type=str, + default=DEFAULTS["MANAGE_OUTPUT_FILE_PREFIX"], + help="The prefix for output files (default: releases).", + ) + + parser.add_argument( + "--s3-bucket-name", + type=str, + default=DEFAULTS["GLRD_S3_BUCKET_NAME"], + help="Name of S3 bucket. 
Defaults to 'gardenlinux-glrd'.",
+    )
+
+    parser.add_argument(
+        "--s3-bucket-region",
+        type=str,
+        default=DEFAULTS["GLRD_S3_BUCKET_REGION"],
+        help="Region for S3 bucket. Defaults to 'eu-central-1'.",
+    )
+
+    parser.add_argument(
+        "--s3-bucket-prefix",
+        type=str,
+        default=DEFAULTS["GLRD_S3_BUCKET_PREFIX"],
+        help="Prefix for S3 bucket objects. Defaults to empty string.",
+    )
+
+    parser.add_argument(
+        "--delete",
+        type=str,
+        help="Delete a release by name (format: type-major.minor or "
+        "type-major.minor.patch). Requires --s3-update.",
+    )
+    parser.add_argument(
+        "--create-initial-releases",
+        type=str,
+        help="Comma-separated list of initial releases to retrieve and "
+        "generate: 'major,minor,nightly'.",
+    )
+    parser.add_argument(
+        "--create",
+        type=str,
+        help="Create a release for this type using the current timestamp "
+        "and git information (choose one of: major,minor,nightly,dev,next).",
+    )
+    parser.add_argument(
+        "--version",
+        type=str,
+        help="Manually specify the version (format: major.minor for "
+        "versions < 2000.0.0, or major.minor.patch for versions >= 2000.0.0).",
+    )
+    parser.add_argument(
+        "--commit",
+        type=str,
+        help="Manually specify the git commit hash (40 characters).",
+    )
+    parser.add_argument(
+        "--lifecycle-released-isodatetime",
+        type=str,
+        help="Manually specify the release date and time in ISO format "
+        "(YYYY-MM-DDTHH:MM:SS).",
+    )
+    parser.add_argument(
+        "--lifecycle-extended-isodatetime",
+        type=str,
+        help="Manually specify the extended maintenance date and time in "
+        "ISO format (YYYY-MM-DDTHH:MM:SS).",
+    )
+    parser.add_argument(
+        "--lifecycle-eol-isodatetime",
+        type=str,
+        help="Manually specify the EOL date and time in ISO format "
+        "(YYYY-MM-DDTHH:MM:SS).",
+    )
+    parser.add_argument(
+        "--no-query",
+        action="store_true",
+        help="Do not query existing releases with the glrd command. "
+        "Be careful, this can delete your releases.",
+    )
+    parser.add_argument(
+        "--input-stdin",
+        action="store_true",
+        help="Process a single input from stdin (JSON data).",
+    )
+    parser.add_argument(
+        "--input", action="store_true", help="Process input from --input-file."
+    )
+    parser.add_argument(
+        "--no-output-split",
+        action="store_true",
+        help="Do not split output into major+minor and nightly. Additional "
+        "output-files *-nightly and *-dev will not be created.",
+    )
+    parser.add_argument(
+        "--s3-create-bucket", action="store_true", help="Create an S3 bucket."
+ ) + parser.add_argument( + "--s3-update", + action="store_true", + help="Update (merge) the generated files with S3.", + ) + parser.add_argument( + "--output-all", + action="store_true", + help="Download and write all release files found in S3 to " "local disk", + ) + parser.add_argument( + "--input-all", + action="store_true", + help="Upload all local release files to S3", + ) + parser.add_argument("-V", action="version", version=f"%(prog)s {get_version()}") args = parser.parse_args() @@ -1341,16 +1688,15 @@ def parse_arguments(): return args + def main(): args = parse_arguments() # Configure logging with the already uppercase level - logging.basicConfig( - level=args.log_level, - format='%(levelname)s: %(message)s' - ) + logging.basicConfig(level=args.log_level, format="%(levelname)s: %(message)s") handle_releases(args) + if __name__ == "__main__": main() diff --git a/glrd/query.py b/glrd/query.py index 74f483c..9d8c937 100755 --- a/glrd/query.py +++ b/glrd/query.py @@ -9,79 +9,169 @@ import tabulate import yaml -from glrd.util import * -from python_gardenlinux_lib.flavors.parse_flavors import * -from python_gardenlinux_lib.s3.s3 import * - -DEFAULTS = dict(DEFAULTS, **{ - 'POSSIBLE_FIELDS_MAP': { - "Name": lambda r: r.get('name', 'N/A'), - "Version": lambda r: get_version_string(r['version'], r.get('type')), - "Type": lambda r: r.get('type', 'N/A'), - "GitCommit": lambda r: r.get('git', {}).get('commit', 'N/A'), - "GitCommitShort": lambda r: r.get('git', {}).get('commit_short', 'N/A'), - "ReleaseDate": lambda r: r['lifecycle']['released'].get('isodate', 'N/A'), - "ReleaseTime": lambda r: timestamp_to_isotime(r['lifecycle']['released'].get('timestamp')), - "ExtendedMaintenance": lambda r: get_extended_maintenance(r), - "EndOfMaintenance": lambda r: r['lifecycle'].get('eol', {}).get('isodate', 'N/A'), - "Flavors": lambda r: ','.join(r.get('flavors', [])) or 'N/A', # Simple comma-separated list for shell output - "OCI": lambda r: get_oci_url(r), - "AttributesSourceRepo": lambda r: str(r.get('attributes', {}).get('source_repo', 'N/A')) - } -}) +from glrd.util import ( + DEFAULTS, + ERROR_CODES, + get_current_timestamp, + timestamp_to_isotime, + get_version, + NoAliasDumper, +) + +DEFAULTS = dict( + DEFAULTS, + **{ + "POSSIBLE_FIELDS_MAP": { + "Name": lambda r: r.get("name", "N/A"), + "Version": lambda r: get_version_string(r["version"], r.get("type")), + "Type": lambda r: r.get("type", "N/A"), + "GitCommit": lambda r: r.get("git", {}).get("commit", "N/A"), + "GitCommitShort": lambda r: r.get("git", {}).get("commit_short", "N/A"), + "ReleaseDate": lambda r: r["lifecycle"]["released"].get("isodate", "N/A"), + "ReleaseTime": lambda r: timestamp_to_isotime( + r["lifecycle"]["released"].get("timestamp") + ), + "ExtendedMaintenance": lambda r: get_extended_maintenance(r), + "EndOfMaintenance": lambda r: r["lifecycle"] + .get("eol", {}) + .get("isodate", "N/A"), + "Flavors": lambda r: ",".join(r.get("flavors", [])) + or "N/A", # Simple comma-separated list for shell output + "OCI": lambda r: get_oci_url(r), + "AttributesSourceRepo": lambda r: str( + r.get("attributes", {}).get("source_repo", "N/A") + ), + } + }, +) + def get_version_string(version, release_type=None): - """Return a version string from a version object. 
Show major only for stable and next releases.""" - if release_type in ['stable', 'next']: - return str(version['major']) # Stable releases show only major version - return f"{version['major']}.{version.get('minor', 0)}" + """ + Return a version string from a version object, respecting versioned schemas. + + Args: + version: Version object containing major, minor, and optionally patch fields + release_type: Type of release (major, minor, nightly, dev, next) + + Returns: + Formatted version string appropriate for the release type and version + """ + if release_type in ["major", "next"]: + return str(version["major"]) # Major releases show only major version + + # For minor, nightly, and dev releases, determine format based on version number + major = version.get("major", 0) + minor = version.get("minor", 0) + patch = version.get("patch", 0) + + # Use v2 schema format (with patch) for versions >= 2000.0.0 + if major >= 2000: + return f"{major}.{minor}.{patch}" + else: + # Use v1 schema format (without patch) for versions < 2000.0.0 + return f"{major}.{minor}" + def is_active_release(release, current_timestamp): """Check if the release is still active based on its EOL timestamp.""" - eol_timestamp = release.get('lifecycle', {}).get('eol', {}).get('timestamp') + eol_timestamp = release.get("lifecycle", {}).get("eol", {}).get("timestamp") return eol_timestamp and eol_timestamp > current_timestamp + def filter_active_releases(releases): """Filter and return only active releases.""" current_timestamp = get_current_timestamp() - return [release for release in releases if is_active_release(release, current_timestamp)] + return [ + release for release in releases if is_active_release(release, current_timestamp) + ] + def is_archived_release(release, current_timestamp): """Check if the release archived based on its EOL timestamp.""" - eol_timestamp = release.get('lifecycle', {}).get('eol', {}).get('timestamp') + eol_timestamp = release.get("lifecycle", {}).get("eol", {}).get("timestamp") return eol_timestamp and eol_timestamp < current_timestamp + def filter_archived_releases(releases): """Filter and return only archived releases.""" current_timestamp = get_current_timestamp() - return [release for release in releases if is_archived_release(release, current_timestamp)] + return [ + release + for release in releases + if is_archived_release(release, current_timestamp) + ] + def filter_releases(releases, release_types=None, version=None): - """Filter releases by type and/or version.""" + """ + Filter releases by type and/or version, respecting versioned schemas. 
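To make the schema split concrete, this is roughly what the rewritten `get_version_string` returns for the different release shapes (the version dicts are illustrative):

```python
get_version_string({"major": 1312, "minor": 7}, "minor")  # "1312.7" (v1 schema)
get_version_string({"major": 2000, "minor": 0, "patch": 1}, "minor")  # "2000.0.1" (v2)
get_version_string({"major": 1312}, "major")  # "1312"
get_version_string({"major": "next"}, "next")  # "next"
```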
+ + Args: + releases: List of release objects to filter + release_types: Comma-separated string of release types to include + version: Version string in format major.minor.patch (patch is optional) + + Returns: + Filtered list of releases + """ if version: - version_parts = version.split('.') + version_parts = version.split(".") major = int(version_parts[0]) minor = int(version_parts[1]) if len(version_parts) > 1 else None + patch = int(version_parts[2]) if len(version_parts) > 2 else None + releases = [ - r for r in releases - if r['version']['major'] == major and (minor is None or r['version'].get('minor', 0) == minor) + r + for r in releases + if r["version"]["major"] == major + and (minor is None or r["version"].get("minor", 0) == minor) + and (patch is None or r["version"].get("patch", 0) == patch) ] + if release_types: - release_types = release_types.split(',') - releases = [r for r in releases if r.get('type') in release_types] + release_types = release_types.split(",") + releases = [r for r in releases if r.get("type") in release_types] + return releases + def find_latest_release(releases): - """Find the latest release by version.""" - return max(releases, key=lambda r: (r['version']['major'], r['version'].get('minor', 0)), default=None) + """ + Find the latest release by version, respecting versioned schemas. + + Args: + releases: List of release objects to search + + Returns: + The latest release object or None if no releases found + """ + + def get_version_key(release): + """Get version key for comparison, respecting versioned schemas.""" + version = release["version"] + major = version["major"] + minor = version.get("minor", 0) + patch = version.get("patch", 0) + + # For versions >= 2000.0.0, include patch in comparison + if major >= 2000: + return (major, minor, patch) + else: + # For versions < 2000.0.0, exclude patch from comparison + return (major, minor, 0) + + return max(releases, key=get_version_key, default=None) + def get_extended_maintenance(release): """Return the extended maintenance date for a release, if available.""" - extended = release['lifecycle'].get('extended', {}) - if extended.get('isodate'): - return extended['isodate'] + extended = release["lifecycle"].get("extended", {}) + if extended.get("isodate"): + return extended["isodate"] # Default to N/A if extended is missing - return 'N/A' + return "N/A" + def duration_in_months(start_date, end_date): """Compute the number of months between two dates.""" @@ -89,62 +179,73 @@ def duration_in_months(start_date, end_date): months = delta.days / 30.44 # Approximate average days in a month return round(months) + def prepare_structured_output(release): """Prepare a release object for JSON/YAML output by adding URLs and organizing fields.""" release_copy = {} # Add core fields first in specific order - core_fields = ['name', 'type', 'version', 'lifecycle', 'git', 'github'] + core_fields = ["name", "type", "version", "lifecycle", "git", "github"] for field in core_fields: if field in release: release_copy[field] = release[field] # Add flavors with URLs - release_copy['flavors'] = format_flavors_with_urls(release) + release_copy["flavors"] = format_flavors_with_urls(release) # Add OCI URL - release_copy['oci'] = get_oci_url(release) + release_copy["oci"] = get_oci_url(release) # Add attributes last - if 'attributes' in release: - release_copy['attributes'] = release['attributes'] + if "attributes" in release: + release_copy["attributes"] = release["attributes"] return release_copy + def 
format_structured_output(releases, output_format): """Format releases as JSON or YAML.""" releases_with_urls = [prepare_structured_output(r) for r in releases] data = {"releases": releases_with_urls} - if output_format == 'json': + if output_format == "json": return json.dumps(data, indent=2) else: # yaml - return yaml.dump(data, default_flow_style=False, sort_keys=False, Dumper=NoAliasDumper) + return yaml.dump( + data, + default_flow_style=False, + sort_keys=False, + Dumper=NoAliasDumper, + ) + def format_tabular_output(releases, fields, no_header, output_format): """Format releases as a table for shell or markdown output.""" # Get the requested fields or use defaults - selected_fields = fields.split(',') if fields else DEFAULTS['QUERY_FIELDS'].split(',') + selected_fields = ( + fields.split(",") if fields else DEFAULTS["QUERY_FIELDS"].split(",") + ) # Get all available fields for validation - all_fields = list(DEFAULTS['POSSIBLE_FIELDS_MAP'].keys()) + all_fields = list(DEFAULTS["POSSIBLE_FIELDS_MAP"].keys()) # Validate and filter the fields selected_fields, _ = filter_fields(all_fields, [], selected_fields) # Generate rows using only the selected fields rows = [ - [DEFAULTS['POSSIBLE_FIELDS_MAP'][field](r) for field in selected_fields] + [DEFAULTS["POSSIBLE_FIELDS_MAP"][field](r) for field in selected_fields] for r in releases ] headers = selected_fields if not no_header else () - if output_format == 'shell': + if output_format == "shell": return tabulate.tabulate(rows, headers, tablefmt="plain", disable_numparse=True) - elif output_format == 'markdown': + elif output_format == "markdown": return tabulate.tabulate(rows, headers, tablefmt="pipe", disable_numparse=True) + def filter_fields(headers, rows, requested_fields): """Filter columns in tabular data based on requested fields.""" # Validate requested fields @@ -152,7 +253,7 @@ def filter_fields(headers, rows, requested_fields): if invalid_fields: logging.error(f"Invalid field(s): {', '.join(invalid_fields)}") logging.error(f"Available fields: {', '.join(headers)}") - sys.exit(ERROR_CODES['invalid_field']) + sys.exit(ERROR_CODES["invalid_field"]) # Get indices of requested fields indices = [headers.index(field) for field in requested_fields] @@ -163,6 +264,7 @@ def filter_fields(headers, rows, requested_fields): return filtered_headers, filtered_rows + def format_mermaid_gantt(args, releases): """Format releases as a Mermaid Gantt chart.""" output = [] @@ -171,75 +273,110 @@ def format_mermaid_gantt(args, releases): output.append(" axisFormat %m.%y") for release in releases: - name = get_version_string(release['version'], release.get('type')) + name = get_version_string(release["version"], release.get("type")) # name = release['name'] output.append(f" section {name}") # Extract dates - released_date_str = release['lifecycle']['released'].get('isodate') - extended_date_str = release['lifecycle'].get('extended', {}).get('isodate') - eol_date_str = release['lifecycle'].get('eol', {}).get('isodate') # End of life + released_date_str = release["lifecycle"]["released"].get("isodate") + extended_date_str = release["lifecycle"].get("extended", {}).get("isodate") + eol_date_str = release["lifecycle"].get("eol", {}).get("isodate") # End of life # Convert dates to datetime objects - date_format = '%Y-%m-%d' - released_date = datetime.strptime(released_date_str, date_format) if released_date_str else None - extended_date = datetime.strptime(extended_date_str, date_format) if extended_date_str else None - eol_date = 
datetime.strptime(eol_date_str, date_format) if eol_date_str else None + date_format = "%Y-%m-%d" + released_date = ( + datetime.strptime(released_date_str, date_format) + if released_date_str + else None + ) + extended_date = ( + datetime.strptime(extended_date_str, date_format) + if extended_date_str + else None + ) + eol_date = ( + datetime.strptime(eol_date_str, date_format) if eol_date_str else None + ) # Add 'Release' milestone if released_date: - released_date_formatted = released_date.strftime('%Y-%m-%d') - output.append(f" Release: milestone, {released_date_formatted}, 0m") + released_date_formatted = released_date.strftime("%Y-%m-%d") + output.append( + f" Release: milestone, {released_date_formatted}, 0m" + ) # Standard maintenance if released_date and extended_date: duration_months = duration_in_months(released_date, extended_date) duration_str = f"{duration_months}M" - start_date_str = released_date.strftime('%Y-%m-%d') - output.append(f" Standard maintenance: task, {start_date_str}, {duration_str}") + start_date_str = released_date.strftime("%Y-%m-%d") + output.append( + f" Standard maintenance: task, {start_date_str}, {duration_str}" + ) # Extended maintenance if extended_date and eol_date: # Extended maintenance start as a milestone - extended_date_formatted = extended_date.strftime('%Y-%m-%d') - output.append(f" Extended maintenance: milestone, {extended_date_formatted}, 0m") + extended_date_formatted = extended_date.strftime("%Y-%m-%d") + output.append( + f" Extended maintenance: milestone, {extended_date_formatted}, 0m" + ) duration_months = duration_in_months(extended_date, eol_date) duration_str = f"{duration_months}M" - start_date_str = extended_date.strftime('%Y-%m-%d') - output.append(f" Extended maintenance: task, {start_date_str}, {duration_str}") + start_date_str = extended_date.strftime("%Y-%m-%d") + output.append( + f" Extended maintenance: task, {start_date_str}, {duration_str}" + ) # End of maintenance milestone - eol_date_formatted = eol_date.strftime('%Y-%m-%d') - output.append(f" End of maintenance: milestone, {eol_date_formatted}, 0m") + eol_date_formatted = eol_date.strftime("%Y-%m-%d") + output.append( + f" End of maintenance: milestone, {eol_date_formatted}, 0m" + ) elif eol_date: # No extended maintenance, but have eol_date - eol_date_formatted = eol_date.strftime('%Y-%m-%d') - output.append(f" End of maintenance: milestone, {eol_date_formatted}, 0m") + eol_date_formatted = eol_date.strftime("%Y-%m-%d") + output.append( + f" End of maintenance: milestone, {eol_date_formatted}, 0m" + ) return "\n".join(output) + def format_output(args, releases, output_format, fields=None, no_header=False): """Format and print release data in the specified format.""" formatted_output = None - if output_format in ['json', 'yaml']: + if output_format in ["json", "yaml"]: formatted_output = format_structured_output(releases, output_format) - elif output_format in ['shell', 'markdown']: - formatted_output = format_tabular_output(releases, fields, no_header, output_format) - elif output_format == 'mermaid_gantt': + elif output_format in ["shell", "markdown"]: + formatted_output = format_tabular_output( + releases, fields, no_header, output_format + ) + elif output_format == "mermaid_gantt": formatted_output = format_mermaid_gantt(args, releases) if formatted_output is not None: print(formatted_output) + def sort_releases(releases): - """Sort releases by major and minor version, handling non-integer majors like 'next'.""" + """ + Sort releases by version, respecting 
versioned schemas. + + Args: + releases: List of release objects to sort + + Returns: + Sorted list of releases + """ + def parse_version_part(part): if isinstance(part, int): return part elif isinstance(part, str): - if part.lower() == 'next': + if part.lower() == "next": # Assign a special value to place 'next' releases appropriately - return float('inf') # Place at the end + return float("inf") # Place at the end else: # Handle other string versions if necessary return -1 @@ -247,12 +384,23 @@ def parse_version_part(part): return -1 # Default value for unexpected types def sort_key(r): - major = parse_version_part(r['version'].get('major')) - minor = parse_version_part(r['version'].get('minor', -1)) - return (major, minor) + """Get sort key for a release, respecting versioned schemas.""" + version = r["version"] + major = parse_version_part(version.get("major")) + minor = parse_version_part(version.get("minor", -1)) + patch = parse_version_part(version.get("patch", -1)) + + # For versions >= 2000.0.0, include patch in sorting + if isinstance(major, int) and major >= 2000: + return (major, minor, patch) + else: + # For versions < 2000.0.0 or non-integer majors (like 'next'), + # exclude patch from sorting + return (major, minor, 0) return sorted(releases, key=sort_key) + def load_releases(input_source, is_url=False): """Load the releases from a file or a URL.""" if is_url: @@ -262,178 +410,264 @@ def load_releases(input_source, is_url=False): return response.json() except requests.RequestException as e: logging.error(f"Error fetching data from URL: {e}") - sys.exit(ERROR_CODES['http_error']) + sys.exit(ERROR_CODES["http_error"]) else: if not os.path.exists(input_source): logging.error(f"Error: File {input_source} does not exist.") - sys.exit(ERROR_CODES['file_not_found']) - with open(input_source, 'r') as file: + sys.exit(ERROR_CODES["file_not_found"]) + with open(input_source, "r") as file: return json.load(file) -def load_split_releases(release_type, input_type, input_url, input_file_prefix, input_format): + +def load_split_releases( + release_type, input_type, input_url, input_file_prefix, input_format +): """Load and split releases based on type.""" - releases_next, releases_stable, releases_patch, releases_nightly, releases_dev = [], [], [], [], [] - types = release_type.split(',') - is_url = (input_type == 'url') + ( + releases_next, + releases_major, + releases_minor, + releases_nightly, + releases_dev, + ) = ( + [], + [], + [], + [], + [], + ) + types = release_type.split(",") + is_url = input_type == "url" - if 'next' in types: - input_file = input_file_prefix + '-next' + '.' + input_format + if "next" in types: + input_file = input_file_prefix + "-next" + "." + input_format if is_url: - input_file = input_url + '/' + input_file - releases_next = load_releases(input_file, is_url=is_url).get('releases', []) + input_file = input_url + "/" + input_file + releases_next = load_releases(input_file, is_url=is_url).get("releases", []) - if 'stable' in types: - input_file = input_file_prefix + '-stable' + '.' + input_format + if "major" in types: + input_file = input_file_prefix + "-major" + "." + input_format if is_url: - input_file = input_url + '/' + input_file - releases_stable = load_releases(input_file, is_url=is_url).get('releases', []) + input_file = input_url + "/" + input_file + releases_major = load_releases(input_file, is_url=is_url).get("releases", []) - if 'patch' in types: - input_file = input_file_prefix + '-patch' + '.' 
+ input_format + if "minor" in types: + input_file = input_file_prefix + "-minor" + "." + input_format if is_url: - input_file = input_url + '/' + input_file - releases_patch = load_releases(input_file, is_url=is_url).get('releases', []) + input_file = input_url + "/" + input_file + releases_minor = load_releases(input_file, is_url=is_url).get("releases", []) - if 'nightly' in types: - input_file = input_file_prefix + '-nightly' + '.' + input_format + if "nightly" in types: + input_file = input_file_prefix + "-nightly" + "." + input_format if is_url: - input_file = input_url + '/' + input_file - releases_nightly = load_releases(input_file, is_url=is_url).get('releases', []) + input_file = input_url + "/" + input_file + releases_nightly = load_releases(input_file, is_url=is_url).get("releases", []) - if 'dev' in types: - input_file = input_file_prefix + '-dev' + '.' + input_format + if "dev" in types: + input_file = input_file_prefix + "-dev" + "." + input_format if is_url: - input_file = input_url + '/' + input_file - releases_dev = load_releases(input_file, is_url=is_url).get('releases', []) + input_file = input_url + "/" + input_file + releases_dev = load_releases(input_file, is_url=is_url).get("releases", []) + + return ( + releases_next + + releases_major + + releases_minor + + releases_nightly + + releases_dev + ) - return releases_next + releases_stable + releases_patch + releases_nightly + releases_dev # def load_all_releases(args): -def load_all_releases(release_type, input_type, input_url, input_file_prefix, input_format, no_input_split=False): +def load_all_releases( + release_type, + input_type, + input_url, + input_file_prefix, + input_format, + no_input_split=False, +): """Load releases either from a single source or split by type.""" - # if args.no_input_split: if no_input_split: - # return load_releases(args.input, is_url=(args.input_type == 'url')).get('releases', []) - return load_releases(input_source, is_url=(input_type == 'url')).get('releases', []) + return load_releases(input_url, is_url=(input_type == "url")).get( + "releases", [] + ) else: - # return load_split_releases(input) - return load_split_releases(release_type, input_type, input_url, input_file_prefix, input_format) + return load_split_releases( + release_type, + input_type, + input_url, + input_file_prefix, + input_format, + ) + def parse_arguments(): """Parse command-line arguments.""" - parser = argparse.ArgumentParser(description="Process and filter releases data from a file or URL.") + parser = argparse.ArgumentParser( + description="Process and filter releases data from a file or URL." 
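For orientation, `load_split_releases` resolves one input per requested type, named `<prefix>-<type>.<format>` and joined onto the input URL in url mode. A sketch with the defaults (prefix `releases`, format `json`):

```python
base_url = "https://gardenlinux-glrd.s3.eu-central-1.amazonaws.com"
for release_type in ["next", "major", "minor", "nightly", "dev"]:
    name = f"releases-{release_type}.json"
    print(f"{base_url}/{name}")  # in file mode, just `name` is opened
```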
+    )
 
-    parser.add_argument('--input-format', type=str, choices=['yaml', 'json'],
-                        default=DEFAULTS['QUERY_INPUT_FORMAT'],
-                        help="Input format: 'yaml' or 'json' (default: json).")
+    parser.add_argument(
+        "--input-format",
+        type=str,
+        choices=["yaml", "json"],
+        default=DEFAULTS["QUERY_INPUT_FORMAT"],
+        help="Input format: 'yaml' or 'json' (default: json).",
+    )
 
-    parser.add_argument('--input-file-prefix', type=str,
-                        default=DEFAULTS['QUERY_INPUT_FILE_PREFIX'],
-                        help="The prefix to get input files (default: releases).")
+    parser.add_argument(
+        "--input-file-prefix",
+        type=str,
+        default=DEFAULTS["QUERY_INPUT_FILE_PREFIX"],
+        help="The prefix to get input files (default: releases).",
+    )
 
-    parser.add_argument('--input-type', choices=['file', 'url'],
-                        default=DEFAULTS['QUERY_INPUT_TYPE'],
-                        help="Specify if the input type (default: url).")
+    parser.add_argument(
+        "--input-type",
+        choices=["file", "url"],
+        default=DEFAULTS["QUERY_INPUT_TYPE"],
+        help="Specify the input type (default: url).",
+    )
 
-    parser.add_argument('--input-url', type=str,
-                        default=DEFAULTS['QUERY_INPUT_URL'],
-                        help="Input URL to the releases data. Defaults to gardenlinux-glrd S3 URL.")
+    parser.add_argument(
+        "--input-url",
+        type=str,
+        default=DEFAULTS["QUERY_INPUT_URL"],
+        help="Input URL to the releases data. Defaults to gardenlinux-glrd S3 URL.",
+    )
 
-    parser.add_argument('--no-input-split', action='store_true',
-                        help="Do not split Input into stable+patch and nightly. No additional input-files *-nightly and *-dev will be parsed.")
+    parser.add_argument(
+        "--no-input-split",
+        action="store_true",
+        help="Do not split input into major+minor and nightly. No additional "
+        "input files *-nightly and *-dev will be parsed.",
+    )
 
-    parser.add_argument('--output-format',
-                        choices=['json', 'yaml', 'markdown', 'mermaid_gantt', 'shell'],
-                        default=DEFAULTS['QUERY_OUTPUT_TYPE'],
-                        help="Output format: json, yaml, markdown, mermaid_gantt, shell (default).")
+    parser.add_argument(
+        "--output-format",
+        choices=["json", "yaml", "markdown", "mermaid_gantt", "shell"],
+        default=DEFAULTS["QUERY_OUTPUT_TYPE"],
+        help="Output format: json, yaml, markdown, mermaid_gantt, shell (default).",
+    )
 
-    parser.add_argument('--output-description', type=str,
-                        default=DEFAULTS['QUERY_OUTPUT_DESCRIPTION'],
-                        help="Description, added to certain outputs, e.g. mermaid (default: 'Garden Linux Releases').")
+    parser.add_argument(
+        "--output-description",
+        type=str,
+        default=DEFAULTS["QUERY_OUTPUT_DESCRIPTION"],
+        help="Description, added to certain outputs, e.g. mermaid "
+        "(default: 'Garden Linux Releases').",
+    )
 
-    parser.add_argument('--active', action='store_true',
-                        help="Show only active releases.")
+    parser.add_argument(
+        "--active", action="store_true", help="Show only active releases."
+    )
 
-    parser.add_argument('--archived', action='store_true',
-                        help="Show only archived releases.")
+    parser.add_argument(
+        "--archived", action="store_true", help="Show only archived releases."
+    )
 
-    parser.add_argument('--latest', action='store_true',
-                        help="Show the latest active major.minor release.")
+    parser.add_argument(
+        "--latest",
+        action="store_true",
+        help="Show the latest active major.minor.patch release.",
+    )
 
-    parser.add_argument('--type', type=str,
-                        default=DEFAULTS['QUERY_TYPE'],
-                        help="Filter by release types (comma-separated list, default: stable,patch).
E.g., --type stable,patch,nightly,dev,next") + parser.add_argument( + "--type", + type=str, + default=DEFAULTS["QUERY_TYPE"], + help="Filter by release types (comma-separated list, default: " + "major,minor). E.g., --type major,minor,nightly,dev,next", + ) - parser.add_argument('--version', type=str, - help="Filter by a specific version (major or major.minor). E.g., --version 1312 or --version 1312.0") + parser.add_argument( + "--version", + type=str, + help="Filter by a specific version (major or major.minor.patch). " + "E.g., --version 1312 or --version 1312.0 or --version 1312.0.0", + ) - parser.add_argument('--fields', type=str, - help=("Comma-separated list of fields to output. Possible fields: " + - ",".join(DEFAULTS['POSSIBLE_FIELDS_MAP'].keys()) + - " (default: " + DEFAULTS['QUERY_FIELDS'] + ")")) + parser.add_argument( + "--fields", + type=str, + help=( + "Comma-separated list of fields to output. Possible fields: " + + ",".join(DEFAULTS["POSSIBLE_FIELDS_MAP"].keys()) + + " (default: " + + DEFAULTS["QUERY_FIELDS"] + + ")" + ), + ) - parser.add_argument('--no-header', action='store_true', - help="Omit the header in shell output.") + parser.add_argument( + "--no-header", + action="store_true", + help="Omit the header in shell output.", + ) - parser.add_argument('-V', action='version', version=f'%(prog)s {get_version()}') + parser.add_argument("-V", action="version", version=f"%(prog)s {get_version()}") return parser.parse_args() + def get_platform_from_flavor(flavor): """Extract platform from flavor string.""" try: - return flavor.split('-')[0] + return flavor.split("-")[0] except Exception as e: logging.error(f"Error extracting platform from flavor {flavor}: {e}") - sys.exit(ERROR_CODES['format_error']) + sys.exit(ERROR_CODES["format_error"]) + def prepare_oci_flavor_url(flavor, version, platform): """Create OCI URL for container/bare platforms.""" try: if platform == "container": - return { - 'oci': f"{DEFAULTS['CONTAINER_REGISTRY']}:{version}" - } + return {"oci": f"{DEFAULTS['CONTAINER_REGISTRY']}:{version}"} elif platform == "bare": - flavor_base = '-'.join(flavor.split('-')[:-1]) - return { - 'oci': f"{DEFAULTS['CONTAINER_REGISTRY']}/{flavor_base}:{version}" - } + flavor_base = "-".join(flavor.split("-")[:-1]) + return {"oci": f"{DEFAULTS['CONTAINER_REGISTRY']}/{flavor_base}:{version}"} except Exception as e: logging.error(f"Error creating URLs for container/bare platforms: {e}") - sys.exit(ERROR_CODES['format_error']) + sys.exit(ERROR_CODES["format_error"]) + def prepare_regular_flavor_urls(flavor, version, commit_short, platform): """Create metadata and image URLs for regular platforms.""" try: - image_ext = DEFAULTS['PLATFORM_EXTENSIONS'].get(platform, "raw") - base_url = (f"{DEFAULTS['ARTIFACTS_S3_BASE_URL']}/" - f"{DEFAULTS['ARTIFACTS_S3_PREFIX']}" - f"{flavor}-{version}-{commit_short}") + image_ext = DEFAULTS["PLATFORM_EXTENSIONS"].get(platform, "raw") + base_url = ( + f"{DEFAULTS['ARTIFACTS_S3_BASE_URL']}/" + f"{DEFAULTS['ARTIFACTS_S3_PREFIX']}" + f"{flavor}-{version}-{commit_short}" + ) base_filename = f"{flavor}-{version}-{commit_short}" return { - 'metadata': f"{base_url}/{base_filename}.manifest", - 'image': f"{base_url}/{base_filename}.{image_ext}" + "metadata": f"{base_url}/{base_filename}.manifest", + "image": f"{base_url}/{base_filename}.{image_ext}", } except Exception as e: logging.error(f"Error creating URLs for flavor {flavor}: {e}") - sys.exit(ERROR_CODES['format_error']) + sys.exit(ERROR_CODES["format_error"]) + def format_flavors_with_urls(release): 
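Taken together, the three helpers above map a flavor to its download locations. A sketch of the resulting URLs for one regular flavor plus the OCI case (the flavor, version, and commit values are made up):

```python
flavor, version, commit_short = "aws-gardener_prod-amd64", "1312.7", "abc1234"
base = (
    "https://gardenlinux-github-releases.s3.amazonaws.com/objects/"
    f"{flavor}-{version}-{commit_short}"
)
print(f"{base}/{flavor}-{version}-{commit_short}.manifest")  # metadata
print(f"{base}/{flavor}-{version}-{commit_short}.raw")  # image: "aws" -> "raw"
print(f"ghcr.io/gardenlinux/gardenlinux:{version}")  # "container" platform
```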
"""Format flavors as a map with metadata and image URLs.""" """Return empty dict if no flavors in release.""" - if not release.get('flavors'): + if not release.get("flavors"): return {} try: - version = f"{release['version']['major']}.{release['version'].get('minor', 0)}" - commit_short = release['git'].get('commit_short', '') + # Get version string respecting versioned schemas + version = get_version_string(release["version"], release.get("type")) + commit_short = release["git"].get("commit_short", "") regular_flavors = {} oci_flavors = {} - for flavor in sorted(release['flavors']): + for flavor in sorted(release["flavors"]): platform = get_platform_from_flavor(flavor) # Handle OCI-based platforms @@ -453,19 +687,19 @@ def format_flavors_with_urls(release): except Exception as e: logging.error(f"Error formatting flavors with URLs: {e}") - sys.exit(ERROR_CODES['format_error']) + sys.exit(ERROR_CODES["format_error"]) + def get_oci_url(release): """Return the OCI image URL for a release.""" try: - if release['type'] in ['stable', 'next']: - version = str(release['version']['major']) - else: - version = f"{release['version']['major']}.{release['version'].get('minor', 0)}" + # Get version string respecting versioned schemas + version = get_version_string(release["version"], release.get("type")) return f"{DEFAULTS['CONTAINER_REGISTRY']}:{version}" except Exception as e: logging.error(f"Error getting OCI URL: {e}") - sys.exit(ERROR_CODES['format_error']) + sys.exit(ERROR_CODES["format_error"]) + def process_query(args): """Process the query based on command line arguments.""" @@ -476,7 +710,7 @@ def process_query(args): args.input_url, args.input_file_prefix, args.input_format, - args.no_input_split + args.no_input_split, ) if not releases: @@ -508,12 +742,18 @@ def process_query(args): # Format and output the results format_output(args, releases, args.output_format, args.fields, args.no_header) + def main(): - parser = argparse.ArgumentParser(description='Garden Linux Release Database Query Tool') - parser.add_argument('--version', action='version', version=f'%(prog)s {get_version()}') + parser = argparse.ArgumentParser( + description="Garden Linux Release Database Query Tool" + ) + parser.add_argument( + "--version", action="version", version=f"%(prog)s {get_version()}" + ) args = parse_arguments() process_query(args) + if __name__ == "__main__": main() diff --git a/glrd/schema_v1.py b/glrd/schema_v1.py new file mode 100644 index 0000000..5f06b9b --- /dev/null +++ b/glrd/schema_v1.py @@ -0,0 +1,289 @@ +""" +Schema v1 for Garden Linux releases with versions < 2000.0.0. +This schema version does not require the patch field in version objects. 
+""" + +# Schema v1 for releases that don't have patch field +SCHEMA_V1 = { + "next": { + "type": "object", + "properties": { + "name": {"type": "string"}, + "type": {"enum": ["next"]}, + "version": { + "type": "object", + "properties": {"major": {"enum": ["next"]}}, + "required": ["major"], + }, + "lifecycle": { + "type": "object", + "properties": { + "released": { + "type": "object", + "properties": { + "isodate": { + "type": "string", + "format": "date", + }, + "timestamp": {"type": "integer"}, + }, + "required": ["isodate", "timestamp"], + }, + "extended": { + "type": "object", + "properties": { + "isodate": { + "type": ["string"], + "format": "date", + }, + "timestamp": {"type": ["integer"]}, + }, + }, + "eol": { + "type": "object", + "properties": { + "isodate": { + "type": ["string"], + "format": "date", + }, + "timestamp": {"type": ["integer"]}, + }, + }, + }, + "required": ["released", "extended", "eol"], + }, + }, + "required": ["name", "type", "version", "lifecycle"], + }, + "major": { + "type": "object", + "properties": { + "name": {"type": "string"}, + "type": {"enum": ["major"]}, + "version": { + "type": "object", + "properties": {"major": {"type": "integer"}}, + "required": ["major"], + }, + "lifecycle": { + "type": "object", + "properties": { + "released": { + "type": "object", + "properties": { + "isodate": { + "type": "string", + "format": "date", + }, + "timestamp": {"type": "integer"}, + }, + "required": ["isodate", "timestamp"], + }, + "extended": { + "type": "object", + "properties": { + "isodate": { + "type": ["string"], + "format": "date", + }, + "timestamp": {"type": ["integer"]}, + }, + }, + "eol": { + "type": "object", + "properties": { + "isodate": { + "type": ["string"], + "format": "date", + }, + "timestamp": {"type": ["integer"]}, + }, + }, + }, + "required": ["released", "extended", "eol"], + }, + }, + "required": ["name", "type", "version", "lifecycle"], + }, + "minor": { + "type": "object", + "properties": { + "name": {"type": "string"}, + "type": {"enum": ["minor"]}, + "version": { + "type": "object", + "properties": { + "major": {"type": "integer"}, + "minor": {"type": "integer"}, + }, + "required": ["major", "minor"], + }, + "lifecycle": { + "type": "object", + "properties": { + "released": { + "type": "object", + "properties": { + "isodate": { + "type": "string", + "format": "date", + }, + "timestamp": {"type": "integer"}, + }, + "required": ["isodate", "timestamp"], + }, + "eol": { + "type": "object", + "properties": { + "isodate": { + "type": ["string"], + "format": "date", + }, + "timestamp": {"type": ["integer"]}, + }, + }, + }, + "required": ["released", "eol"], + }, + "git": { + "type": "object", + "properties": { + "commit": { + "type": "string", + "pattern": "^[0-9a-f]{40}$", + }, + "commit_short": { + "type": "string", + "pattern": "^[0-9a-f]{7,8}$", + }, + }, + "required": ["commit", "commit_short"], + }, + "github": { + "type": "object", + "properties": {"release": {"type": "string", "format": "uri"}}, + "required": ["release"], + }, + "flavors": {"type": "array", "items": {"type": "string"}}, + "attributes": { + "type": "object", + "properties": {"source_repo": {"type": "boolean", "default": True}}, + "required": ["source_repo"], + }, + }, + "required": [ + "name", + "type", + "version", + "lifecycle", + "git", + "github", + ], + }, + "nightly": { + "type": "object", + "properties": { + "name": {"type": "string"}, + "type": {"enum": ["nightly"]}, + "version": { + "type": "object", + "properties": { + "major": {"type": "integer"}, + "minor": 
{"type": "integer"}, + }, + "required": ["major", "minor"], + }, + "lifecycle": { + "type": "object", + "properties": { + "released": { + "type": "object", + "properties": { + "isodate": { + "type": "string", + "format": "date", + }, + "timestamp": {"type": "integer"}, + }, + "required": ["isodate", "timestamp"], + } + }, + "required": ["released"], + }, + "git": { + "type": "object", + "properties": { + "commit": { + "type": "string", + "pattern": "^[0-9a-f]{40}$", + }, + "commit_short": { + "type": "string", + "pattern": "^[0-9a-f]{7,8}$", + }, + }, + "required": ["commit", "commit_short"], + }, + "flavors": {"type": "array", "items": {"type": "string"}}, + "attributes": { + "type": "object", + "properties": {"source_repo": {"type": "boolean", "default": True}}, + "required": ["source_repo"], + }, + }, + "required": ["name", "type", "version", "lifecycle", "git"], + }, + "dev": { + "type": "object", + "properties": { + "name": {"type": "string"}, + "type": {"enum": ["dev"]}, + "version": { + "type": "object", + "properties": { + "major": {"type": "integer"}, + "minor": {"type": "integer"}, + }, + "required": ["major", "minor"], + }, + "lifecycle": { + "type": "object", + "properties": { + "released": { + "type": "object", + "properties": { + "isodate": { + "type": "string", + "format": "date", + }, + "timestamp": {"type": "integer"}, + }, + "required": ["isodate", "timestamp"], + } + }, + "required": ["released"], + }, + "git": { + "type": "object", + "properties": { + "commit": { + "type": "string", + "pattern": "^[0-9a-f]{40}$", + }, + "commit_short": { + "type": "string", + "pattern": "^[0-9a-f]{7,8}$", + }, + }, + "required": ["commit", "commit_short"], + }, + "flavors": {"type": "array", "items": {"type": "string"}}, + "attributes": { + "type": "object", + "properties": {"source_repo": {"type": "boolean", "default": True}}, + "required": ["source_repo"], + }, + }, + "required": ["name", "type", "version", "lifecycle", "git"], + }, +} diff --git a/glrd/schema_v2.py b/glrd/schema_v2.py new file mode 100644 index 0000000..41998fe --- /dev/null +++ b/glrd/schema_v2.py @@ -0,0 +1,292 @@ +""" +Schema v2 for Garden Linux releases with versions >= 2000.0.0. +This schema version requires the patch field in version objects. 
+""" + +# Schema v2 for releases that have patch field +SCHEMA_V2 = { + "next": { + "type": "object", + "properties": { + "name": {"type": "string"}, + "type": {"enum": ["next"]}, + "version": { + "type": "object", + "properties": {"major": {"enum": ["next"]}}, + "required": ["major"], + }, + "lifecycle": { + "type": "object", + "properties": { + "released": { + "type": "object", + "properties": { + "isodate": { + "type": "string", + "format": "date", + }, + "timestamp": {"type": "integer"}, + }, + "required": ["isodate", "timestamp"], + }, + "extended": { + "type": "object", + "properties": { + "isodate": { + "type": ["string"], + "format": "date", + }, + "timestamp": {"type": ["integer"]}, + }, + }, + "eol": { + "type": "object", + "properties": { + "isodate": { + "type": ["string"], + "format": "date", + }, + "timestamp": {"type": ["integer"]}, + }, + }, + }, + "required": ["released", "extended", "eol"], + }, + }, + "required": ["name", "type", "version", "lifecycle"], + }, + "major": { + "type": "object", + "properties": { + "name": {"type": "string"}, + "type": {"enum": ["major"]}, + "version": { + "type": "object", + "properties": {"major": {"type": "integer"}}, + "required": ["major"], + }, + "lifecycle": { + "type": "object", + "properties": { + "released": { + "type": "object", + "properties": { + "isodate": { + "type": "string", + "format": "date", + }, + "timestamp": {"type": "integer"}, + }, + "required": ["isodate", "timestamp"], + }, + "extended": { + "type": "object", + "properties": { + "isodate": { + "type": ["string"], + "format": "date", + }, + "timestamp": {"type": ["integer"]}, + }, + }, + "eol": { + "type": "object", + "properties": { + "isodate": { + "type": ["string"], + "format": "date", + }, + "timestamp": {"type": ["integer"]}, + }, + }, + }, + "required": ["released", "extended", "eol"], + }, + }, + "required": ["name", "type", "version", "lifecycle"], + }, + "minor": { + "type": "object", + "properties": { + "name": {"type": "string"}, + "type": {"enum": ["minor"]}, + "version": { + "type": "object", + "properties": { + "major": {"type": "integer"}, + "minor": {"type": "integer"}, + "patch": {"type": "integer"}, + }, + "required": ["major", "minor", "patch"], + }, + "lifecycle": { + "type": "object", + "properties": { + "released": { + "type": "object", + "properties": { + "isodate": { + "type": "string", + "format": "date", + }, + "timestamp": {"type": "integer"}, + }, + "required": ["isodate", "timestamp"], + }, + "eol": { + "type": "object", + "properties": { + "isodate": { + "type": ["string"], + "format": "date", + }, + "timestamp": {"type": ["integer"]}, + }, + }, + }, + "required": ["released", "eol"], + }, + "git": { + "type": "object", + "properties": { + "commit": { + "type": "string", + "pattern": "^[0-9a-f]{40}$", + }, + "commit_short": { + "type": "string", + "pattern": "^[0-9a-f]{7,8}$", + }, + }, + "required": ["commit", "commit_short"], + }, + "github": { + "type": "object", + "properties": {"release": {"type": "string", "format": "uri"}}, + "required": ["release"], + }, + "flavors": {"type": "array", "items": {"type": "string"}}, + "attributes": { + "type": "object", + "properties": {"source_repo": {"type": "boolean", "default": True}}, + "required": ["source_repo"], + }, + }, + "required": [ + "name", + "type", + "version", + "lifecycle", + "git", + "github", + ], + }, + "nightly": { + "type": "object", + "properties": { + "name": {"type": "string"}, + "type": {"enum": ["nightly"]}, + "version": { + "type": "object", + "properties": { + 
"major": {"type": "integer"}, + "minor": {"type": "integer"}, + "patch": {"type": "integer"}, + }, + "required": ["major", "minor", "patch"], + }, + "lifecycle": { + "type": "object", + "properties": { + "released": { + "type": "object", + "properties": { + "isodate": { + "type": "string", + "format": "date", + }, + "timestamp": {"type": "integer"}, + }, + "required": ["isodate", "timestamp"], + } + }, + "required": ["released"], + }, + "git": { + "type": "object", + "properties": { + "commit": { + "type": "string", + "pattern": "^[0-9a-f]{40}$", + }, + "commit_short": { + "type": "string", + "pattern": "^[0-9a-f]{7,8}$", + }, + }, + "required": ["commit", "commit_short"], + }, + "flavors": {"type": "array", "items": {"type": "string"}}, + "attributes": { + "type": "object", + "properties": {"source_repo": {"type": "boolean", "default": True}}, + "required": ["source_repo"], + }, + }, + "required": ["name", "type", "version", "lifecycle", "git"], + }, + "dev": { + "type": "object", + "properties": { + "name": {"type": "string"}, + "type": {"enum": ["dev"]}, + "version": { + "type": "object", + "properties": { + "major": {"type": "integer"}, + "minor": {"type": "integer"}, + "patch": {"type": "integer"}, + }, + "required": ["major", "minor", "patch"], + }, + "lifecycle": { + "type": "object", + "properties": { + "released": { + "type": "object", + "properties": { + "isodate": { + "type": "string", + "format": "date", + }, + "timestamp": {"type": "integer"}, + }, + "required": ["isodate", "timestamp"], + } + }, + "required": ["released"], + }, + "git": { + "type": "object", + "properties": { + "commit": { + "type": "string", + "pattern": "^[0-9a-f]{40}$", + }, + "commit_short": { + "type": "string", + "pattern": "^[0-9a-f]{7,8}$", + }, + }, + "required": ["commit", "commit_short"], + }, + "flavors": {"type": "array", "items": {"type": "string"}}, + "attributes": { + "type": "object", + "properties": {"source_repo": {"type": "boolean", "default": True}}, + "required": ["source_repo"], + }, + }, + "required": ["name", "type", "version", "lifecycle", "git"], + }, +} diff --git a/glrd/update.py b/glrd/update.py index 4276df0..12dc079 100755 --- a/glrd/update.py +++ b/glrd/update.py @@ -6,47 +6,87 @@ import boto3 -from glrd.manage import ( - download_all_s3_files, - upload_all_local_files +from glrd.manage import download_all_s3_files, upload_all_local_files +from glrd.util import ( + DEFAULTS, + ERROR_CODES, + get_version, + get_flavors_from_git, + get_s3_artifacts_data, + get_flavors_from_s3_artifacts, ) -from glrd.util import * - -from python_gardenlinux_lib.flavors.parse_flavors import * -from python_gardenlinux_lib.s3.s3 import * logger = logging.getLogger(__name__) + def parse_arguments(): """Parse command-line arguments.""" - parser = argparse.ArgumentParser(description="Update existing releases with fields that were added to the schema.") + parser = argparse.ArgumentParser( + description="Update existing releases with fields that were added to the schema." 
+ ) - parser.add_argument('--log-level', type=str, - choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'], - default='INFO', help="Set the logging level") + parser.add_argument( + "--log-level", + type=str, + choices=["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"], + default="INFO", + help="Set the logging level", + ) - parser.add_argument('--s3-download', action='store_true', - help="Download files from S3 first") + parser.add_argument( + "--s3-download", + action="store_true", + help="Download files from S3 first", + ) - parser.add_argument('--s3-update', action='store_true', - help="Upload files to S3 after processing") + parser.add_argument( + "--s3-update", + action="store_true", + help="Upload files to S3 after processing", + ) - parser.add_argument('--s3-bucket-name', type=str, - default=DEFAULTS['GLRD_S3_BUCKET_NAME'], - help="Name of S3 bucket for artifacts") + parser.add_argument( + "--s3-bucket-name", + type=str, + default=DEFAULTS["GLRD_S3_BUCKET_NAME"], + help="Name of S3 bucket for artifacts", + ) + + parser.add_argument( + "--s3-bucket-region", + type=str, + default=DEFAULTS["GLRD_S3_BUCKET_REGION"], + help="Region for S3 bucket", + ) + + parser.add_argument( + "--s3-bucket-prefix", + type=str, + default=DEFAULTS["GLRD_S3_BUCKET_PREFIX"], + help="Prefix for S3 bucket objects", + ) - parser.add_argument('--s3-bucket-region', type=str, - default=DEFAULTS['GLRD_S3_BUCKET_REGION'], - help="Region for S3 bucket") + parser.add_argument( + "--version", + type=str, + help="Only process releases with this version (format: major.minor.patch)", + ) - parser.add_argument('--s3-bucket-prefix', type=str, - default=DEFAULTS['GLRD_S3_BUCKET_PREFIX'], - help="Prefix for S3 bucket objects") + parser.add_argument( + "--fix-patch-versions", + action="store_true", + default=True, + help="Fix missing patch version fields in release names and version " + "objects (default: True)", + ) - parser.add_argument('--version', type=str, - help="Only process releases with this version (format: major.minor)") + parser.add_argument( + "--no-fix-patch-versions", + action="store_true", + help="Disable fixing of missing patch version fields", + ) - parser.add_argument('-V', action='version', version=f'%(prog)s {get_version()}') + parser.add_argument("-V", action="version", version=f"%(prog)s {get_version()}") args = parser.parse_args() @@ -56,51 +96,64 @@ def parse_arguments(): # Parse version if provided if args.version: try: - parts = args.version.split('.') - if len(parts) != 2: - raise ValueError("Version must be in format major.minor") + parts = args.version.split(".") + if len(parts) != 3: + raise ValueError("Version must be in format major.minor.patch") args.version_major = int(parts[0]) args.version_minor = int(parts[1]) + args.version_patch = int(parts[2]) except (ValueError, IndexError) as e: logging.error(f"Invalid version format: {e}") sys.exit(ERROR_CODES["parameter_missing"]) + # Handle patch version fixing flags + if args.no_fix_patch_versions: + args.fix_patch_versions = False + return args + def load_releases_from_file(json_file): """Load releases from a JSON file.""" try: - with open(json_file, 'r') as file: + with open(json_file, "r") as file: data = json.load(file) - return data.get('releases', []) + return data.get("releases", []) except Exception as e: logging.error(f"Error loading releases from {json_file}: {e}") sys.exit(ERROR_CODES["file_not_found"]) + def save_releases(releases, json_file): """Save releases to a JSON file.""" try: - with open(json_file, 'w') as file: - 
json.dump({'releases': releases}, file, separators=(',', ':'), ensure_ascii=False) + with open(json_file, "w") as file: + json.dump( + {"releases": releases}, + file, + separators=(",", ":"), + ensure_ascii=False, + ) except Exception as e: logging.error(f"Error saving releases to {json_file}: {e}") sys.exit(ERROR_CODES["output_error"]) + def fetch_s3_bucket_contents(args): """Fetch all objects from S3 bucket once and return them.""" try: - s3_client = boto3.client('s3', region_name=args.s3_bucket_region) + s3_client = boto3.client("s3", region_name=args.s3_bucket_region) bucket_name = args.s3_bucket_name prefix = args.s3_bucket_prefix logging.info(f"Fetching objects from s3://{bucket_name}/{prefix}") - paginator = s3_client.get_paginator('list_objects_v2') + paginator = s3_client.get_paginator("list_objects_v2") all_objects = [] for page in paginator.paginate(Bucket=bucket_name, Prefix=prefix): - if 'Contents' in page: - all_objects.extend(page['Contents']) + if "Contents" in page: + all_objects.extend(page["Contents"]) logging.info(f"Found {len(all_objects)} objects") return all_objects @@ -109,61 +162,112 @@ def fetch_s3_bucket_contents(args): logging.error(f"Error fetching S3 bucket contents: {e}") sys.exit(ERROR_CODES["s3_error"]) + def update_flavors(release): """Update flavors for a release.""" - if 'git' not in release: + if "git" not in release: logger.debug(f"Skipping flavor update for {release['name']} - no git info") return release - commit = release['git']['commit'] - version = release['version'] + commit = release["git"]["commit"] + version = release["version"] - # First try flavors.yaml - flavors = parse_flavors_commit( - commit, - version=version, - query_s3=False, - logger=logger - ) + # First try flavors.yaml using gardenlinux library + flavors = get_flavors_from_git(commit) # If no flavors found, try S3 if not flavors: - logger.info(f"No flavors found in flavors.yaml for {release['name']}, checking S3...") - artifacts_data = get_s3_artifacts( - DEFAULTS['ARTIFACTS_S3_BUCKET_NAME'], - DEFAULTS['ARTIFACTS_S3_PREFIX'], - logger=logger + logger.info( + f"No flavors found in flavors.yaml for {release['name']}, checking S3..." 
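Stripped of logging, `update_flavors` resolves flavors with a two-step fallback; a condensed sketch using the helpers imported at the top of this file:

```python
def resolve_flavors(commit, version):
    flavors = get_flavors_from_git(commit)  # 1) flavors.yaml at that commit
    if not flavors:  # 2) fall back to the S3 artifact listing
        artifacts = get_s3_artifacts_data(
            DEFAULTS["ARTIFACTS_S3_BUCKET_NAME"],
            DEFAULTS["ARTIFACTS_S3_PREFIX"],
            DEFAULTS["ARTIFACTS_S3_CACHE_FILE"],
        )
        if artifacts:
            flavors = get_flavors_from_s3_artifacts(artifacts, version, commit)
    return flavors or []
```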
+ ) + # Get S3 artifacts using gardenlinux library + artifacts_data = get_s3_artifacts_data( + DEFAULTS["ARTIFACTS_S3_BUCKET_NAME"], + DEFAULTS["ARTIFACTS_S3_PREFIX"], + DEFAULTS["ARTIFACTS_S3_CACHE_FILE"], ) if artifacts_data: - flavors = parse_flavors_commit( - commit, - version=version, - query_s3=True, - s3_objects=artifacts_data, - logger=logger - ) + flavors = get_flavors_from_s3_artifacts(artifacts_data, version, commit) else: logger.warning(f"No artifacts data available from S3 for {release['name']}") if flavors: - release['flavors'] = flavors - logger.info(f"Updated flavors for {release['name']}: found {len(flavors)} flavors") + release["flavors"] = flavors + logger.info( + f"Updated flavors for {release['name']}: found {len(flavors)} flavors" + ) else: logger.warning(f"No flavors found for {release['name']}") return release + def update_source_repo_attribute(releases): """Update source repo attribute for releases.""" for release in releases: - major = int(release['version'].get('major', 0)) - minor = int(release['version'].get('minor', 0)) + major = int(release["version"].get("major", 0)) + minor = int(release["version"].get("minor", 0)) + patch = int(release["version"].get("patch", 0)) - if (major > 1592) or (major == 1592 and minor >= 4): - release['attributes'] = {'source_repo': True} + if (major > 1592) or (major == 1592 and minor >= 4 and patch >= 0): + release["attributes"] = {"source_repo": True} else: - release['attributes'] = {'source_repo': False} + release["attributes"] = {"source_repo": False} + + +def fix_patch_version_fields(releases): + """Fix missing patch version fields in releases based on versioned schemas.""" + fixed_count = 0 + + for release in releases: + release_type = release.get("type") + version = release.get("version", {}) + + # Only fix minor, nightly, and dev releases + if release_type in ["minor", "nightly", "dev"]: + major = version.get("major") + minor = version.get("minor") + patch = version.get("patch") + + # Only add patch field for versions >= 2000.0.0 (v2 schema) + if major is not None and major >= 2000: + # If patch is missing, set it to 0 + if patch is None: + version["patch"] = 0 + patch = 0 + fixed_count += 1 + + # Fix the name if it's missing the patch version + current_name = release.get("name", "") + expected_name = f"{release_type}-{major}.{minor}.{patch}" + + if current_name != expected_name: + release["name"] = expected_name + fixed_count += 1 + logging.debug(f"Fixed name: {current_name} -> {expected_name}") + else: + # For versions < 2000.0.0 (v1 schema), ensure patch field is NOT present + if "patch" in version: + del version["patch"] + fixed_count += 1 + logging.debug( + f"Removed patch field for v1 schema release: {release.get('name', '')}" + ) + + # Ensure name doesn't have patch version for v1 schema + current_name = release.get("name", "") + expected_name = f"{release_type}-{major}.{minor}" + + if current_name != expected_name: + release["name"] = expected_name + fixed_count += 1 + logging.debug( + f"Fixed name for v1 schema: {current_name} -> {expected_name}" + ) + + return fixed_count + def process_releases(args): """Process all release files.""" @@ -173,7 +277,7 @@ def process_releases(args): logging.info(f"Downloading files from S3 bucket {args.s3_bucket_name}") download_all_s3_files( bucket_name=args.s3_bucket_name, - bucket_prefix=args.s3_bucket_prefix + bucket_prefix=args.s3_bucket_prefix, ) logging.info("Successfully downloaded files from S3") except Exception as e: @@ -181,37 +285,61 @@ def process_releases(args): 
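The effect of `fix_patch_version_fields` on the two schema generations, shown on two illustrative release stubs:

```python
v2 = {"type": "minor", "version": {"major": 2000, "minor": 1}, "name": "minor-2000.1"}
v1 = {
    "type": "minor",
    "version": {"major": 1312, "minor": 7, "patch": 0},
    "name": "minor-1312.7.0",
}
fix_patch_version_fields([v2, v1])
assert v2["version"]["patch"] == 0 and v2["name"] == "minor-2000.1.0"  # patch added
assert "patch" not in v1["version"] and v1["name"] == "minor-1312.7"  # patch dropped
```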
sys.exit(ERROR_CODES["s3_error"]) # Get artifacts from artifacts bucket - logging.info(f"Fetching artifacts data from S3 bucket {DEFAULTS['ARTIFACTS_S3_BUCKET_NAME']}") - artifacts_data = get_s3_artifacts(DEFAULTS['ARTIFACTS_S3_BUCKET_NAME'], DEFAULTS['ARTIFACTS_S3_PREFIX']) + logging.info( + f"Fetching artifacts data from S3 bucket {DEFAULTS['ARTIFACTS_S3_BUCKET_NAME']}" + ) + # Get S3 artifacts using gardenlinux library + artifacts_data = get_s3_artifacts_data( + DEFAULTS["ARTIFACTS_S3_BUCKET_NAME"], + DEFAULTS["ARTIFACTS_S3_PREFIX"], + DEFAULTS["ARTIFACTS_S3_CACHE_FILE"], + ) - if not artifacts_data or not artifacts_data.get('artifacts'): + if not artifacts_data or not artifacts_data.get("artifacts"): logging.error("Failed to fetch artifacts data from S3") return - logging.info(f"Successfully fetched {len(artifacts_data.get('artifacts', []))} artifacts") + logging.info( + f"Successfully fetched {len(artifacts_data.get('artifacts', []))} artifacts" + ) # Get list of JSON files to process - json_files = [f for f in os.listdir('.') if f.endswith('.json') and f.startswith('releases-')] + json_files = [ + f for f in os.listdir(".") if f.endswith(".json") and f.startswith("releases-") + ] if not json_files: logging.warning("No JSON files found to process") return - logging.info(f"Found {len(json_files)} JSON files to process: {', '.join(json_files)}") + logging.info( + f"Found {len(json_files)} JSON files to process: {', '.join(json_files)}" + ) # Process each file successful_files = [] total_releases_processed = 0 total_releases_updated = 0 + total_patch_fixes = 0 for json_file in json_files: try: logging.info(f"\nProcessing file: {json_file}") - with open(json_file, 'r') as f: + with open(json_file, "r") as f: releases = json.load(f) - if isinstance(releases, dict) and 'releases' in releases: - releases = releases['releases'] + if isinstance(releases, dict) and "releases" in releases: + releases = releases["releases"] logging.info(f"Found {len(releases)} releases in {json_file}") + # Fix patch version fields first (if enabled) + patch_fixes = 0 + if args.fix_patch_versions: + patch_fixes = fix_patch_version_fields(releases) + if patch_fixes > 0: + logging.info( + f"Fixed {patch_fixes} patch version fields in {json_file}" + ) + total_patch_fixes += patch_fixes + # Process each release - modified = False + modified = patch_fixes > 0 # File is modified if patch versions were fixed releases_processed = 0 releases_updated = 0 @@ -219,57 +347,88 @@ def process_releases(args): releases_processed += 1 total_releases_processed += 1 - release_name = release.get('name', 'unknown') - release_type = release.get('type', 'unknown') + release_name = release.get("name", "unknown") + release_type = release.get("type", "unknown") - # Only process patch, nightly, and dev releases - if release_type not in ['patch', 'nightly', 'dev']: - logging.debug(f"Skipping {release_type} release {release_name}: not a patch/nightly/dev release") + # Only process minor, nightly, and dev releases + if release_type not in ["minor", "nightly", "dev"]: + logging.debug( + f"Skipping {release_type} release {release_name}: " + f"not a minor/nightly/dev release" + ) continue - version = release.get('version', {}) - version_info = f"{version.get('major', '?')}.{version.get('minor', '?')}" + version = release.get("version", {}) + version_info = ( + f"{version.get('major', '?')}." + f"{version.get('minor', '?')}." 
+ f"{version.get('patch', '?')}" + ) # Skip if version filter is active and doesn't match if args.version: - if (version.get('major') != args.version_major or - version.get('minor') != args.version_minor): - logging.debug(f"Skipping {release_type} release {release_name}: version {version_info} doesn't match filter {args.version}") + if ( + version.get("major") != args.version_major + or version.get("minor") != args.version_minor + or version.get("patch") != args.version_patch + ): + logging.debug( + f"Skipping {release_type} release {release_name}: " + f"version {version_info} doesn't match filter {args.version}" + ) continue - if 'git' not in release: - logging.debug(f"Skipping {release_type} release {release_name}: no git information") + if "git" not in release: + logging.debug( + f"Skipping {release_type} release {release_name}: no git information" + ) continue - commit = release['git'].get('commit') + commit = release["git"].get("commit") if not commit: - logging.debug(f"Skipping {release_type} release {release_name}: no commit hash") + logging.debug( + f"Skipping {release_type} release {release_name}: no commit hash" + ) continue - logging.info(f"Processing {release_type} release {release_name} (version {version_info}, commit {commit[:8]})") + logging.info( + f"Processing {release_type} release {release_name} " + f"(version {version_info}, commit {commit[:8]})" + ) # Update source repo attribute update_source_repo_attribute([release]) # Get flavors for this commit using artifacts data - flavors = parse_flavors_commit(commit, version=version, query_s3=True, s3_objects=artifacts_data) + flavors = get_flavors_from_s3_artifacts(artifacts_data, version, commit) if flavors: - release['flavors'] = flavors + release["flavors"] = flavors modified = True releases_updated += 1 total_releases_updated += 1 - logging.info(f"Added {len(flavors)} flavors for {release_name} (version {version_info}, commit {commit[:8]})") + logging.info( + f"Added {len(flavors)} flavors for {release_name} " + f"(version {version_info}, commit {commit[:8]})" + ) else: - logging.info(f"No flavors found for {release_name} (version {version_info}, commit {commit[:8]})") + logging.info( + f"No flavors found for {release_name} " + f"(version {version_info}, commit {commit[:8]})" + ) # Save if modified if modified: - with open(json_file, 'w') as f: - json.dump({'releases': releases}, f, indent=2) + with open(json_file, "w") as f: + json.dump({"releases": releases}, f, indent=2) successful_files.append(json_file) - logging.info(f"Updated {json_file} ({releases_updated}/{releases_processed} releases updated)") + logging.info( + f"Updated {json_file} " + f"({releases_updated}/{releases_processed} releases updated)" + ) else: - logging.info(f"No updates needed for {json_file} ({releases_processed} releases checked)") + logging.info( + f"No updates needed for {json_file} ({releases_processed} releases checked)" + ) except Exception as e: logging.error(f"Error processing {json_file}: {e}", exc_info=True) @@ -287,19 +446,21 @@ def process_releases(args): # Log summary logging.info("\nUpdate Summary:") - logging.info(f"Files processed successfully: {len(successful_files)} out of {len(json_files)}") + logging.info( + f"Files processed successfully: {len(successful_files)} out of {len(json_files)}" + ) logging.info(f"Total releases processed: {total_releases_processed}") logging.info(f"Total releases updated with flavors: {total_releases_updated}") + logging.info(f"Total patch version fields fixed: {total_patch_fixes}") + def main(): 
     args = parse_arguments()

     # Configure logging with the already uppercase level
-    logging.basicConfig(
-        level=args.log_level,
-        format='%(levelname)s: %(message)s'
-    )
+    logging.basicConfig(level=args.log_level, format="%(levelname)s: %(message)s")

     process_releases(args)

+
 if __name__ == "__main__":
     main()
diff --git a/glrd/util.py b/glrd/util.py
index 9618372..e6a46dd 100644
--- a/glrd/util.py
+++ b/glrd/util.py
@@ -1,15 +1,23 @@
-import json
+import importlib.metadata
 import logging
 import os
 import re
 import signal
 import sys
 from datetime import datetime
+from pathlib import Path
+from tempfile import TemporaryDirectory
+from typing import Any, Dict, List, Optional

-import importlib.metadata
 import pytz
 import yaml

+from gardenlinux.constants import GL_REPOSITORY_URL
+from gardenlinux.flavors import Parser
+from gardenlinux.git import Repository
+from gardenlinux.s3 import Bucket
+
+
 ERROR_CODES = {
     "validation_error": 1,
     "subprocess_output_error": 2,
@@ -26,97 +34,111 @@ DEFAULTS = {
     # Release types
-    'RELEASE_TYPES': ['next', 'stable', 'patch', 'nightly', 'dev'],
-
+    "RELEASE_TYPES": ["next", "major", "minor", "nightly", "dev"],
     # Query defaults
-    'QUERY_TYPE': 'stable,patch',
-    'QUERY_FIELDS': 'Name,Version,Type,GitCommitShort,ReleaseDate,ExtendedMaintenance,EndOfMaintenance',
-    'QUERY_INPUT_TYPE': 'url',
-    'QUERY_INPUT_URL': 'https://gardenlinux-glrd.s3.eu-central-1.amazonaws.com',
-    'QUERY_INPUT_FILE_PREFIX': 'releases',
-    'QUERY_INPUT_FORMAT': 'json',
-    'QUERY_OUTPUT_TYPE': 'shell',
-    'QUERY_OUTPUT_DESCRIPTION': 'Garden Linux Releases',
-
+    "QUERY_TYPE": "major,minor",
+    "QUERY_FIELDS": (
+        "Name,Version,Type,GitCommitShort,ReleaseDate,"
+        "ExtendedMaintenance,EndOfMaintenance"
+    ),
+    "QUERY_INPUT_TYPE": "url",
+    "QUERY_INPUT_URL": ("https://gardenlinux-glrd.s3.eu-central-1.amazonaws.com"),
+    "QUERY_INPUT_FILE_PREFIX": "releases",
+    "QUERY_INPUT_FORMAT": "json",
+    "QUERY_OUTPUT_TYPE": "shell",
+    "QUERY_OUTPUT_DESCRIPTION": "Garden Linux Releases",
     # Manage defaults
-    'MANAGE_INPUT_FILE': 'releases-input.yaml',
-    'MANAGE_OUTPUT_FORMAT': 'yaml',
-    'MANAGE_OUTPUT_FILE_PREFIX': 'releases',
-
+    "MANAGE_INPUT_FILE": "releases-input.json",
+    "MANAGE_OUTPUT_FORMAT": "json",
+    "MANAGE_OUTPUT_FILE_PREFIX": "releases",
     # S3 configuration for glrd storage
-    'GLRD_S3_BUCKET_NAME': 'gardenlinux-glrd',
-    'GLRD_S3_BUCKET_PREFIX': '',
-    'GLRD_S3_BUCKET_REGION': 'eu-central-1',
-
+    "GLRD_S3_BUCKET_NAME": "gardenlinux-glrd",
+    "GLRD_S3_BUCKET_PREFIX": "",
+    "GLRD_S3_BUCKET_REGION": "eu-central-1",
     # S3 configuration for releases artifacts
-    'ARTIFACTS_S3_BUCKET_NAME': 'gardenlinux-github-releases',
-    'ARTIFACTS_S3_PREFIX': 'objects/',
-    'ARTIFACTS_S3_BASE_URL': 'https://gardenlinux-github-releases.s3.amazonaws.com',
-
+    "ARTIFACTS_S3_BUCKET_NAME": "gardenlinux-github-releases",
+    "ARTIFACTS_S3_PREFIX": "objects/",
+    "ARTIFACTS_S3_BASE_URL": ("https://gardenlinux-github-releases.s3.amazonaws.com"),
+    "ARTIFACTS_S3_CACHE_FILE": "artifacts-cache.json",
     # Garden Linux repository
-    'GL_REPO_NAME': 'gardenlinux',
-    'GL_REPO_OWNER': 'gardenlinux',
-    'GL_REPO_URL': 'https://github.com/gardenlinux/gardenlinux',
-
+    "GL_REPO_NAME": "gardenlinux",
+    "GL_REPO_OWNER": "gardenlinux",
+    "GL_REPO_URL": "https://github.com/gardenlinux/gardenlinux",
     # Container registry configuration
-    'CONTAINER_REGISTRY': 'ghcr.io/gardenlinux/gardenlinux',
-
+    "CONTAINER_REGISTRY": "ghcr.io/gardenlinux/gardenlinux",
     # Platform file extensions
-    'PLATFORM_EXTENSIONS': {
-        'ali': 'qcow2',
-        'aws': 'raw',
-        'azure': 'vhd',
-        'gcp': 'gcpimage.tar.gz',
-        'gdch': 'gcpimage.tar.gz',
-        'kvm': 'raw',
-        'metal': 'raw',
-        'openstack': 'qcow2',
-        'openstackbaremetal': 'qcow2',
-        'vmware': 'ova',
-    }
+    "PLATFORM_EXTENSIONS": {
+        "ali": "qcow2",
+        "aws": "raw",
+        "azure": "vhd",
+        "gcp": "gcpimage.tar.gz",
+        "gdch": "gcpimage.tar.gz",
+        "kvm": "raw",
+        "metal": "raw",
+        "openstack": "qcow2",
+        "openstackbaremetal": "qcow2",
+        "vmware": "ova",
+    },
 }

+
 def get_version():
-    return importlib.metadata.version('glrd')
+    return importlib.metadata.version("glrd")
+

 def extract_version_data(tag_name):
-    """Extract major and minor version numbers from a tag."""
-    version_regex = re.compile(r'^(\d+)\.?(\d+)?$')
+    """Extract major, minor and patch version numbers from a tag."""
+    version_regex = re.compile(r"^(\d+)\.?(\d+)?\.?(\d+)?$")
     match = version_regex.match(tag_name)
-    return (int(match.group(1)), int(match.group(2))) if match else (None, None)
+    if not match:
+        return (None, None, None)
+    # minor and patch may be absent (e.g. tag "1592"); keep them as None
+    # instead of crashing on int(None)
+    major, minor, patch = match.groups()
+    return (
+        int(major),
+        int(minor) if minor is not None else None,
+        int(patch) if patch is not None else None,
+    )
+

 def get_current_timestamp():
     """Get current timestamp."""
     return int(datetime.now().timestamp())
+

 def timestamp_to_isotime(timestamp):
     """Convert timestamp to ISO time string."""
     if not timestamp:
-        return 'N/A'
+        return "N/A"
     try:
         dt = datetime.fromtimestamp(timestamp, pytz.UTC)
-        return dt.strftime('%H:%M:%S')
+        return dt.strftime("%H:%M:%S")
     except (ValueError, TypeError):
         logging.error(f"Error converting timestamp to ISO time: {timestamp}")
-        sys.exit(ERROR_CODES['format_error'])
+        sys.exit(ERROR_CODES["format_error"])
+

 def isodate_to_timestamp(isodate):
     """
-    Convert an ISO 8601 formatted date (with or without time) to a Unix timestamp.
-    If only a date is provided, assume the time is 00:00:00 UTC.
+    Convert an ISO 8601 formatted date (with or without time) to a Unix
+    timestamp. If only a date is provided, assume the time is 00:00:00 UTC.
     """
     try:
         # Try parsing with full ISO format (date and time with 'Z' timezone)
-        return int(datetime.strptime(isodate, "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=pytz.UTC).timestamp())
+        return int(
+            datetime.strptime(isodate, "%Y-%m-%dT%H:%M:%SZ")
+            .replace(tzinfo=pytz.UTC)
+            .timestamp()
+        )
     except ValueError:
         # If the time part is missing, assume time is 00:00:00 UTC
-        return int(datetime.strptime(isodate, "%Y-%m-%d").replace(tzinfo=pytz.UTC).timestamp())
+        return int(
+            datetime.strptime(isodate, "%Y-%m-%d").replace(tzinfo=pytz.UTC).timestamp()
+        )
+

 def timestamp_to_isodate(timestamp):
     """Convert timestamp to ISO date."""
     dt = datetime.utcfromtimestamp(timestamp)
     return dt.strftime("%Y-%m-%d")
+

 # Handle SIGPIPE and BrokenPipeError
 def handle_broken_pipe_error(signum, frame):
     try:
@@ -124,9 +146,150 @@ def handle_broken_pipe_error(signum, frame):
     except SystemExit:
         os._exit(0)
+

 # Create a custom dumper class that doesn't generate anchors
 class NoAliasDumper(yaml.SafeDumper):
     def ignore_aliases(self, data):
         return True
+

 signal.signal(signal.SIGPIPE, handle_broken_pipe_error)
+
+
+def get_flavors_from_git(commit: str) -> List[str]:
+    """
+    Get flavors from Git repository using gardenlinux library.
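+
+    Minimal usage sketch (hypothetical commit hash and illustrative output;
+    performs a sparse network checkout of the Garden Linux repository):
+
+        >>> get_flavors_from_git("0123abcdef0123abcdef0123abcdef0123abcdef")  # doctest: +SKIP
+        ['aws-gardener_prod-amd64', 'kvm-gardener_prod-arm64']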
+
+    Args:
+        commit: Git commit hash (or 'latest')
+
+    Returns:
+        List of flavor strings
+    """
+
+    try:
+        with TemporaryDirectory() as git_directory:
+            # Use gardenlinux Repository class for sparse checkout with pygit2
+            Repository.checkout_repo_sparse(
+                git_directory=git_directory,
+                repo_url=GL_REPOSITORY_URL,
+                commit=commit if commit != "latest" else None,
+                pathspecs=["flavors.yaml"],  # Only checkout the flavors.yaml file
+            )
+
+            flavors_file = Path(git_directory, "flavors.yaml")
+            if flavors_file.exists():
+                with flavors_file.open("r") as fp:
+                    flavors_data = fp.read()
+                flavors_yaml = yaml.safe_load(flavors_data)
+                parser = Parser(flavors_yaml)
+                combinations = parser.filter()
+                all_flavors = set(combination for _, combination in combinations)
+                flavors = sorted(all_flavors)
+                logging.info(f"Found {len(flavors)} flavors in Git")
+                return flavors
+            else:
+                logging.warning("flavors.yaml not found in repository")
+                return []
+    except Exception as exc:
+        logging.debug(f"Could not get flavors from Git: {exc}")
+        return []
+
+
+def get_s3_artifacts_data(
+    bucket_name: str,
+    prefix: str,
+    cache_file: Optional[str] = None,
+    cache_ttl: int = 3600,
+) -> Optional[Dict]:
+    """
+    Get S3 artifacts data using gardenlinux library with caching support.
+
+    Args:
+        bucket_name: S3 bucket name
+        prefix: S3 prefix
+        cache_file: Optional cache file path for S3 object keys
+        cache_ttl: Cache time-to-live in seconds (default: 1 hour)
+
+    Returns:
+        Dictionary containing S3 artifacts data with 'index' and 'artifacts' keys
+    """
+
+    try:
+        bucket = Bucket(bucket_name)
+
+        artifacts = bucket.read_cache_file_or_filter(
+            cache_file, cache_ttl=cache_ttl, Prefix=prefix
+        )
+
+        index = {}
+        for key in artifacts:
+            try:
+                parts = key.split("/")
+                if len(parts) >= 3:
+                    version_commit = parts[1]
+                    if "-" in version_commit:
+                        version_part, commit_part = version_commit.split("-", 1)
+                        if version_part not in index:
+                            index[version_part] = []
+                        index[version_part].append(key)
+            except Exception as e:
+                logging.debug(f"Could not parse version from key {key}: {e}")
+
+        result = {"index": index, "artifacts": artifacts}
+        logging.info(f"Found {len(artifacts)} artifacts and {len(index)} index entries")
+        return result
+    except Exception as e:
+        logging.error(f"Error getting S3 artifacts: {e}")
+        return None
+
+
+def get_flavors_from_s3_artifacts(
+    artifacts_data: Dict, version: Dict[str, Any], commit: str
+) -> List[str]:
+    """
+    Extract flavors from S3 artifacts data.
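+
+    Illustrative sketch (hypothetical artifact key layout; real keys come
+    from get_s3_artifacts_data):
+
+        >>> data = {
+        ...     "index": {},
+        ...     "artifacts": ["objects/kvm-gardener_prod-1592.1-0123abcd/rootfs.raw"],
+        ... }
+        >>> get_flavors_from_s3_artifacts(data, {"major": 1592, "minor": 1}, "0123abcdef")
+        ['kvm-gardener_prod']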
+
+    Args:
+        artifacts_data: S3 artifacts data dictionary
+        version: Version dictionary with major, minor, patch
+        commit: Git commit hash
+
+    Returns:
+        List of flavor strings
+    """
+
+    try:
+        version_info = f"{version['major']}.{version.get('minor', 0)}"
+        commit_short = commit[:8]
+
+        # Try index lookup first
+        search_key = f"{version_info}-{commit_short}"
+        if search_key in artifacts_data.get("index", {}):
+            flavors = artifacts_data["index"][search_key]
+            logging.debug(f"Found flavors in S3 index for {search_key}")
+            return flavors
+        else:
+            # Search through artifacts
+            found_flavors = set()
+            for key in artifacts_data.get("artifacts", []):
+                if version_info in key and commit_short in key:
+                    try:
+                        parts = key.split("/")
+                        if len(parts) >= 2:
+                            flavor_with_version = parts[1]
+                            flavor = flavor_with_version.rsplit(f"-{version_info}", 1)[0]
+                            if flavor:
+                                found_flavors.add(flavor)
+                    except Exception as e:
+                        logging.debug(f"Error parsing artifact key {key}: {e}")
+                        continue
+            flavors = sorted(found_flavors)
+            if flavors:
+                logging.info(f"Found {len(flavors)} flavors in S3 artifacts")
+            return flavors
+    except Exception as e:
+        logging.error(f"Error processing S3 artifacts: {e}")
+    return []
diff --git a/poetry.lock b/poetry.lock
index c8dcf8c..b70f88f 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,15 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand.
-
-[[package]]
-name = "alabaster"
-version = "0.7.16"
-description = "A light, configurable Sphinx theme"
-optional = false
-python-versions = ">=3.9"
-files = [
-    {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"},
-    {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"},
-]
+# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand.
[[package]] name = "apt-repo" @@ -17,6 +6,7 @@ version = "0.5" description = "Python library to query APT repositories" optional = false python-versions = ">=3.5" +groups = ["main"] files = [ {file = "apt-repo-0.5.tar.gz", hash = "sha256:b566195884b8ea59e6b831f814fd106c7e683dccc86ca95f6494a447572f30ea"}, ] @@ -27,61 +17,110 @@ version = "25.1.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "attrs-25.1.0-py3-none-any.whl", hash = "sha256:c75a69e28a550a7e93789579c22aa26b0f5b83b75dc4e08fe092980051e1090a"}, {file = "attrs-25.1.0.tar.gz", hash = "sha256:1c97078a80c814273a76b2a298a932eb681c87415c11dee0a6921de7f1b02c3e"}, ] [package.extras] -benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] +tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] [[package]] -name = "babel" -version = "2.17.0" -description = "Internationalization utilities" +name = "autopep8" +version = "2.3.2" +description = "A tool that automatically formats Python code to conform to the PEP 8 style guide" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "autopep8-2.3.2-py2.py3-none-any.whl", hash = 
"sha256:ce8ad498672c845a0c3de2629c15b635ec2b05ef8177a6e7c91c74f3e9b51128"}, + {file = "autopep8-2.3.2.tar.gz", hash = "sha256:89440a4f969197b69a995e4ce0661b031f455a9f776d2c5ba3dbd83466931758"}, +] + +[package.dependencies] +pycodestyle = ">=2.12.0" + +[[package]] +name = "black" +version = "25.1.0" +description = "The uncompromising code formatter." +optional = false +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2"}, - {file = "babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d"}, + {file = "black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32"}, + {file = "black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da"}, + {file = "black-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7"}, + {file = "black-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:db8ea9917d6f8fc62abd90d944920d95e73c83a5ee3383493e35d271aca872e9"}, + {file = "black-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a39337598244de4bae26475f77dda852ea00a93bd4c728e09eacd827ec929df0"}, + {file = "black-25.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96c1c7cd856bba8e20094e36e0f948718dc688dba4a9d78c3adde52b9e6c2299"}, + {file = "black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096"}, + {file = "black-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:172b1dbff09f86ce6f4eb8edf9dede08b1fce58ba194c87d7a4f1a5aa2f5b3c2"}, + {file = "black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b"}, + {file = "black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc"}, + {file = "black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f"}, + {file = "black-25.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba"}, + {file = "black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f"}, + {file = "black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3"}, + {file = "black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171"}, + {file = "black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18"}, + {file = "black-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1ee0a0c330f7b5130ce0caed9936a904793576ef4d2b98c40835d6a65afa6a0"}, + {file = "black-25.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3df5f1bf91d36002b0a75389ca8663510cf0531cca8aa5c1ef695b46d98655f"}, + {file = "black-25.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:d9e6827d563a2c820772b32ce8a42828dc6790f095f441beef18f96aa6f8294e"}, + {file = "black-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:bacabb307dca5ebaf9c118d2d2f6903da0d62c9faa82bd21a33eecc319559355"}, + {file = "black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717"}, + {file = "black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666"}, ] +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" + [package.extras] -dev = ["backports.zoneinfo", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata"] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.10)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "boto3" -version = "1.36.22" +version = "1.40.47" description = "The AWS SDK for Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "boto3-1.36.22-py3-none-any.whl", hash = "sha256:39957eabdce009353d72d131046489fbbfa15891865d5f069f1e8bfa414e6b81"}, - {file = "boto3-1.36.22.tar.gz", hash = "sha256:768c8a4d4a6227fe2258105efa086f1424cba5ca915a5eb2305b2cd979306ad1"}, + {file = "boto3-1.40.47-py3-none-any.whl", hash = "sha256:33b291200cbb042ca8faac0b52a5d460850712641930d32335b75bc65e88653c"}, + {file = "boto3-1.40.47.tar.gz", hash = "sha256:c0ea31655c41b3f9bf46bc370520eafc081ba4c3e79fa564b60e976663abf7e7"}, ] [package.dependencies] -botocore = ">=1.36.22,<1.37.0" +botocore = ">=1.40.47,<1.41.0" jmespath = ">=0.7.1,<2.0.0" -s3transfer = ">=0.11.0,<0.12.0" +s3transfer = ">=0.14.0,<0.15.0" [package.extras] crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.36.22" +version = "1.40.47" description = "Low-level, data-driven core of boto 3." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "botocore-1.36.22-py3-none-any.whl", hash = "sha256:75d6b34acb0686ee4d54ff6eb285e78ccfe318407428769d1e3e13351714d890"}, - {file = "botocore-1.36.22.tar.gz", hash = "sha256:59520247d5a479731724f97c995d5a1c2aae3b303b324f39d99efcfad1d3019e"}, + {file = "botocore-1.40.47-py3-none-any.whl", hash = "sha256:0845c5bc49fc9d45938ff3609df7ec1eff0d26c1a4edcd03e16ad2194c3a9a56"}, + {file = "botocore-1.40.47.tar.gz", hash = "sha256:8eb950046ba8afc99dedb0268282b4f9a61bca2c7a6415036bff2beee5e180ca"}, ] [package.dependencies] @@ -90,7 +129,7 @@ python-dateutil = ">=2.1,<3.0.0" urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""} [package.extras] -crt = ["awscrt (==0.23.8)"] +crt = ["awscrt (==0.27.6)"] [[package]] name = "certifi" @@ -98,6 +137,7 @@ version = "2025.1.31" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, @@ -109,6 +149,7 @@ version = "1.17.1" description = "Foreign Function Interface for Python calling C code." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, @@ -188,6 +229,7 @@ version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, @@ -283,68 +325,193 @@ files = [ {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, ] +[[package]] +name = "click" +version = "8.2.1" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.10" +groups = ["main", "dev"] +files = [ + {file = "click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b"}, + {file = "click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + [[package]] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev"] files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +markers = {main = "platform_system == \"Windows\"", dev = "platform_system == \"Windows\" or sys_platform == \"win32\""} + +[[package]] +name = "coverage" +version = "7.10.6" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "coverage-7.10.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:70e7bfbd57126b5554aa482691145f798d7df77489a177a6bef80de78860a356"}, + {file = "coverage-7.10.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e41be6f0f19da64af13403e52f2dec38bbc2937af54df8ecef10850ff8d35301"}, + {file = "coverage-7.10.6-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c61fc91ab80b23f5fddbee342d19662f3d3328173229caded831aa0bd7595460"}, + {file = "coverage-7.10.6-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10356fdd33a7cc06e8051413140bbdc6f972137508a3572e3f59f805cd2832fd"}, + {file = "coverage-7.10.6-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:80b1695cf7c5ebe7b44bf2521221b9bb8cdf69b1f24231149a7e3eb1ae5fa2fb"}, + {file = "coverage-7.10.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2e4c33e6378b9d52d3454bd08847a8651f4ed23ddbb4a0520227bd346382bbc6"}, + {file = "coverage-7.10.6-cp310-cp310-musllinux_1_2_i686.whl", hash = 
"sha256:c8a3ec16e34ef980a46f60dc6ad86ec60f763c3f2fa0db6d261e6e754f72e945"}, + {file = "coverage-7.10.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7d79dabc0a56f5af990cc6da9ad1e40766e82773c075f09cc571e2076fef882e"}, + {file = "coverage-7.10.6-cp310-cp310-win32.whl", hash = "sha256:86b9b59f2b16e981906e9d6383eb6446d5b46c278460ae2c36487667717eccf1"}, + {file = "coverage-7.10.6-cp310-cp310-win_amd64.whl", hash = "sha256:e132b9152749bd33534e5bd8565c7576f135f157b4029b975e15ee184325f528"}, + {file = "coverage-7.10.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c706db3cabb7ceef779de68270150665e710b46d56372455cd741184f3868d8f"}, + {file = "coverage-7.10.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e0c38dc289e0508ef68ec95834cb5d2e96fdbe792eaccaa1bccac3966bbadcc"}, + {file = "coverage-7.10.6-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:752a3005a1ded28f2f3a6e8787e24f28d6abe176ca64677bcd8d53d6fe2ec08a"}, + {file = "coverage-7.10.6-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:689920ecfd60f992cafca4f5477d55720466ad2c7fa29bb56ac8d44a1ac2b47a"}, + {file = "coverage-7.10.6-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec98435796d2624d6905820a42f82149ee9fc4f2d45c2c5bc5a44481cc50db62"}, + {file = "coverage-7.10.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b37201ce4a458c7a758ecc4efa92fa8ed783c66e0fa3c42ae19fc454a0792153"}, + {file = "coverage-7.10.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:2904271c80898663c810a6b067920a61dd8d38341244a3605bd31ab55250dad5"}, + {file = "coverage-7.10.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5aea98383463d6e1fa4e95416d8de66f2d0cb588774ee20ae1b28df826bcb619"}, + {file = "coverage-7.10.6-cp311-cp311-win32.whl", hash = "sha256:e3fb1fa01d3598002777dd259c0c2e6d9d5e10e7222976fc8e03992f972a2cba"}, + {file = "coverage-7.10.6-cp311-cp311-win_amd64.whl", hash = "sha256:f35ed9d945bece26553d5b4c8630453169672bea0050a564456eb88bdffd927e"}, + {file = "coverage-7.10.6-cp311-cp311-win_arm64.whl", hash = "sha256:99e1a305c7765631d74b98bf7dbf54eeea931f975e80f115437d23848ee8c27c"}, + {file = "coverage-7.10.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5b2dd6059938063a2c9fee1af729d4f2af28fd1a545e9b7652861f0d752ebcea"}, + {file = "coverage-7.10.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:388d80e56191bf846c485c14ae2bc8898aa3124d9d35903fef7d907780477634"}, + {file = "coverage-7.10.6-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:90cb5b1a4670662719591aa92d0095bb41714970c0b065b02a2610172dbf0af6"}, + {file = "coverage-7.10.6-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:961834e2f2b863a0e14260a9a273aff07ff7818ab6e66d2addf5628590c628f9"}, + {file = "coverage-7.10.6-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bf9a19f5012dab774628491659646335b1928cfc931bf8d97b0d5918dd58033c"}, + {file = "coverage-7.10.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:99c4283e2a0e147b9c9cc6bc9c96124de9419d6044837e9799763a0e29a7321a"}, + {file = "coverage-7.10.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:282b1b20f45df57cc508c1e033403f02283adfb67d4c9c35a90281d81e5c52c5"}, + {file = "coverage-7.10.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8cdbe264f11afd69841bd8c0d83ca10b5b32853263ee62e6ac6a0ab63895f972"}, + {file = 
"coverage-7.10.6-cp312-cp312-win32.whl", hash = "sha256:a517feaf3a0a3eca1ee985d8373135cfdedfbba3882a5eab4362bda7c7cf518d"}, + {file = "coverage-7.10.6-cp312-cp312-win_amd64.whl", hash = "sha256:856986eadf41f52b214176d894a7de05331117f6035a28ac0016c0f63d887629"}, + {file = "coverage-7.10.6-cp312-cp312-win_arm64.whl", hash = "sha256:acf36b8268785aad739443fa2780c16260ee3fa09d12b3a70f772ef100939d80"}, + {file = "coverage-7.10.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ffea0575345e9ee0144dfe5701aa17f3ba546f8c3bb48db62ae101afb740e7d6"}, + {file = "coverage-7.10.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:95d91d7317cde40a1c249d6b7382750b7e6d86fad9d8eaf4fa3f8f44cf171e80"}, + {file = "coverage-7.10.6-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3e23dd5408fe71a356b41baa82892772a4cefcf758f2ca3383d2aa39e1b7a003"}, + {file = "coverage-7.10.6-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0f3f56e4cb573755e96a16501a98bf211f100463d70275759e73f3cbc00d4f27"}, + {file = "coverage-7.10.6-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:db4a1d897bbbe7339946ffa2fe60c10cc81c43fab8b062d3fcb84188688174a4"}, + {file = "coverage-7.10.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d8fd7879082953c156d5b13c74aa6cca37f6a6f4747b39538504c3f9c63d043d"}, + {file = "coverage-7.10.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:28395ca3f71cd103b8c116333fa9db867f3a3e1ad6a084aa3725ae002b6583bc"}, + {file = "coverage-7.10.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:61c950fc33d29c91b9e18540e1aed7d9f6787cc870a3e4032493bbbe641d12fc"}, + {file = "coverage-7.10.6-cp313-cp313-win32.whl", hash = "sha256:160c00a5e6b6bdf4e5984b0ef21fc860bc94416c41b7df4d63f536d17c38902e"}, + {file = "coverage-7.10.6-cp313-cp313-win_amd64.whl", hash = "sha256:628055297f3e2aa181464c3808402887643405573eb3d9de060d81531fa79d32"}, + {file = "coverage-7.10.6-cp313-cp313-win_arm64.whl", hash = "sha256:df4ec1f8540b0bcbe26ca7dd0f541847cc8a108b35596f9f91f59f0c060bfdd2"}, + {file = "coverage-7.10.6-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:c9a8b7a34a4de3ed987f636f71881cd3b8339f61118b1aa311fbda12741bff0b"}, + {file = "coverage-7.10.6-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8dd5af36092430c2b075cee966719898f2ae87b636cefb85a653f1d0ba5d5393"}, + {file = "coverage-7.10.6-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:b0353b0f0850d49ada66fdd7d0c7cdb0f86b900bb9e367024fd14a60cecc1e27"}, + {file = "coverage-7.10.6-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d6b9ae13d5d3e8aeca9ca94198aa7b3ebbc5acfada557d724f2a1f03d2c0b0df"}, + {file = "coverage-7.10.6-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:675824a363cc05781b1527b39dc2587b8984965834a748177ee3c37b64ffeafb"}, + {file = "coverage-7.10.6-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:692d70ea725f471a547c305f0d0fc6a73480c62fb0da726370c088ab21aed282"}, + {file = "coverage-7.10.6-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:851430a9a361c7a8484a36126d1d0ff8d529d97385eacc8dfdc9bfc8c2d2cbe4"}, + {file = "coverage-7.10.6-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d9369a23186d189b2fc95cc08b8160ba242057e887d766864f7adf3c46b2df21"}, + {file = "coverage-7.10.6-cp313-cp313t-win32.whl", hash = 
"sha256:92be86fcb125e9bda0da7806afd29a3fd33fdf58fba5d60318399adf40bf37d0"}, + {file = "coverage-7.10.6-cp313-cp313t-win_amd64.whl", hash = "sha256:6b3039e2ca459a70c79523d39347d83b73f2f06af5624905eba7ec34d64d80b5"}, + {file = "coverage-7.10.6-cp313-cp313t-win_arm64.whl", hash = "sha256:3fb99d0786fe17b228eab663d16bee2288e8724d26a199c29325aac4b0319b9b"}, + {file = "coverage-7.10.6-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:6008a021907be8c4c02f37cdc3ffb258493bdebfeaf9a839f9e71dfdc47b018e"}, + {file = "coverage-7.10.6-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:5e75e37f23eb144e78940b40395b42f2321951206a4f50e23cfd6e8a198d3ceb"}, + {file = "coverage-7.10.6-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0f7cb359a448e043c576f0da00aa8bfd796a01b06aa610ca453d4dde09cc1034"}, + {file = "coverage-7.10.6-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c68018e4fc4e14b5668f1353b41ccf4bc83ba355f0e1b3836861c6f042d89ac1"}, + {file = "coverage-7.10.6-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cd4b2b0707fc55afa160cd5fc33b27ccbf75ca11d81f4ec9863d5793fc6df56a"}, + {file = "coverage-7.10.6-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4cec13817a651f8804a86e4f79d815b3b28472c910e099e4d5a0e8a3b6a1d4cb"}, + {file = "coverage-7.10.6-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:f2a6a8e06bbda06f78739f40bfb56c45d14eb8249d0f0ea6d4b3d48e1f7c695d"}, + {file = "coverage-7.10.6-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:081b98395ced0d9bcf60ada7661a0b75f36b78b9d7e39ea0790bb4ed8da14747"}, + {file = "coverage-7.10.6-cp314-cp314-win32.whl", hash = "sha256:6937347c5d7d069ee776b2bf4e1212f912a9f1f141a429c475e6089462fcecc5"}, + {file = "coverage-7.10.6-cp314-cp314-win_amd64.whl", hash = "sha256:adec1d980fa07e60b6ef865f9e5410ba760e4e1d26f60f7e5772c73b9a5b0713"}, + {file = "coverage-7.10.6-cp314-cp314-win_arm64.whl", hash = "sha256:a80f7aef9535442bdcf562e5a0d5a5538ce8abe6bb209cfbf170c462ac2c2a32"}, + {file = "coverage-7.10.6-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:0de434f4fbbe5af4fa7989521c655c8c779afb61c53ab561b64dcee6149e4c65"}, + {file = "coverage-7.10.6-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6e31b8155150c57e5ac43ccd289d079eb3f825187d7c66e755a055d2c85794c6"}, + {file = "coverage-7.10.6-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:98cede73eb83c31e2118ae8d379c12e3e42736903a8afcca92a7218e1f2903b0"}, + {file = "coverage-7.10.6-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f863c08f4ff6b64fa8045b1e3da480f5374779ef187f07b82e0538c68cb4ff8e"}, + {file = "coverage-7.10.6-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2b38261034fda87be356f2c3f42221fdb4171c3ce7658066ae449241485390d5"}, + {file = "coverage-7.10.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0e93b1476b79eae849dc3872faeb0bf7948fd9ea34869590bc16a2a00b9c82a7"}, + {file = "coverage-7.10.6-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:ff8a991f70f4c0cf53088abf1e3886edcc87d53004c7bb94e78650b4d3dac3b5"}, + {file = "coverage-7.10.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ac765b026c9f33044419cbba1da913cfb82cca1b60598ac1c7a5ed6aac4621a0"}, + {file = "coverage-7.10.6-cp314-cp314t-win32.whl", hash = "sha256:441c357d55f4936875636ef2cfb3bee36e466dcf50df9afbd398ce79dba1ebb7"}, + {file = 
"coverage-7.10.6-cp314-cp314t-win_amd64.whl", hash = "sha256:073711de3181b2e204e4870ac83a7c4853115b42e9cd4d145f2231e12d670930"}, + {file = "coverage-7.10.6-cp314-cp314t-win_arm64.whl", hash = "sha256:137921f2bac5559334ba66122b753db6dc5d1cf01eb7b64eb412bb0d064ef35b"}, + {file = "coverage-7.10.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:90558c35af64971d65fbd935c32010f9a2f52776103a259f1dee865fe8259352"}, + {file = "coverage-7.10.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8953746d371e5695405806c46d705a3cd170b9cc2b9f93953ad838f6c1e58612"}, + {file = "coverage-7.10.6-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c83f6afb480eae0313114297d29d7c295670a41c11b274e6bca0c64540c1ce7b"}, + {file = "coverage-7.10.6-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7eb68d356ba0cc158ca535ce1381dbf2037fa8cb5b1ae5ddfc302e7317d04144"}, + {file = "coverage-7.10.6-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5b15a87265e96307482746d86995f4bff282f14b027db75469c446da6127433b"}, + {file = "coverage-7.10.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fc53ba868875bfbb66ee447d64d6413c2db91fddcfca57025a0e7ab5b07d5862"}, + {file = "coverage-7.10.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:efeda443000aa23f276f4df973cb82beca682fd800bb119d19e80504ffe53ec2"}, + {file = "coverage-7.10.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9702b59d582ff1e184945d8b501ffdd08d2cee38d93a2206aa5f1365ce0b8d78"}, + {file = "coverage-7.10.6-cp39-cp39-win32.whl", hash = "sha256:2195f8e16ba1a44651ca684db2ea2b2d4b5345da12f07d9c22a395202a05b23c"}, + {file = "coverage-7.10.6-cp39-cp39-win_amd64.whl", hash = "sha256:f32ff80e7ef6a5b5b606ea69a36e97b219cd9dc799bcf2963018a4d8f788cfbf"}, + {file = "coverage-7.10.6-py3-none-any.whl", hash = "sha256:92c4ecf6bf11b2e85fd4d8204814dc26e6a19f0c9d938c207c5cb0eadfcabbe3"}, + {file = "coverage-7.10.6.tar.gz", hash = "sha256:f644a3ae5933a552a29dbb9aa2f90c677a875f80ebea028e5a52a4f429044b90"}, +] + +[package.extras] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] [[package]] name = "cryptography" -version = "44.0.1" +version = "45.0.7" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = "!=3.9.0,!=3.9.1,>=3.7" +groups = ["main"] files = [ - {file = "cryptography-44.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf688f615c29bfe9dfc44312ca470989279f0e94bb9f631f85e3459af8efc009"}, - {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd7c7e2d71d908dc0f8d2027e1604102140d84b155e658c20e8ad1304317691f"}, - {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:887143b9ff6bad2b7570da75a7fe8bbf5f65276365ac259a5d2d5147a73775f2"}, - {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:322eb03ecc62784536bc173f1483e76747aafeb69c8728df48537eb431cd1911"}, - {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:21377472ca4ada2906bc313168c9dc7b1d7ca417b63c1c3011d0c74b7de9ae69"}, - {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:df978682c1504fc93b3209de21aeabf2375cb1571d4e61907b3e7a2540e83026"}, - {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:eb3889330f2a4a148abead555399ec9a32b13b7c8ba969b72d8e500eb7ef84cd"}, - {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:8e6a85a93d0642bd774460a86513c5d9d80b5c002ca9693e63f6e540f1815ed0"}, - {file = "cryptography-44.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6f76fdd6fd048576a04c5210d53aa04ca34d2ed63336d4abd306d0cbe298fddf"}, - {file = "cryptography-44.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6c8acf6f3d1f47acb2248ec3ea261171a671f3d9428e34ad0357148d492c7864"}, - {file = "cryptography-44.0.1-cp37-abi3-win32.whl", hash = "sha256:24979e9f2040c953a94bf3c6782e67795a4c260734e5264dceea65c8f4bae64a"}, - {file = "cryptography-44.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:fd0ee90072861e276b0ff08bd627abec29e32a53b2be44e41dbcdf87cbee2b00"}, - {file = "cryptography-44.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:a2d8a7045e1ab9b9f803f0d9531ead85f90c5f2859e653b61497228b18452008"}, - {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8272f257cf1cbd3f2e120f14c68bff2b6bdfcc157fafdee84a1b795efd72862"}, - {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e8d181e90a777b63f3f0caa836844a1182f1f265687fac2115fcf245f5fbec3"}, - {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:436df4f203482f41aad60ed1813811ac4ab102765ecae7a2bbb1dbb66dcff5a7"}, - {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4f422e8c6a28cf8b7f883eb790695d6d45b0c385a2583073f3cec434cc705e1a"}, - {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:72198e2b5925155497a5a3e8c216c7fb3e64c16ccee11f0e7da272fa93b35c4c"}, - {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:2a46a89ad3e6176223b632056f321bc7de36b9f9b93b2cc1cccf935a3849dc62"}, - {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:53f23339864b617a3dfc2b0ac8d5c432625c80014c25caac9082314e9de56f41"}, - {file = "cryptography-44.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:888fcc3fce0c888785a4876ca55f9f43787f4c5c1cc1e2e0da71ad481ff82c5b"}, - {file = "cryptography-44.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:00918d859aa4e57db8299607086f793fa7813ae2ff5a4637e318a25ef82730f7"}, - {file = 
"cryptography-44.0.1-cp39-abi3-win32.whl", hash = "sha256:9b336599e2cb77b1008cb2ac264b290803ec5e8e89d618a5e978ff5eb6f715d9"}, - {file = "cryptography-44.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:e403f7f766ded778ecdb790da786b418a9f2394f36e8cc8b796cc056ab05f44f"}, - {file = "cryptography-44.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1f9a92144fa0c877117e9748c74501bea842f93d21ee00b0cf922846d9d0b183"}, - {file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:610a83540765a8d8ce0f351ce42e26e53e1f774a6efb71eb1b41eb01d01c3d12"}, - {file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5fed5cd6102bb4eb843e3315d2bf25fede494509bddadb81e03a859c1bc17b83"}, - {file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:f4daefc971c2d1f82f03097dc6f216744a6cd2ac0f04c68fb935ea2ba2a0d420"}, - {file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94f99f2b943b354a5b6307d7e8d19f5c423a794462bde2bf310c770ba052b1c4"}, - {file = "cryptography-44.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d9c5b9f698a83c8bd71e0f4d3f9f839ef244798e5ffe96febfa9714717db7af7"}, - {file = "cryptography-44.0.1.tar.gz", hash = "sha256:f51f5705ab27898afda1aaa430f34ad90dc117421057782022edf0600bec5f14"}, + {file = "cryptography-45.0.7-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:3be4f21c6245930688bd9e162829480de027f8bf962ede33d4f8ba7d67a00cee"}, + {file = "cryptography-45.0.7-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:67285f8a611b0ebc0857ced2081e30302909f571a46bfa7a3cc0ad303fe015c6"}, + {file = "cryptography-45.0.7-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:577470e39e60a6cd7780793202e63536026d9b8641de011ed9d8174da9ca5339"}, + {file = "cryptography-45.0.7-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:4bd3e5c4b9682bc112d634f2c6ccc6736ed3635fc3319ac2bb11d768cc5a00d8"}, + {file = "cryptography-45.0.7-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:465ccac9d70115cd4de7186e60cfe989de73f7bb23e8a7aa45af18f7412e75bf"}, + {file = "cryptography-45.0.7-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:16ede8a4f7929b4b7ff3642eba2bf79aa1d71f24ab6ee443935c0d269b6bc513"}, + {file = "cryptography-45.0.7-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:8978132287a9d3ad6b54fcd1e08548033cc09dc6aacacb6c004c73c3eb5d3ac3"}, + {file = "cryptography-45.0.7-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:b6a0e535baec27b528cb07a119f321ac024592388c5681a5ced167ae98e9fff3"}, + {file = "cryptography-45.0.7-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:a24ee598d10befaec178efdff6054bc4d7e883f615bfbcd08126a0f4931c83a6"}, + {file = "cryptography-45.0.7-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:fa26fa54c0a9384c27fcdc905a2fb7d60ac6e47d14bc2692145f2b3b1e2cfdbd"}, + {file = "cryptography-45.0.7-cp311-abi3-win32.whl", hash = "sha256:bef32a5e327bd8e5af915d3416ffefdbe65ed975b646b3805be81b23580b57b8"}, + {file = "cryptography-45.0.7-cp311-abi3-win_amd64.whl", hash = "sha256:3808e6b2e5f0b46d981c24d79648e5c25c35e59902ea4391a0dcb3e667bf7443"}, + {file = "cryptography-45.0.7-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bfb4c801f65dd61cedfc61a83732327fafbac55a47282e6f26f073ca7a41c3b2"}, + {file = "cryptography-45.0.7-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:81823935e2f8d476707e85a78a405953a03ef7b7b4f55f93f7c2d9680e5e0691"}, + {file 
= "cryptography-45.0.7-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3994c809c17fc570c2af12c9b840d7cea85a9fd3e5c0e0491f4fa3c029216d59"}, + {file = "cryptography-45.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dad43797959a74103cb59c5dac71409f9c27d34c8a05921341fb64ea8ccb1dd4"}, + {file = "cryptography-45.0.7-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:ce7a453385e4c4693985b4a4a3533e041558851eae061a58a5405363b098fcd3"}, + {file = "cryptography-45.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b04f85ac3a90c227b6e5890acb0edbaf3140938dbecf07bff618bf3638578cf1"}, + {file = "cryptography-45.0.7-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:48c41a44ef8b8c2e80ca4527ee81daa4c527df3ecbc9423c41a420a9559d0e27"}, + {file = "cryptography-45.0.7-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f3df7b3d0f91b88b2106031fd995802a2e9ae13e02c36c1fc075b43f420f3a17"}, + {file = "cryptography-45.0.7-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:dd342f085542f6eb894ca00ef70236ea46070c8a13824c6bde0dfdcd36065b9b"}, + {file = "cryptography-45.0.7-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1993a1bb7e4eccfb922b6cd414f072e08ff5816702a0bdb8941c247a6b1b287c"}, + {file = "cryptography-45.0.7-cp37-abi3-win32.whl", hash = "sha256:18fcf70f243fe07252dcb1b268a687f2358025ce32f9f88028ca5c364b123ef5"}, + {file = "cryptography-45.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:7285a89df4900ed3bfaad5679b1e668cb4b38a8de1ccbfc84b05f34512da0a90"}, + {file = "cryptography-45.0.7-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:de58755d723e86175756f463f2f0bddd45cc36fbd62601228a3f8761c9f58252"}, + {file = "cryptography-45.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a20e442e917889d1a6b3c570c9e3fa2fdc398c20868abcea268ea33c024c4083"}, + {file = "cryptography-45.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:258e0dff86d1d891169b5af222d362468a9570e2532923088658aa866eb11130"}, + {file = "cryptography-45.0.7-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d97cf502abe2ab9eff8bd5e4aca274da8d06dd3ef08b759a8d6143f4ad65d4b4"}, + {file = "cryptography-45.0.7-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:c987dad82e8c65ebc985f5dae5e74a3beda9d0a2a4daf8a1115f3772b59e5141"}, + {file = "cryptography-45.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c13b1e3afd29a5b3b2656257f14669ca8fa8d7956d509926f0b130b600b50ab7"}, + {file = "cryptography-45.0.7-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a862753b36620af6fc54209264f92c716367f2f0ff4624952276a6bbd18cbde"}, + {file = "cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:06ce84dc14df0bf6ea84666f958e6080cdb6fe1231be2a51f3fc1267d9f3fb34"}, + {file = "cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d0c5c6bac22b177bf8da7435d9d27a6834ee130309749d162b26c3105c0795a9"}, + {file = "cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:2f641b64acc00811da98df63df7d59fd4706c0df449da71cb7ac39a0732b40ae"}, + {file = "cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:f5414a788ecc6ee6bc58560e85ca624258a55ca434884445440a810796ea0e0b"}, + {file = "cryptography-45.0.7-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:1f3d56f73595376f4244646dd5c5870c14c196949807be39e79e7bd9bac3da63"}, + {file = "cryptography-45.0.7.tar.gz", hash = 
"sha256:4b1654dfc64ea479c242508eb8c724044f1e964a47d1d1cacc5132292d851971"}, ] [package.dependencies] -cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} +cffi = {version = ">=1.14", markers = "platform_python_implementation != \"PyPy\""} [package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0)"] +docs = ["sphinx (>=5.3.0)", "sphinx-inline-tabs ; python_full_version >= \"3.8.0\"", "sphinx-rtd-theme (>=3.0.0) ; python_full_version >= \"3.8.0\""] docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"] -nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2)"] -pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] +nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_full_version >= \"3.8.0\""] +pep8test = ["check-sdist ; python_full_version >= \"3.8.0\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] sdist = ["build (>=1.0.0)"] ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi (>=2024)", "cryptography-vectors (==44.0.1)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] +test = ["certifi (>=2024)", "cryptography-vectors (==45.0.7)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] test-randomorder = ["pytest-randomly"] [[package]] @@ -353,6 +520,7 @@ version = "8.2.0" description = "Deep Difference and Search of any Python object/data. Recreate objects by adding adding deltas to each other." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "deepdiff-8.2.0-py3-none-any.whl", hash = "sha256:5091f2cdfd372b1b9f6bfd8065ba323ae31118dc4e42594371b38c8bea3fd0a4"}, {file = "deepdiff-8.2.0.tar.gz", hash = "sha256:6ec78f65031485735545ffbe7a61e716c3c2d12ca6416886d5e9291fc76c46c3"}, @@ -365,23 +533,13 @@ orderly-set = ">=5.3.0,<6" cli = ["click (==8.1.8)", "pyyaml (==6.0.2)"] optimize = ["orjson"] -[[package]] -name = "docutils" -version = "0.20.1" -description = "Docutils -- Python Documentation Utilities" -optional = false -python-versions = ">=3.7" -files = [ - {file = "docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6"}, - {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, -] - [[package]] name = "dunamai" version = "1.23.0" description = "Dynamic version generation" optional = false python-versions = ">=3.5" +groups = ["main"] files = [ {file = "dunamai-1.23.0-py3-none-any.whl", hash = "sha256:a0906d876e92441793c6a423e16a4802752e723e9c9a5aabdc5535df02dbe041"}, {file = "dunamai-1.23.0.tar.gz", hash = "sha256:a163746de7ea5acb6dacdab3a6ad621ebc612ed1e528aaa8beedb8887fccd2c4"}, @@ -391,50 +549,49 @@ files = [ packaging = ">=20.9" [[package]] -name = "exceptiongroup" -version = "1.2.2" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, - {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, -] - -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "gitdb" -version = "4.0.12" -description = "Git Object Database" +name = "flake8" +version = "7.3.0" +description = "the modular source code checker: pep8 pyflakes and co" optional = 
false -python-versions = ">=3.7" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "gitdb-4.0.12-py3-none-any.whl", hash = "sha256:67073e15955400952c6565cc3e707c554a4eea2e428946f7a4c162fab9bd9bcf"}, - {file = "gitdb-4.0.12.tar.gz", hash = "sha256:5ef71f855d191a3326fcfbc0d5da835f26b13fbcba60c32c21091c349ffdb571"}, + {file = "flake8-7.3.0-py2.py3-none-any.whl", hash = "sha256:b9696257b9ce8beb888cdbe31cf885c90d31928fe202be0889a7cdafad32f01e"}, + {file = "flake8-7.3.0.tar.gz", hash = "sha256:fe044858146b9fc69b551a4b490d69cf960fcb78ad1edcb84e7fbb1b4a8e3872"}, ] [package.dependencies] -smmap = ">=3.0.1,<6" +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.14.0,<2.15.0" +pyflakes = ">=3.4.0,<3.5.0" [[package]] -name = "gitpython" -version = "3.1.44" -description = "GitPython is a Python library used to interact with Git repositories" +name = "gardenlinux" +version = "0.10.0" +description = "Contains tools to work with the features directory of gardenlinux, for example deducting dependencies from feature sets or validating cnames" optional = false -python-versions = ">=3.7" -files = [ - {file = "GitPython-3.1.44-py3-none-any.whl", hash = "sha256:9e0e10cda9bed1ee64bc9a6de50e7e38a9c9943241cd7f585f6df3ed28011110"}, - {file = "gitpython-3.1.44.tar.gz", hash = "sha256:c87e30b26253bf5418b01b0660f818967f3c503193838337fe5e573331249269"}, -] +python-versions = "^3.13" +groups = ["main"] +files = [] +develop = false [package.dependencies] -gitdb = ">=4.0.1,<5" +apt-repo = "^0.5" +boto3 = "^1.40.30" +click = "^8.2.1" +cryptography = "^45.0.6" +jsonschema = "^4.25.1" +networkx = "^3.5" +oras = "^0.2.38" +pygit2 = "^1.18.2" +pygments = "^2.19.2" +PyYAML = "^6.0.2" -[package.extras] -doc = ["sphinx (>=7.1.2,<7.2)", "sphinx-autodoc-typehints", "sphinx_rtd_theme"] -test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions"] +[package.source] +type = "git" +url = "https://github.com/gardenlinux/python-gardenlinux-lib.git" +reference = "0.10.0" +resolved_reference = "02879bd567ed39b5610332afcc6e46197073db0c" [[package]] name = "idna" @@ -442,6 +599,7 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -450,23 +608,13 @@ files = [ [package.extras] all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] -[[package]] -name = "imagesize" -version = "1.4.1" -description = "Getting image size from png/jpeg/jpeg2000/gif file" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, - {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, -] - [[package]] name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = 
"sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, @@ -478,6 +626,7 @@ version = "3.1.5" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, @@ -495,6 +644,7 @@ version = "1.0.1" description = "JSON Matching Expressions" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, @@ -502,13 +652,14 @@ files = [ [[package]] name = "jsonschema" -version = "4.23.0" +version = "4.25.1" description = "An implementation of JSON Schema validation for Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, - {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, + {file = "jsonschema-4.25.1-py3-none-any.whl", hash = "sha256:3fba0169e345c7175110351d456342c364814cfcf3b964ba4587f22915230a63"}, + {file = "jsonschema-4.25.1.tar.gz", hash = "sha256:e4a9655ce0da0c0b67a085847e00a3a51449e1157f4f75e9fb5aa545e122eb85"}, ] [package.dependencies] @@ -519,7 +670,7 @@ rpds-py = ">=0.7.1" [package.extras] format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] -format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=24.6.0)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "rfc3987-syntax (>=1.1.0)", "uri-template", "webcolors (>=24.6.0)"] [[package]] name = "jsonschema-specifications" @@ -527,6 +678,7 @@ version = "2024.10.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "jsonschema_specifications-2024.10.1-py3-none-any.whl", hash = "sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf"}, {file = "jsonschema_specifications-2024.10.1.tar.gz", hash = "sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272"}, @@ -541,6 +693,7 @@ version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, @@ -605,55 +758,80 @@ files = [ {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, ] +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +groups = ["dev"] +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, +] + [[package]] name = "networkx" -version = "3.4.2" +version = "3.5" description = "Python package for creating and manipulating graphs and networks" optional = false -python-versions = ">=3.10" +python-versions = ">=3.11" +groups = ["main"] files = [ - {file = "networkx-3.4.2-py3-none-any.whl", hash = "sha256:df5d4365b724cf81b8c6a7312509d0c22386097011ad1abe274afd5e9d3bbc5f"}, - {file = "networkx-3.4.2.tar.gz", hash = "sha256:307c3669428c5362aab27c8a1260aa8f47c4e91d3891f48be0141738d8d053e1"}, + {file = "networkx-3.5-py3-none-any.whl", hash = "sha256:0030d386a9a06dee3565298b4a734b68589749a544acbb6c412dc9e2489ec6ec"}, + {file = "networkx-3.5.tar.gz", hash = "sha256:d4c6f9cf81f52d69230866796b82afbccdec3db7ae4fbd1b65ea750feed50037"}, ] [package.extras] -default = ["matplotlib (>=3.7)", "numpy (>=1.24)", "pandas (>=2.0)", "scipy (>=1.10,!=1.11.0,!=1.11.1)"] -developer = ["changelist (==0.5)", "mypy (>=1.1)", "pre-commit (>=3.2)", "rtoml"] -doc = ["intersphinx-registry", "myst-nb (>=1.1)", "numpydoc (>=1.8.0)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.15)", "sphinx (>=7.3)", "sphinx-gallery (>=0.16)", "texext (>=0.6.7)"] -example = ["cairocffi (>=1.7)", "contextily (>=1.6)", "igraph (>=0.11)", "momepy (>=0.7.2)", "osmnx (>=1.9)", "scikit-learn (>=1.5)", "seaborn (>=0.13)"] +default = ["matplotlib (>=3.8)", "numpy (>=1.25)", "pandas (>=2.0)", "scipy (>=1.11.2)"] +developer = ["mypy (>=1.15)", "pre-commit (>=4.1)"] +doc = ["intersphinx-registry", "myst-nb (>=1.1)", "numpydoc (>=1.8.0)", "pillow (>=10)", "pydata-sphinx-theme (>=0.16)", "sphinx (>=8.0)", "sphinx-gallery (>=0.18)", "texext (>=0.6.7)"] +example = ["cairocffi (>=1.7)", "contextily (>=1.6)", "igraph (>=0.11)", "momepy (>=0.7.2)", "osmnx (>=2.0.0)", "scikit-learn (>=1.5)", "seaborn (>=0.13)"] extra = ["lxml (>=4.6)", "pydot (>=3.0.1)", "pygraphviz (>=1.14)", "sympy (>=1.10)"] -test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"] +test = ["pytest (>=7.2)", "pytest-cov (>=4.0)", "pytest-xdist (>=3.0)"] +test-extras = ["pytest-mpl", "pytest-randomly"] [[package]] name = "oras" -version = "0.2.0" +version = "0.2.38" description = "OCI Registry 
as Storage Python SDK" optional = false python-versions = "*" -files = [] -develop = false +groups = ["main"] +files = [ + {file = "oras-0.2.38-py3-none-any.whl", hash = "sha256:a97658086cea1e1ebb412a5b504005480c70de5d2e81a160d33e60fc187d42f4"}, + {file = "oras-0.2.38.tar.gz", hash = "sha256:58482f78275cb5cc9fdf135248ca878a19204428a51bd3a1ca24c32714d35341"}, +] [package.dependencies] jsonschema = "*" requests = "*" [package.extras] -all = ["docker (==5.0.1)", "jsonschema", "pytest (>=4.6.2)", "requests"] +all = ["boto3 (>=1.33.0)", "docker (==5.0.1)", "jsonschema", "pytest (>=4.6.2)", "requests"] docker = ["docker (==5.0.1)"] +ecr = ["boto3 (>=1.33.0)"] tests = ["pytest (>=4.6.2)"] -[package.source] -type = "git" -url = "https://github.com/oras-project/oras-py.git" -reference = "caf8db5b279382335fbb1f6d7402ed9b73618d37" -resolved_reference = "caf8db5b279382335fbb1f6d7402ed9b73618d37" - [[package]] name = "orderly-set" version = "5.3.0" description = "Orderly set" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "orderly_set-5.3.0-py3-none-any.whl", hash = "sha256:c2c0bfe604f5d3d9b24e8262a06feb612594f37aa3845650548befd7772945d1"}, {file = "orderly_set-5.3.0.tar.gz", hash = "sha256:80b3d8fdd3d39004d9aad389eaa0eab02c71f0a0511ba3a6d54a935a6c6a0acc"}, @@ -665,17 +843,48 @@ version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "platformdirs" +version = "4.4.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
+optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "platformdirs-4.4.0-py3-none-any.whl", hash = "sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85"}, + {file = "platformdirs-4.4.0.tar.gz", hash = "sha256:ca753cf4d81dc309bc67b0ea38fd15dc97bc30ce419a7f58d13eb3bf14c4febf"}, +] + +[package.extras] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.14.1)"] + [[package]] name = "pluggy" version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, @@ -691,6 +900,7 @@ version = "1.7.1" description = "Plugin for Poetry to enable dynamic versioning based on VCS tags" optional = false python-versions = "<4.0,>=3.7" +groups = ["main"] files = [ {file = "poetry_dynamic_versioning-1.7.1-py3-none-any.whl", hash = "sha256:70a4a54bee89aef276e3f2f8841f10a6f140b19c5aeb371a1a6095f84fcbe7b1"}, {file = "poetry_dynamic_versioning-1.7.1.tar.gz", hash = "sha256:7304b8459af7b7114cd83429827c4d3d8b7d29df4129dde8dff61c76f93faaa3"}, @@ -704,26 +914,102 @@ tomlkit = ">=0.4" [package.extras] plugin = ["poetry (>=1.2.0)"] +[[package]] +name = "pycodestyle" +version = "2.14.0" +description = "Python style guide checker" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pycodestyle-2.14.0-py2.py3-none-any.whl", hash = "sha256:dd6bf7cb4ee77f8e016f9c8e74a35ddd9f67e1d5fd4184d86c3b98e07099f42d"}, + {file = "pycodestyle-2.14.0.tar.gz", hash = "sha256:c4b5b517d278089ff9d0abdec919cd97262a3367449ea1c8b49b91529167b783"}, +] + [[package]] name = "pycparser" version = "2.22" description = "C parser in Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] +[[package]] +name = "pyflakes" +version = "3.4.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pyflakes-3.4.0-py2.py3-none-any.whl", hash = "sha256:f742a7dbd0d9cb9ea41e9a24a918996e8170c799fa528688d40dd582c8265f4f"}, + {file = "pyflakes-3.4.0.tar.gz", hash = "sha256:b24f96fafb7d2ab0ec5075b7350b3d2d2218eab42003821c06344973d3ea2f58"}, +] + +[[package]] +name = "pygit2" +version = "1.18.2" +description = "Python bindings for libgit2." 
+optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "pygit2-1.18.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a84fbc62b0d2103059559b5af7e939289a0f3fc7d0a7ad84d822eaa97a6db687"}, + {file = "pygit2-1.18.2-cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c84aa50acba5a2c6bb36863fbcc1d772dc00199f9ea41bb5cac73c5fdad42bce"}, + {file = "pygit2-1.18.2-cp310-cp310-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d7b8570f0df4f0a854c3d3bdcec4a5767b50b0acb13ef163f6b96db593e3611f"}, + {file = "pygit2-1.18.2-cp310-cp310-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cccceadab2c772a52081eac4680c3664d2ff21966171d339fee6aaf303ccbe23"}, + {file = "pygit2-1.18.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c51e0b4a733e72212c86c8b3890a4c3572b1cae6d381e56b4d53ba3dafbeecf2"}, + {file = "pygit2-1.18.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:970e9214e9146c893249acb9610fda9220fe048ae76c80fd7f36d0ec3381676b"}, + {file = "pygit2-1.18.2-cp310-cp310-win32.whl", hash = "sha256:546f9b8e7bf9d88d77008a82d7d989c624f5756c4fba26af1b8985019985dc8a"}, + {file = "pygit2-1.18.2-cp310-cp310-win_amd64.whl", hash = "sha256:5383cdfc1315e7d49d7a59a9aa37c4f0f60d08c4de3137f31d20e4be2055ad47"}, + {file = "pygit2-1.18.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3fc89da1426793227e06f2dec5f2df98a0c6806fb4024eec6a125fb7a5042bbf"}, + {file = "pygit2-1.18.2-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:da6ab37a87b58032c596c37bcd0e3926cc6071748230f6f0911b7fe398e021ae"}, + {file = "pygit2-1.18.2-cp311-cp311-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d9642f57943703de3651906f81b9535cb257b3cbe45ecca8f97cf475f1cb6b5f"}, + {file = "pygit2-1.18.2-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1aa3efba6459e10608900fe26679e3b52ea566761f3e7ef9c0805d69a5548631"}, + {file = "pygit2-1.18.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:25957ccf70e37f3e8020748724a14faf4731ceac69ed00ccbb422f99de0a80cc"}, + {file = "pygit2-1.18.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6c9cdbad0888d664b80f30efda055c4c5b8fdae22c709bd57b1060daf8bde055"}, + {file = "pygit2-1.18.2-cp311-cp311-win32.whl", hash = "sha256:91bde9503ad35be55c95251c9a90cfe33cd608042dcc08d3991ed188f41ebec2"}, + {file = "pygit2-1.18.2-cp311-cp311-win_amd64.whl", hash = "sha256:840d01574e164d9d2428d36d9d32d377091ac592a4b1a3aa3452a5342a3f6175"}, + {file = "pygit2-1.18.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:5eaf2855d78c5ad2a6c2ebf840f8717a8980c93567a91fbc0fc91650747454a4"}, + {file = "pygit2-1.18.2-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee5dd227e4516577d9edc2b476462db9f0428d3cc1ad5de32e184458f25046ee"}, + {file = "pygit2-1.18.2-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:07e5c39ed67e07dac4eb99bfc33d7ccc105cd7c4e09916751155e7da3e07b6bc"}, + {file = "pygit2-1.18.2-cp312-cp312-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:12ae4ed05b48bb9f08690c3bb9f96a37a193ed44e1a9a993509a6f1711bb22ae"}, + {file = "pygit2-1.18.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:00919a2eafd975a63025d211e1c1a521bf593f6c822bc61f18c1bc661cbffd42"}, + {file = "pygit2-1.18.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3f96a168bafb99e99b95f59b0090171396ad2fb07713e5505ad3e4c16a41d56a"}, + {file = "pygit2-1.18.2-cp312-cp312-win32.whl", hash = 
"sha256:ff1c99f2f342c3a3ec1847182d236088f1eb32bc6c4f93fbb5cb2514ccbe29f3"}, + {file = "pygit2-1.18.2-cp312-cp312-win_amd64.whl", hash = "sha256:507b5ea151cb963b77995af0c4fb51333f02f15a05c0b36c33cd3f5518134ceb"}, + {file = "pygit2-1.18.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f65d6114d96cb7a21cc09e8cb0622d0388619adf9cdb5d77d94589a41996b0a8"}, + {file = "pygit2-1.18.2-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9367df01958f7e538bc3fc665ace55de0d5b72da5b6b5f95c44ae916c39a6f51"}, + {file = "pygit2-1.18.2-cp313-cp313-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:eb2993e44aaafac5bcd801c2926dcf87c3f8939ff1c5fb9fe0549a81acd27a03"}, + {file = "pygit2-1.18.2-cp313-cp313-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63d5dc116d6054cb4e970160c09440da7ded36acfbc4f06ef8e0d38ac275ee12"}, + {file = "pygit2-1.18.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3b87e7ab87da09145cb45434e6ad0402695ca72ffb764487ecc09d28abef5507"}, + {file = "pygit2-1.18.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a0aa809fd5572c8b1123270263720e458afc9e2069e8d0c1079feebc930e6813"}, + {file = "pygit2-1.18.2-cp313-cp313-win32.whl", hash = "sha256:8c4423b08786d0fcea0c523b82bc5ec52039b01500a3391472786e89cadf1069"}, + {file = "pygit2-1.18.2-cp313-cp313-win_amd64.whl", hash = "sha256:aeba6398d5c689c90c133e07f698aeb9f9693cfbb5707fccffd18f2d67d37c6d"}, + {file = "pygit2-1.18.2-pp310-pypy310_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bd82d37cf5ce474a74388a04b9fb3c28670f44bc7fe970cabbb477a4d1cb871f"}, + {file = "pygit2-1.18.2-pp310-pypy310_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:991fe6bcbe914507abfe81be1c96bd5039ec315354e4132efffcb03eb8b363fb"}, + {file = "pygit2-1.18.2-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d801d272f6331e067bd0d560671311d1ce4bb8f81536675706681ed44cc0d7dc"}, + {file = "pygit2-1.18.2-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e1ff2d60420c98e6e25fd188069cddf8fa7b0417db7405ce7677a2f546e6b03"}, + {file = "pygit2-1.18.2.tar.gz", hash = "sha256:eca87e0662c965715b7f13491d5e858df2c0908341dee9bde2bc03268e460f55"}, +] + +[package.dependencies] +cffi = ">=1.17.0" + [[package]] name = "pygments" -version = "2.19.1" +version = "2.19.2" description = "Pygments is a syntax highlighting package written in Python." 
optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ - {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, - {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, + {file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"}, + {file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"}, ] [package.extras] @@ -731,81 +1017,59 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pytest" -version = "8.3.4" +version = "8.4.1" description = "pytest: simple powerful testing with Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, - {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, + {file = "pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7"}, + {file = "pytest-8.4.1.tar.gz", hash = "sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c"}, ] [package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" +colorama = {version = ">=0.4", markers = "sys_platform == \"win32\""} +iniconfig = ">=1" +packaging = ">=20" pluggy = ">=1.5,<2" -tomli = {version = ">=1", markers = "python_version < \"3.11\""} +pygments = ">=2.7.2" [package.extras] -dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "requests", "setuptools", "xmlschema"] [[package]] -name = "python-dateutil" -version = "2.9.0.post0" -description = "Extensions to the standard Python datetime module" +name = "pytest-cov" +version = "4.1.0" +description = "Pytest plugin for measuring coverage." 
optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +python-versions = ">=3.7" +groups = ["dev"] files = [ - {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, - {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, ] [package.dependencies] -six = ">=1.5" - -[[package]] -name = "python-dotenv" -version = "1.0.1" -description = "Read key-value pairs from a .env file and set them as environment variables" -optional = false -python-versions = ">=3.8" -files = [ - {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, - {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, -] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" [package.extras] -cli = ["click (>=5.0)"] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] [[package]] -name = "python-gardenlinux-lib" -version = "0.5.0" -description = "Contains tools to work with the features directory of gardenlinux, for example deducting dependencies from feature sets or validating cnames" +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" optional = false -python-versions = "^3.10" -files = [] -develop = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] [package.dependencies] -apt-repo = "^0.5" -boto3 = "*" -cryptography = "^44.0.0" -gitpython = "^3.1.43" -jsonschema = "^4.23.0" -networkx = "^3.3" -oras = {git = "https://github.com/oras-project/oras-py.git", rev = "caf8db5b279382335fbb1f6d7402ed9b73618d37"} -pytest = "^8.3.2" -python-dotenv = "^1.0.1" -PyYAML = "^6.0.2" -sphinx-rtd-theme = "^2.0.0" - -[package.source] -type = "git" -url = "https://github.com/gardenlinux/python-gardenlinux-lib.git" -reference = "main" -resolved_reference = "cac2454ab8d97286860dfbb2754527e7dc84ba2f" +six = ">=1.5" [[package]] name = "pytz" @@ -813,6 +1077,7 @@ version = "2025.1" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "pytz-2025.1-py2.py3-none-any.whl", hash = "sha256:89dd22dca55b46eac6eda23b2d72721bf1bdfef212645d81513ef5d03038de57"}, {file = "pytz-2025.1.tar.gz", hash = "sha256:c2db42be2a2518b28e65f9207c4d05e6ff547d1efa4086469ef855e4ab70178e"}, @@ -824,6 +1089,7 @@ version = "6.0.2" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, @@ -886,6 +1152,7 @@ version = "0.36.2" description = "JSON Referencing + Python" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0"}, {file = "referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa"}, @@ -894,7 +1161,6 @@ files = [ [package.dependencies] attrs = ">=22.2.0" rpds-py = ">=0.7.0" -typing-extensions = {version = ">=4.4.0", markers = "python_version < \"3.13\""} [[package]] name = "requests" @@ -902,6 +1168,7 @@ version = "2.32.3" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, @@ -923,6 +1190,7 @@ version = "0.22.3" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "rpds_py-0.22.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:6c7b99ca52c2c1752b544e310101b98a659b720b21db00e65edca34483259967"}, {file = "rpds_py-0.22.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:be2eb3f2495ba669d2a985f9b426c1797b7d48d6963899276d22f23e33d47e37"}, @@ -1031,20 +1299,21 @@ files = [ [[package]] name = "s3transfer" -version = "0.11.2" +version = "0.14.0" description = "An Amazon S3 Transfer Manager" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "s3transfer-0.11.2-py3-none-any.whl", hash = "sha256:be6ecb39fadd986ef1701097771f87e4d2f821f27f6071c872143884d2950fbc"}, - {file = "s3transfer-0.11.2.tar.gz", hash = "sha256:3b39185cb72f5acc77db1a58b6e25b977f28d20496b6e58d6813d75f464d632f"}, + {file = "s3transfer-0.14.0-py3-none-any.whl", hash = "sha256:ea3b790c7077558ed1f02a3072fb3cb992bbbd253392f4b6e9e8976941c7d456"}, + {file = "s3transfer-0.14.0.tar.gz", hash = "sha256:eff12264e7c8b4985074ccce27a3b38a485bb7f7422cc8046fee9be4983e4125"}, ] [package.dependencies] -botocore = ">=1.36.0,<2.0a.0" +botocore = ">=1.37.4,<2.0a.0" [package.extras] -crt = ["botocore[crt] (>=1.36.0,<2.0a.0)"] +crt = ["botocore[crt] (>=1.37.4,<2.0a.0)"] [[package]] name = "six" @@ -1052,201 +1321,19 @@ version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] -[[package]] -name = "smmap" -version = "5.0.2" -description = "A pure Python implementation of a sliding window memory map manager" -optional = false -python-versions = ">=3.7" -files = [ - {file = "smmap-5.0.2-py3-none-any.whl", hash = "sha256:b30115f0def7d7531d22a0fb6502488d879e75b260a9db4d0819cfb25403af5e"}, - {file = "smmap-5.0.2.tar.gz", hash = "sha256:26ea65a03958fa0c8a1c7e8c7a58fdc77221b8910f6be2131affade476898ad5"}, -] - -[[package]] -name = "snowballstemmer" -version = "2.2.0" -description = "This package provides 29 stemmers for 28 languages generated from Snowball 
algorithms." -optional = false -python-versions = "*" -files = [ - {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, - {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, -] - -[[package]] -name = "sphinx" -version = "7.4.7" -description = "Python documentation generator" -optional = false -python-versions = ">=3.9" -files = [ - {file = "sphinx-7.4.7-py3-none-any.whl", hash = "sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239"}, - {file = "sphinx-7.4.7.tar.gz", hash = "sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe"}, -] - -[package.dependencies] -alabaster = ">=0.7.14,<0.8.0" -babel = ">=2.13" -colorama = {version = ">=0.4.6", markers = "sys_platform == \"win32\""} -docutils = ">=0.20,<0.22" -imagesize = ">=1.3" -Jinja2 = ">=3.1" -packaging = ">=23.0" -Pygments = ">=2.17" -requests = ">=2.30.0" -snowballstemmer = ">=2.2" -sphinxcontrib-applehelp = "*" -sphinxcontrib-devhelp = "*" -sphinxcontrib-htmlhelp = ">=2.0.0" -sphinxcontrib-jsmath = "*" -sphinxcontrib-qthelp = "*" -sphinxcontrib-serializinghtml = ">=1.1.9" -tomli = {version = ">=2", markers = "python_version < \"3.11\""} - -[package.extras] -docs = ["sphinxcontrib-websupport"] -lint = ["flake8 (>=6.0)", "importlib-metadata (>=6.0)", "mypy (==1.10.1)", "pytest (>=6.0)", "ruff (==0.5.2)", "sphinx-lint (>=0.9)", "tomli (>=2)", "types-docutils (==0.21.0.20240711)", "types-requests (>=2.30.0)"] -test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", "pytest (>=8.0)", "setuptools (>=70.0)", "typing_extensions (>=4.9)"] - -[[package]] -name = "sphinx-rtd-theme" -version = "2.0.0" -description = "Read the Docs theme for Sphinx" -optional = false -python-versions = ">=3.6" -files = [ - {file = "sphinx_rtd_theme-2.0.0-py2.py3-none-any.whl", hash = "sha256:ec93d0856dc280cf3aee9a4c9807c60e027c7f7b461b77aeffed682e68f0e586"}, - {file = "sphinx_rtd_theme-2.0.0.tar.gz", hash = "sha256:bd5d7b80622406762073a04ef8fadc5f9151261563d47027de09910ce03afe6b"}, -] - -[package.dependencies] -docutils = "<0.21" -sphinx = ">=5,<8" -sphinxcontrib-jquery = ">=4,<5" - -[package.extras] -dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client", "wheel"] - -[[package]] -name = "sphinxcontrib-applehelp" -version = "2.0.0" -description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" -optional = false -python-versions = ">=3.9" -files = [ - {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"}, - {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"}, -] - -[package.extras] -lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] -standalone = ["Sphinx (>=5)"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-devhelp" -version = "2.0.0" -description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" -optional = false -python-versions = ">=3.9" -files = [ - {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"}, - {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"}, -] - -[package.extras] -lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] -standalone = ["Sphinx (>=5)"] -test = 
["pytest"] - -[[package]] -name = "sphinxcontrib-htmlhelp" -version = "2.1.0" -description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" -optional = false -python-versions = ">=3.9" -files = [ - {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"}, - {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"}, -] - -[package.extras] -lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] -standalone = ["Sphinx (>=5)"] -test = ["html5lib", "pytest"] - -[[package]] -name = "sphinxcontrib-jquery" -version = "4.1" -description = "Extension to include jQuery on newer Sphinx releases" -optional = false -python-versions = ">=2.7" -files = [ - {file = "sphinxcontrib-jquery-4.1.tar.gz", hash = "sha256:1620739f04e36a2c779f1a131a2dfd49b2fd07351bf1968ced074365933abc7a"}, - {file = "sphinxcontrib_jquery-4.1-py2.py3-none-any.whl", hash = "sha256:f936030d7d0147dd026a4f2b5a57343d233f1fc7b363f68b3d4f1cb0993878ae"}, -] - -[package.dependencies] -Sphinx = ">=1.8" - -[[package]] -name = "sphinxcontrib-jsmath" -version = "1.0.1" -description = "A sphinx extension which renders display math in HTML via JavaScript" -optional = false -python-versions = ">=3.5" -files = [ - {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, - {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, -] - -[package.extras] -test = ["flake8", "mypy", "pytest"] - -[[package]] -name = "sphinxcontrib-qthelp" -version = "2.0.0" -description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" -optional = false -python-versions = ">=3.9" -files = [ - {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, - {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, -] - -[package.extras] -lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] -standalone = ["Sphinx (>=5)"] -test = ["defusedxml (>=0.7.1)", "pytest"] - -[[package]] -name = "sphinxcontrib-serializinghtml" -version = "2.0.0" -description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" -optional = false -python-versions = ">=3.9" -files = [ - {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, - {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, -] - -[package.extras] -lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] -standalone = ["Sphinx (>=5)"] -test = ["pytest"] - [[package]] name = "tabulate" version = "0.9.0" description = "Pretty-print tabular data" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, @@ -1255,87 +1342,37 @@ files = [ [package.extras] widechars = ["wcwidth"] -[[package]] -name = "tomli" -version = "2.2.1" -description = "A lil' TOML parser" 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, - {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, - {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, - {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, - {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, - {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, - {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, - {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, - {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, - {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, - {file 
= "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, - {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, - {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, - {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, - {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, -] - [[package]] name = "tomlkit" version = "0.13.2" description = "Style preserving TOML library" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, ] -[[package]] -name = "typing-extensions" -version = "4.12.2" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, -] - [[package]] name = "urllib3" version = "2.3.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] [metadata] -lock-version = "2.0" -python-versions = "^3.10" -content-hash = "2eee80c9619baa4d7a025d302be198c88db668ea2b10922f8569e0f6648c03a9" +lock-version = "2.1" +python-versions = "^3.13" +content-hash = "91303029a0704cc2c587e6b7f6980e6c97743eb130eb10a931edfd1df57c5ce7" diff --git a/pyproject.toml b/pyproject.toml index 92a7a33..5271ff5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,7 +11,7 @@ readme = "README.md" packages = [{include = "glrd"}] [tool.poetry.dependencies] -python = "^3.10" +python = "^3.13" poetry-dynamic-versioning = "*" boto3 = "*" deepdiff = "*" @@ -19,9 +19,17 @@ jsonschema = "*" PyYAML = "*" pytz = "*" python-dateutil = "*" +pygit2 = "*" requests = "*" tabulate = "*" -python-gardenlinux-lib = { git = "https://github.com/gardenlinux/python-gardenlinux-lib.git", branch = "main" } +gardenlinux = { git = "https://github.com/gardenlinux/python-gardenlinux-lib.git", tag = "0.10.0" } + +[tool.poetry.group.dev.dependencies] +pytest = "*" +pytest-cov = "*" +black = "*" +flake8 = "*" +autopep8 = "*" [tool.poetry-dynamic-versioning] enable = true @@ -43,3 +51,19 @@ glrd = "glrd.query:main" [build-system] requires = ["poetry-core>=1.0.0", "poetry-dynamic-versioning>=1.0.0,<2.0.0"] build-backend = "poetry_dynamic_versioning.backend" + +[tool.pytest.ini_options] +testpaths = ["tests"] +python_files = ["test_*.py"] +python_classes = ["Test*"] +python_functions = ["test_*"] +addopts = [ + "-v", + "--tb=short", + "--strict-markers", + "--disable-warnings", +] +markers = [ + "integration: marks tests as integration tests", + "unit: marks tests as unit tests", +] diff --git a/releases-input.yaml b/releases-input.yaml index ad52083..0e2f480 100644 --- a/releases-input.yaml +++ b/releases-input.yaml @@ -1,8 +1,8 @@ # This file is supposed to provide data that is not automatically retrievable. # e.g. 
lifecycle.extended and lifecycle.eol data releases: - - name: stable-27 - type: 'stable' + - name: major-27 + type: 'major' version: major: 27 lifecycle: @@ -13,8 +13,8 @@ releases: isodate: '2020-12-09' eol: isodate: '2021-03-09' - - name: stable-184 - type: 'stable' + - name: major-184 + type: 'major' version: major: 184 lifecycle: @@ -25,8 +25,8 @@ releases: isodate: '2020-04-29' eol: isodate: '2021-07-29' - - name: stable-318 - type: 'stable' + - name: major-318 + type: 'major' version: major: 318 lifecycle: @@ -37,8 +37,8 @@ releases: isodate: '2021-10-28' eol: isodate: '2023-01-28' - - name: stable-576 - type: 'stable' + - name: major-576 + type: 'major' version: major: 576 lifecycle: @@ -49,8 +49,8 @@ releases: isodate: '2023-05-17' eol: isodate: '2023-08-17' - - name: stable-934 - type: 'stable' + - name: major-934 + type: 'major' version: major: 934 lifecycle: @@ -61,8 +61,8 @@ releases: isodate: '2023-12-05' eol: isodate: '2024-03-05' - - name: stable-1312 - type: 'stable' + - name: major-1312 + type: 'major' version: major: 1312 lifecycle: @@ -73,8 +73,8 @@ releases: isodate: '2024-05-03' eol: isodate: '2024-08-03' - - name: stable-1443 - type: 'stable' + - name: major-1443 + type: 'major' version: major: 1443 lifecycle: @@ -85,8 +85,8 @@ releases: isodate: '2024-09-13' eol: isodate: '2025-01-13' - - name: stable-1592 - type: 'stable' + - name: major-1592 + type: 'major' version: major: 1592 lifecycle: diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..4ebb16e --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1 @@ +# Tests package for GLRD diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..46cdff4 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,43 @@ +""" +Pytest configuration and fixtures for GLRD tests. +""" + +import os +import tempfile +import shutil +import pytest +from pathlib import Path + + +@pytest.fixture +def test_dir(): + """Create a temporary directory for test files.""" + test_dir = tempfile.mkdtemp(prefix="glrd_test_") + yield test_dir + # Cleanup + if os.path.exists(test_dir): + shutil.rmtree(test_dir) + + +@pytest.fixture +def manage_script(): + """Path to the manage.py script.""" + script_path = Path(__file__).parent.parent / "glrd" / "manage.py" + assert script_path.exists(), f"manage.py not found at {script_path}" + return str(script_path) + + +@pytest.fixture +def query_script(): + """Path to the query.py script.""" + script_path = Path(__file__).parent.parent / "glrd" / "query.py" + assert script_path.exists(), f"query.py not found at {script_path}" + return str(script_path) + + +@pytest.fixture +def glrd_script(): + """Path to the glrd script (query.py).""" + script_path = Path(__file__).parent.parent / "glrd" / "query.py" + assert script_path.exists(), f"query.py not found at {script_path}" + return str(script_path) diff --git a/tests/test_integration.py b/tests/test_integration.py new file mode 100644 index 0000000..8eb155c --- /dev/null +++ b/tests/test_integration.py @@ -0,0 +1,1178 @@ +""" +Integration tests for glrd-manage and glrd commands. + +This test suite validates: +1. glrd-manage command execution with various version formats +2. JSON output file generation +3. glrd query verification of generated files +4. 
Schema validation for both v1 and v2 schemas +""" + +import os +import json +import subprocess +import sys +import pytest + + +@pytest.mark.integration +class TestGLRDIntegration: + """Integration tests for GLRD manage and query commands.""" + + def run_manage_command(self, manage_script, args, expect_success=True): + """Run glrd-manage command and return result.""" + cmd = [sys.executable, manage_script] + args + result = subprocess.run( + cmd, + capture_output=True, + text=True, + cwd=os.path.dirname(manage_script), + ) + + if expect_success: + assert result.returncode == 0, ( + f"Command failed: {' '.join(cmd)}\n" + f"STDOUT: {result.stdout}\n" + f"STDERR: {result.stderr}" + ) + else: + assert result.returncode != 0, ( + f"Command should have failed but succeeded: {' '.join(cmd)}\n" + f"STDOUT: {result.stdout}" + ) + + return result + + def run_manage_command_stdin( + self, manage_script, args, stdin_data, expect_success=True + ): + """Run glrd-manage command with data on stdin and return result.""" + cmd = [sys.executable, manage_script] + args + result = subprocess.run( + cmd, + input=stdin_data, + capture_output=True, + text=True, + cwd=os.path.dirname(manage_script), + ) + + if expect_success: + assert result.returncode == 0, ( + f"Command failed: {' '.join(cmd)}\n" + f"STDOUT: {result.stdout}\n" + f"STDERR: {result.stderr}" + ) + else: + assert result.returncode != 0, ( + f"Command should have failed but succeeded: {' '.join(cmd)}\n" + f"STDOUT: {result.stdout}" + ) + + return result + + def run_query_command(self, query_script, args): + """Run glrd query command and return result.""" + cmd = [sys.executable, query_script] + args + result = subprocess.run( + cmd, + capture_output=True, + text=True, + cwd=os.path.dirname(query_script), + ) + + assert result.returncode == 0, ( + f"Query command failed: {' '.join(cmd)}\n" + f"STDOUT: {result.stdout}\n" + f"STDERR: {result.stderr}" + ) + return result + + def load_json_output(self, filepath): + """Load and parse JSON output file.""" + assert os.path.exists(filepath), f"Output file not found: {filepath}" + with open(filepath, "r") as f: + return json.load(f) + + # ============================================================================ + # SCHEMA VALIDATION TESTS + # ============================================================================ + + @pytest.mark.parametrize( + "version,expected_name", + [ + ("1990.0", "nightly-1990.0"), + ("1999.0", "nightly-1999.0"), + ("1500.5", "nightly-1500.5"), + ], + ) + def test_v1_schema_valid_versions( + self, test_dir, manage_script, version, expected_name + ): + """Test v1 schema with valid version formats (versions < 2000).""" + prefix = os.path.join(test_dir, f'releases-nightly-{version.replace(".", "_")}') + output_file = f"{prefix}-nightly.json" + + # Create release + _ = self.run_manage_command( + manage_script, + [ + "--create", + "nightly", + "--version", + version, + "--commit", + "deadbeef1234567890abcdef1234567890abcdef", + "--output-format", + "json", + "--output-file-prefix", + prefix, + "--no-query", + ], + ) + + # Verify output file was created + assert os.path.exists( + output_file + ), f"Output file not created for version {version}" + + # Load and verify JSON structure + data = self.load_json_output(output_file) + assert "releases" in data + assert len(data["releases"]) == 1 + + release = data["releases"][0] + assert release["name"] == expected_name + assert release["type"] == "nightly" + assert release["version"]["major"] == int(version.split(".")[0]) + assert 
release["version"]["minor"] == int(version.split(".")[1]) + # v1 schema should not have patch field + assert "patch" not in release["version"] + + @pytest.mark.parametrize( + "version", + [ + "1990.0.1", + "1999.0.5", + "1500.5.2", + ], + ) + def test_v1_schema_invalid_versions(self, test_dir, manage_script, version): + """Test v1 schema with invalid version formats (versions < 2000 with patch).""" + prefix = os.path.join(test_dir, f'releases-nightly-{version.replace(".", "_")}') + output_file = f"{prefix}-nightly.json" + + # Should fail because v1 schema doesn't support patch field + result = self.run_manage_command( + manage_script, + [ + "--create", + "nightly", + "--version", + version, + "--commit", + "deadbeef1234567890abcdef1234567890abcdef", + "--output-format", + "json", + "--output-file-prefix", + prefix, + "--no-query", + ], + expect_success=False, + ) + + # Verify error message mentions v1 schema + assert "v1 schema" in result.stderr + assert "patch version" in result.stderr + + # Verify no output file was created + assert not os.path.exists( + output_file + ), f"Output file should not be created for invalid version {version}" + + @pytest.mark.parametrize( + "version,expected_name", + [ + ("2000.0.0", "nightly-2000.0.0"), + ("2222.0.0", "nightly-2222.0.0"), + ("3000.5.2", "nightly-3000.5.2"), + ], + ) + def test_v2_schema_valid_versions( + self, test_dir, manage_script, version, expected_name + ): + """Test v2 schema with valid version formats (versions >= 2000).""" + prefix = os.path.join(test_dir, f'releases-nightly-{version.replace(".", "_")}') + output_file = f"{prefix}-nightly.json" + + # Create release + _ = self.run_manage_command( + manage_script, + [ + "--create", + "nightly", + "--version", + version, + "--commit", + "deadbeef1234567890abcdef1234567890abcdef", + "--output-format", + "json", + "--output-file-prefix", + prefix, + "--no-query", + ], + ) + + # Verify output file was created + assert os.path.exists( + output_file + ), f"Output file not created for version {version}" + + # Load and verify JSON structure + data = self.load_json_output(output_file) + assert "releases" in data + assert len(data["releases"]) == 1 + + release = data["releases"][0] + assert release["name"] == expected_name + assert release["type"] == "nightly" + + version_parts = version.split(".") + assert release["version"]["major"] == int(version_parts[0]) + assert release["version"]["minor"] == int(version_parts[1]) + assert release["version"]["patch"] == int(version_parts[2]) + + @pytest.mark.parametrize( + "version", + [ + "2000.0", + "2222.0", + "3000.5", + ], + ) + def test_v2_schema_invalid_versions(self, test_dir, manage_script, version): + """Test v2 schema with invalid version formats (versions >= 2000 without patch).""" + prefix = os.path.join(test_dir, f'releases-nightly-{version.replace(".", "_")}') + output_file = f"{prefix}-nightly.json" + + # Should fail because v2 schema requires patch field + result = self.run_manage_command( + manage_script, + [ + "--create", + "nightly", + "--version", + version, + "--commit", + "deadbeef1234567890abcdef1234567890abcdef", + "--output-format", + "json", + "--output-file-prefix", + prefix, + "--no-query", + ], + expect_success=False, + ) + + # Verify error message mentions v2 schema + assert "v2 schema" in result.stderr + assert "missing patch version" in result.stderr + + # Verify no output file was created + assert not os.path.exists( + output_file + ), f"Output file should not be created for invalid version {version}" + + def 
test_boundary_version_validation(self, test_dir, manage_script): + """Test validation at the boundary between v1 and v2 schemas (version 2000).""" + # Test exactly at boundary - should require v2 schema + prefix = os.path.join(test_dir, "releases-nightly-boundary") + output_file = f"{prefix}-nightly.json" + + # This should fail - 2000.0 is missing patch for v2 schema + result = self.run_manage_command( + manage_script, + [ + "--create", + "nightly", + "--version", + "2000.0", + "--commit", + "deadbeef1234567890abcdef1234567890abcdef", + "--output-format", + "json", + "--output-file-prefix", + prefix, + "--no-query", + ], + expect_success=False, + ) + + assert "v2 schema" in result.stderr + assert "missing patch version" in result.stderr + + # This should succeed - 2000.0.0 is correct v2 format + self.run_manage_command( + manage_script, + [ + "--create", + "nightly", + "--version", + "2000.0.0", + "--commit", + "deadbeef1234567890abcdef1234567890abcdef", + "--output-format", + "json", + "--output-file-prefix", + prefix, + "--no-query", + ], + ) + + data = self.load_json_output(output_file) + release = data["releases"][0] + assert release["name"] == "nightly-2000.0.0" + assert release["version"]["patch"] == 0 + + def test_schema_error_message_clarity(self, test_dir, manage_script): + """Test that error messages are clear and helpful.""" + # Test v1 schema error message + result = self.run_manage_command( + manage_script, + [ + "--create", + "nightly", + "--version", + "1990.0.1", + "--commit", + "deadbeef1234567890abcdef1234567890abcdef", + "--output-format", + "json", + "--output-file-prefix", + os.path.join(test_dir, "test"), + "--no-query", + ], + expect_success=False, + ) + + assert "v1 schema" in result.stderr + assert "patch version" in result.stderr + assert "major.minor" in result.stderr + + # Test v2 schema error message + result = self.run_manage_command( + manage_script, + [ + "--create", + "nightly", + "--version", + "2222.0", + "--commit", + "deadbeef1234567890abcdef1234567890abcdef", + "--output-format", + "json", + "--output-file-prefix", + os.path.join(test_dir, "test"), + "--no-query", + ], + expect_success=False, + ) + + assert "v2 schema" in result.stderr + assert "missing patch version" in result.stderr + assert "major.minor.patch" in result.stderr + + # ============================================================================ + # RELEASE CREATION TESTS + # ============================================================================ + + def test_major_release_validation(self, test_dir, manage_script): + """Test major release validation (always uses v1 schema).""" + prefix = os.path.join(test_dir, "releases-major") + output_file = f"{prefix}-major.json" + + self.run_manage_command( + manage_script, + [ + "--create", + "major", + "--version", + "27", + "--output-format", + "json", + "--output-file-prefix", + prefix, + "--no-query", + ], + ) + + data = self.load_json_output(output_file) + release = data["releases"][0] + assert release["name"] == "major-27" + assert release["type"] == "major" + assert release["version"]["major"] == 27 + # Major releases don't have minor/patch in version object + assert "minor" not in release["version"] + assert "patch" not in release["version"] + + def test_minor_release_validation(self, test_dir, manage_script): + """Test minor release validation with different schema versions.""" + prefix = os.path.join(test_dir, "releases-minor") + output_file = f"{prefix}-minor.json" + + # Test v1 schema minor release - create directly via stdin + 
minor_v1_json = { + "releases": [ + { + "name": "minor-27.0", + "type": "minor", + "version": {"major": 27, "minor": 0}, + "lifecycle": { + "released": {"isodate": "2020-06-09", "timestamp": 1591694693}, + "eol": {"isodate": "2021-03-09", "timestamp": 1615248000}, + }, + "git": { + "commit": "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3", + "commit_short": "a94a8fe5", + }, + "github": { + "release": "https://github.com/gardenlinux/gardenlinux/releases/tag/27.0" + }, + "flavors": ["container-amd64"], + "attributes": {"source_repo": True}, + } + ] + } + + self.run_manage_command_stdin( + manage_script, + [ + "--input-stdin", + "--output-format", + "json", + "--output-file-prefix", + prefix, + "--no-query", + ], + json.dumps(minor_v1_json), + ) + + data = self.load_json_output(output_file) + release = data["releases"][0] + assert release["name"] == "minor-27.0" + assert "patch" not in release["version"] + + # Clean up + if os.path.exists(output_file): + os.remove(output_file) + + # Test v2 schema minor release - create directly via stdin + minor_v2_json = { + "releases": [ + { + "name": "minor-2000.0.0", + "type": "minor", + "version": {"major": 2000, "minor": 0, "patch": 0}, + "lifecycle": { + "released": {"isodate": "2025-01-01", "timestamp": 1735689600}, + "eol": {"isodate": "2025-10-01", "timestamp": 1759363200}, + }, + "git": { + "commit": "b94a8fe5ccb19ba61c4c0873d391e987982fbbd4", + "commit_short": "b94a8fe5", + }, + "github": { + "release": "https://github.com/gardenlinux/gardenlinux/releases/tag/2000.0.0" + }, + "flavors": ["container-amd64"], + "attributes": {"source_repo": True}, + } + ] + } + + self.run_manage_command_stdin( + manage_script, + [ + "--input-stdin", + "--output-format", + "json", + "--output-file-prefix", + prefix, + "--no-query", + ], + json.dumps(minor_v2_json), + ) + + data = self.load_json_output(output_file) + release = data["releases"][0] + assert release["name"] == "minor-2000.0.0" + assert release["version"]["patch"] == 0 + + def test_dev_release_validation(self, test_dir, manage_script): + """Test dev release validation with different schema versions.""" + prefix = os.path.join(test_dir, "releases-dev") + output_file = f"{prefix}-dev.json" + + # Test v1 schema dev release + self.run_manage_command( + manage_script, + [ + "--create", + "dev", + "--version", + "1990.0", + "--output-format", + "json", + "--output-file-prefix", + prefix, + "--no-query", + ], + ) + + data = self.load_json_output(output_file) + release = data["releases"][0] + assert release["name"] == "dev-1990.0" + assert "patch" not in release["version"] + + # Clean up + if os.path.exists(output_file): + os.remove(output_file) + + # Test v2 schema dev release + self.run_manage_command( + manage_script, + [ + "--create", + "dev", + "--version", + "2000.0.0", + "--output-format", + "json", + "--output-file-prefix", + prefix, + "--no-query", + ], + ) + + data = self.load_json_output(output_file) + release = data["releases"][0] + assert release["name"] == "dev-2000.0.0" + assert release["version"]["patch"] == 0 + + # ============================================================================ + # QUERY AND VERIFICATION TESTS + # ============================================================================ + + def test_glrd_query_verification(self, test_dir, manage_script, query_script): + """Test that generated JSON files can be queried with glrd.""" + prefix = os.path.join(test_dir, "releases-nightly") + output_file = f"{prefix}-nightly.json" + + # Create a valid v1 schema release +
+
+    def test_glrd_query_verification(self, test_dir, manage_script, query_script):
+        """Test that generated JSON files can be queried with glrd."""
+        prefix = os.path.join(test_dir, "releases-nightly")
+        output_file = f"{prefix}-nightly.json"
+
+        # Create a valid v1 schema release
+        self.run_manage_command(
+            manage_script,
+            [
+                "--create",
+                "nightly",
+                "--version",
+                "1990.0",
+                "--commit",
+                "deadbeef1234567890abcdef1234567890abcdef",
+                "--output-format",
+                "json",
+                "--output-file-prefix",
+                prefix,
+                "--no-query",
+            ],
+        )
+
+        # Verify the file exists
+        assert os.path.exists(output_file)
+
+        # Query the generated file
+        result = self.run_query_command(
+            query_script,
+            [
+                "--type",
+                "nightly",
+                "--input-type",
+                "file",
+                "--input-file-prefix",
+                prefix,
+                "--output-format",
+                "json",
+            ],
+        )
+
+        # Parse query result
+        query_data = json.loads(result.stdout)
+        assert "releases" in query_data
+        assert len(query_data["releases"]) == 1
+
+        release = query_data["releases"][0]
+        assert release["name"] == "nightly-1990.0"
+        assert release["type"] == "nightly"
+
+    def test_query_created_major_release(self, test_dir, manage_script, query_script):
+        """Create a major release and query it using glrd."""
+        prefix = os.path.join(test_dir, "releases-major-ci")
+        output_file = f"{prefix}-major.json"
+
+        # Create major release
+        self.run_manage_command(
+            manage_script,
+            [
+                "--create",
+                "major",
+                "--version",
+                "1312",
+                "--output-format",
+                "json",
+                "--output-file-prefix",
+                prefix,
+                "--no-query",
+            ],
+        )
+
+        # Ensure file exists
+        assert os.path.exists(output_file)
+
+        # Query using glrd
+        result = self.run_query_command(
+            query_script,
+            [
+                "--type",
+                "major",
+                "--input-type",
+                "file",
+                "--input-file-prefix",
+                prefix,
+                "--output-format",
+                "json",
+            ],
+        )
+
+        data = json.loads(result.stdout)
+        assert "releases" in data
+        assert len(data["releases"]) == 1
+        assert data["releases"][0]["name"] == "major-1312"
+        assert data["releases"][0]["type"] == "major"
+
+    def test_query_created_dev_release(self, test_dir, manage_script, query_script):
+        """Create a dev release and query it using glrd."""
+        prefix = os.path.join(test_dir, "releases-dev-ci")
+        output_file = f"{prefix}-dev.json"
+
+        # Create dev release (v1 schema)
+        self.run_manage_command(
+            manage_script,
+            [
+                "--create",
+                "dev",
+                "--version",
+                "1990.0",
+                "--output-format",
+                "json",
+                "--output-file-prefix",
+                prefix,
+                "--no-query",
+            ],
+        )
+
+        # Ensure file exists
+        assert os.path.exists(output_file)
+
+        # Query using glrd
+        result = self.run_query_command(
+            query_script,
+            [
+                "--type",
+                "dev",
+                "--input-type",
+                "file",
+                "--input-file-prefix",
+                prefix,
+                "--output-format",
+                "json",
+            ],
+        )
+
+        data = json.loads(result.stdout)
+        assert "releases" in data
+        assert len(data["releases"]) == 1
+        assert data["releases"][0]["name"] == "dev-1990.0"
+        assert data["releases"][0]["type"] == "dev"
+
+    def test_create_query_minor_via_input_stdin(
+        self, test_dir, manage_script, query_script
+    ):
+        """Create a minor release via input-stdin and query it using glrd."""
+        prefix = os.path.join(test_dir, "releases-minor-ci")
+        output_file = f"{prefix}-minor.json"
+
+        minor_json = {
+            "releases": [
+                {
+                    "name": "minor-1592.6",
+                    "type": "minor",
+                    "version": {"major": 1592, "minor": 6},
+                    "lifecycle": {
+                        "released": {
+                            "isodate": "2025-02-19",
+                            "timestamp": 1739951325,
+                        },
+                        "eol": {
+                            "isodate": "2025-08-12",
+                            "timestamp": 1754956800,
+                        },
+                    },
+                    "git": {
+                        "commit": "cb05e11f0481b72d0a30da3662295315b220a436",
+                        "commit_short": "cb05e11f",
+                    },
+                    "github": {
+                        "release": "https://github.com/gardenlinux/gardenlinux/releases/tag/1592.6"
+                    },
+                    "flavors": ["container-amd64", "container-arm64"],
+                    "attributes": {"source_repo": True},
+                }
+            ]
+        }
+
+        stdin_payload = json.dumps(minor_json)
+
+        # Create via stdin
+        self.run_manage_command_stdin(
+            manage_script,
+            [
+                "--input-stdin",
+                "--output-format",
+                "json",
+                "--output-file-prefix",
+                prefix,
+                "--no-query",
+            ],
+            stdin_payload,
+        )
+
+        # Ensure file exists
+        assert os.path.exists(output_file)
+
+        # Query using glrd
+        result = self.run_query_command(
+            query_script,
+            [
+                "--type",
+                "minor",
+                "--input-type",
+                "file",
+                "--input-file-prefix",
+                prefix,
+                "--output-format",
+                "json",
+            ],
+        )
+
+        data = json.loads(result.stdout)
+        assert "releases" in data
+        assert len(data["releases"]) == 1
+        assert data["releases"][0]["name"] == "minor-1592.6"
+        assert data["releases"][0]["type"] == "minor"
+
+    def test_create_query_next_release(self, test_dir, manage_script, query_script):
+        """Create a next release (via stdin for full lifecycle) and query it using glrd."""
+        prefix = os.path.join(test_dir, "releases-next-ci")
+        output_file = f"{prefix}-next.json"
+
+        next_json = {
+            "releases": [
+                {
+                    "name": "next",
+                    "type": "next",
+                    "version": {"major": "next"},
+                    "lifecycle": {
+                        "released": {
+                            "isodate": "2025-12-01",
+                            "timestamp": 1764547200,
+                        },
+                        "extended": {
+                            "isodate": "2026-06-01",
+                            "timestamp": 1780272000,
+                        },
+                        "eol": {
+                            "isodate": "2026-09-01",
+                            "timestamp": 1788297600,
+                        },
+                    },
+                }
+            ]
+        }
+
+        stdin_payload = json.dumps(next_json)
+
+        # Create via stdin
+        self.run_manage_command_stdin(
+            manage_script,
+            [
+                "--input-stdin",
+                "--output-format",
+                "json",
+                "--output-file-prefix",
+                prefix,
+                "--no-query",
+            ],
+            stdin_payload,
+        )
+
+        # Ensure file exists
+        assert os.path.exists(output_file)
+
+        # Query using glrd
+        result = self.run_query_command(
+            query_script,
+            [
+                "--type",
+                "next",
+                "--input-type",
+                "file",
+                "--input-file-prefix",
+                prefix,
+                "--output-format",
+                "json",
+            ],
+        )
+
+        data = json.loads(result.stdout)
+        assert "releases" in data
+        assert len(data["releases"]) == 1
+        assert data["releases"][0]["name"] == "next"
+        assert data["releases"][0]["type"] == "next"
+
+    def test_multiple_releases_query(self, test_dir, manage_script, query_script):
+        """Test querying multiple releases of different schema versions."""
+        # Create multiple releases
+        releases = [
+            ("1990.0", "nightly-1990.0"),  # v1 schema
+            ("2000.0.0", "nightly-2000.0.0"),  # v2 schema
+        ]
+
+        all_releases = []
+
+        for version, expected_name in releases:
+            prefix = os.path.join(
+                test_dir, f'releases-nightly-{version.replace(".", "_")}'
+            )
+            output_file = f"{prefix}-nightly.json"
+
+            # Create release
+            self.run_manage_command(
+                manage_script,
+                [
+                    "--create",
+                    "nightly",
+                    "--version",
+                    version,
+                    "--output-format",
+                    "json",
+                    "--output-file-prefix",
+                    prefix,
+                    "--no-query",
+                ],
+            )
+
+            # Load and verify
+            data = self.load_json_output(output_file)
+            all_releases.extend(data["releases"])
+
+        # Create combined file for querying with correct naming
+        combined_file = os.path.join(test_dir, "releases-nightly-combined-nightly.json")
+        with open(combined_file, "w") as f:
+            json.dump({"releases": all_releases}, f)
+
+        # Query combined file using direct file path
+        result = self.run_query_command(
+            query_script,
+            [
+                "--type",
+                "nightly",
+                "--input-type",
+                "file",
+                "--input-file-prefix",
+                combined_file.replace("-nightly.json", ""),
+                "--output-format",
+                "json",
+            ],
+        )
+
+        # Verify query result
+        query_data = json.loads(result.stdout)
+        assert len(query_data["releases"]) == 2
+
+        # Check that both releases are present
+        names = [r["name"] for r in query_data["releases"]]
+        assert "nightly-1990.0" in names
"nightly-2000.0.0" in names + + # ============================================================================ + # ADVANCED FEATURE TESTS + # ============================================================================ + + def test_update_release_with_custom_lifecycle(self, test_dir, manage_script): + """Test updating a release with custom lifecycle flags.""" + prefix = os.path.join(test_dir, "releases-custom-lifecycle") + output_file = f"{prefix}-major.json" + + # Create a major release with custom lifecycle dates + self.run_manage_command( + manage_script, + [ + "--create", + "major", + "--version", + "9999", + "--lifecycle-released-isodatetime", + "2025-01-15T10:30:00", + "--lifecycle-extended-isodatetime", + "2025-07-15T10:30:00", + "--lifecycle-eol-isodatetime", + "2025-10-15T10:30:00", + "--output-format", + "json", + "--output-file-prefix", + prefix, + "--no-query", + ], + ) + + # Verify the file exists and contains custom dates + assert os.path.exists(output_file) + data = self.load_json_output(output_file) + release = data["releases"][0] + assert release["name"] == "major-9999" + assert release["type"] == "major" + + # Check custom lifecycle dates + lifecycle = release["lifecycle"] + assert lifecycle["released"]["isodate"] == "2025-01-15" + assert lifecycle["extended"]["isodate"] == "2025-07-15" + assert lifecycle["eol"]["isodate"] == "2025-10-15" + + def test_create_delete_release(self, test_dir, manage_script): + """Test that delete functionality requires query and fails appropriately with --no-query.""" + prefix = os.path.join(test_dir, "releases-create-delete") + output_file = f"{prefix}-nightly.json" + + # Create a release via stdin first + releases_json = { + "releases": [ + { + "name": "nightly-1995.0", + "type": "nightly", + "version": {"major": 1995, "minor": 0}, + "lifecycle": { + "released": {"isodate": "2025-01-01", "timestamp": 1735689600} + }, + "git": { + "commit": "deadbeef1234567890abcdef1234567890abcdef", + "commit_short": "deadbeef", + }, + "github": { + "release": "https://github.com/gardenlinux/gardenlinux/releases/tag/1995.0" + }, + "flavors": ["container-amd64"], + "attributes": {"source_repo": True}, + } + ] + } + + # Create release via stdin + self.run_manage_command_stdin( + manage_script, + [ + "--input-stdin", + "--output-format", + "json", + "--output-file-prefix", + prefix, + "--no-query", + ], + json.dumps(releases_json), + ) + + # Verify the file exists + assert os.path.exists(output_file) + data = self.load_json_output(output_file) + assert len(data["releases"]) == 1 + + # Test that delete with --no-query fails appropriately + result = self.run_manage_command( + manage_script, + [ + "--delete", + "nightly-1995.0", + "--no-query", + "--output-format", + "json", + "--output-file-prefix", + prefix, + ], + expect_success=False, + ) + + # Verify error message + assert "--delete' cannot run with '--no-query'" in result.stderr + + # Test that delete without --input flag also fails (since it doesn't query S3 in tests) + result = self.run_manage_command( + manage_script, + [ + "--delete", + "nightly-1995.0", + "--output-format", + "json", + "--output-file-prefix", + prefix, + ], + expect_success=False, + ) + + # This should fail because there's no S3 data in tests + assert "not found in the existing data" in result.stderr + + def test_create_releases_with_custom_commit(self, test_dir, manage_script): + """Test creating each release type with custom commit.""" + custom_commit = "deadbeef1234567890abcdef1234567890abcdef" + custom_commit_short = "deadbeef" + + 
+        # Test major release with custom commit
+        major_prefix = os.path.join(test_dir, "releases-major-custom-commit")
+        major_file = f"{major_prefix}-major.json"
+
+        self.run_manage_command(
+            manage_script,
+            [
+                "--create",
+                "major",
+                "--version",
+                "8888",
+                "--commit",
+                custom_commit,
+                "--output-format",
+                "json",
+                "--output-file-prefix",
+                major_prefix,
+                "--no-query",
+            ],
+        )
+
+        assert os.path.exists(major_file)
+        data = self.load_json_output(major_file)
+        release = data["releases"][0]
+        assert release["name"] == "major-8888"
+        assert release["type"] == "major"
+        # Major releases don't have git commit info in the schema
+
+        # Test minor release with custom commit (v1 schema) - use stdin to avoid EOL issues
+        minor_prefix = os.path.join(test_dir, "releases-minor-custom-commit")
+        minor_file = f"{minor_prefix}-minor.json"
+
+        minor_json = {
+            "releases": [
+                {
+                    "name": "minor-1990.0",
+                    "type": "minor",
+                    "version": {"major": 1990, "minor": 0},
+                    "lifecycle": {
+                        "released": {"isodate": "2025-01-01", "timestamp": 1735689600},
+                        "eol": {"isodate": "2025-10-01", "timestamp": 1759363200},
+                    },
+                    "git": {"commit": custom_commit, "commit_short": "deadbeef"},
+                    "github": {
+                        "release": "https://github.com/gardenlinux/gardenlinux/releases/tag/1990.0"
+                    },
+                    "flavors": ["container-amd64"],
+                    "attributes": {"source_repo": True},
+                }
+            ]
+        }
+
+        self.run_manage_command_stdin(
+            manage_script,
+            [
+                "--input-stdin",
+                "--output-format",
+                "json",
+                "--output-file-prefix",
+                minor_prefix,
+                "--no-query",
+            ],
+            json.dumps(minor_json),
+        )
+
+        assert os.path.exists(minor_file)
+        data = self.load_json_output(minor_file)
+        release = data["releases"][0]
+        assert release["name"] == "minor-1990.0"
+        assert release["type"] == "minor"
+        assert release["git"]["commit"] == custom_commit
+        assert release["git"]["commit_short"] == custom_commit_short
+
+        # Test nightly release with custom commit (v1 schema)
+        nightly_prefix = os.path.join(test_dir, "releases-nightly-custom-commit")
+        nightly_file = f"{nightly_prefix}-nightly.json"
+
+        self.run_manage_command(
+            manage_script,
+            [
+                "--create",
+                "nightly",
+                "--version",
+                "1991.0",
+                "--commit",
+                custom_commit,
+                "--output-format",
+                "json",
+                "--output-file-prefix",
+                nightly_prefix,
+                "--no-query",
+            ],
+        )
+
+        assert os.path.exists(nightly_file)
+        data = self.load_json_output(nightly_file)
+        release = data["releases"][0]
+        assert release["name"] == "nightly-1991.0"
+        assert release["type"] == "nightly"
+        assert release["git"]["commit"] == custom_commit
+        assert release["git"]["commit_short"] == custom_commit_short
+
+        # Test dev release with custom commit (v1 schema)
+        dev_prefix = os.path.join(test_dir, "releases-dev-custom-commit")
+        dev_file = f"{dev_prefix}-dev.json"
+
+        self.run_manage_command(
+            manage_script,
+            [
+                "--create",
+                "dev",
+                "--version",
+                "1992.0",
+                "--commit",
+                custom_commit,
+                "--output-format",
+                "json",
+                "--output-file-prefix",
+                dev_prefix,
+                "--no-query",
+            ],
+        )
+
+        assert os.path.exists(dev_file)
+        data = self.load_json_output(dev_file)
+        release = data["releases"][0]
+        assert release["name"] == "dev-1992.0"
+        assert release["type"] == "dev"
+        assert release["git"]["commit"] == custom_commit
+        assert release["git"]["commit_short"] == custom_commit_short
+
+        # Note: next releases don't use git commit info and require lifecycle dates,
+        # so we skip testing next releases with custom commit
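+
+    # NOTE (assumption): the helpers used throughout this class -
+    # run_manage_command, run_manage_command_stdin, run_query_command, and
+    # load_json_output - are defined earlier in this file. The run_* helpers
+    # are assumed to invoke the glrd scripts via subprocess, assert on the
+    # exit code unless expect_success=False is passed, and return the
+    # completed process so stdout/stderr can be inspected; load_json_output
+    # parses a written JSON output file.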
diff --git a/tests/test_schema_validation.py b/tests/test_schema_validation.py
new file mode 100644
index 0000000..9e1eba8
--- /dev/null
+++ b/tests/test_schema_validation.py
@@ -0,0 +1,137 @@
+"""
+Unit tests for schema validation logic.
+"""
+
+import pytest
+from glrd.manage import validate_input_version_format
+
+
+@pytest.mark.unit
+class TestSchemaValidation:
+    """Unit tests for schema validation functions."""
+
+    def test_v1_schema_valid_formats(self):
+        """Test v1 schema validation with valid formats."""
+        # Valid v1 formats (versions < 2000)
+        valid_cases = [
+            ("1990.0", "nightly"),
+            ("1999.0", "minor"),
+            ("1500.5", "dev"),
+        ]
+
+        for version, release_type in valid_cases:
+            is_valid, error_message = validate_input_version_format(
+                version, release_type
+            )
+            assert is_valid, f"Version {version} should be valid for {release_type}"
+            assert error_message is None
+
+    def test_v1_schema_invalid_formats(self):
+        """Test v1 schema validation with invalid formats."""
+        # Invalid v1 formats (versions < 2000 with patch)
+        invalid_cases = [
+            ("1990.0.1", "nightly"),
+            ("1999.0.5", "minor"),
+            ("1500.5.2", "dev"),
+        ]
+
+        for version, release_type in invalid_cases:
+            is_valid, error_message = validate_input_version_format(
+                version, release_type
+            )
+            assert (
+                not is_valid
+            ), f"Version {version} should be invalid for {release_type}"
+            assert "v1 schema" in error_message
+            assert "patch version" in error_message
+
+    def test_v2_schema_valid_formats(self):
+        """Test v2 schema validation with valid formats."""
+        # Valid v2 formats (versions >= 2000)
+        valid_cases = [
+            ("2000.0.0", "nightly"),
+            ("2222.0.0", "minor"),
+            ("3000.5.2", "dev"),
+        ]
+
+        for version, release_type in valid_cases:
+            is_valid, error_message = validate_input_version_format(
+                version, release_type
+            )
+            assert is_valid, f"Version {version} should be valid for {release_type}"
+            assert error_message is None
+
+    def test_v2_schema_invalid_formats(self):
+        """Test v2 schema validation with invalid formats."""
+        # Invalid v2 formats (versions >= 2000 without patch)
+        invalid_cases = [
+            ("2000.0", "nightly"),
+            ("2222.0", "minor"),
+            ("3000.5", "dev"),
+        ]
+
+        for version, release_type in invalid_cases:
+            is_valid, error_message = validate_input_version_format(
+                version, release_type
+            )
+            assert (
+                not is_valid
+            ), f"Version {version} should be invalid for {release_type}"
+            assert "v2 schema" in error_message
+            assert "missing patch version" in error_message
+
+    def test_major_next_releases(self):
+        """Test that major and next releases don't use version validation."""
+        # Major and next releases should always pass validation
+        test_cases = [
+            ("1990.0", "major"),
+            ("2000.0.0", "major"),
+            ("1990.0.1", "major"),
+            ("2000.0", "major"),
+            ("1990.0", "next"),
+            ("2000.0.0", "next"),
+            ("1990.0.1", "next"),
+            ("2000.0", "next"),
+        ]
+
+        for version, release_type in test_cases:
+            is_valid, error_message = validate_input_version_format(
+                version, release_type
+            )
+            assert is_valid, f"Version {version} should be valid for {release_type}"
+            assert error_message is None
+
+    def test_boundary_validation(self):
+        """Test validation at the boundary between v1 and v2 schemas."""
+        # Exactly at boundary - should require v2 schema
+        is_valid, error_message = validate_input_version_format("2000.0", "nightly")
+        assert not is_valid
+        assert "v2 schema" in error_message
+        assert "missing patch version" in error_message
+
+        # Just below boundary - should use v1 schema
+        is_valid, error_message = validate_input_version_format("1999.0.1", "nightly")
+        assert not is_valid
+        assert "v1 schema" in error_message
+        assert "patch version" in error_message
+
+    def test_invalid_version_formats(self):
+        """Test validation with completely invalid version formats."""
+        invalid_formats = [
+            "1990",  # Missing minor
+            "1990.0.0.0",  # Too many parts
+            "abc.def",  # Non-numeric
+            "",  # Empty
+        ]
+
+        for version in invalid_formats:
+            # This should raise an exception or return invalid
+            try:
+                is_valid, error_message = validate_input_version_format(
+                    version, "nightly"
+                )
+                # If no exception, should be invalid
+                assert not is_valid, f"Version {version} should be invalid"
+            except (ValueError, IndexError):
+                # Expected for invalid formats
+                pass
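+
+
+# Illustrative reference sketch only - not imported by the tests above and not
+# glrd's actual implementation. It restates, as an assumption, the contract
+# these tests pin down for validate_input_version_format: versions with
+# major < 2000 follow the v1 "major.minor" format, versions with major >= 2000
+# follow the v2 "major.minor.patch" format, and the "major" and "next" release
+# types skip version validation entirely.
+def _reference_validate_version_sketch(version, release_type):
+    """Return (is_valid, error_message) mirroring the tested contract."""
+    if release_type in ("major", "next"):
+        # Major and next releases are never version-validated
+        return True, None
+    parts = version.split(".")
+    if not all(part.isdigit() for part in parts):
+        return False, f"invalid version format: '{version}'"
+    if int(parts[0]) < 2000:
+        # v1 schema: exactly major.minor, no patch version allowed
+        if len(parts) == 2:
+            return True, None
+        return False, (
+            f"'{version}' does not match the v1 schema: expected major.minor "
+            "without a patch version"
+        )
+    # v2 schema: exactly major.minor.patch
+    if len(parts) == 3:
+        return True, None
+    return False, f"'{version}' has a missing patch version (v2 schema expects major.minor.patch)"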