diff --git a/poetry.lock b/poetry.lock index 9c3cbffc..fab6068d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -134,7 +134,7 @@ version = "1.39.10" description = "The AWS SDK for Python" optional = false python-versions = ">=3.9" -groups = ["main"] +groups = ["main", "dev"] files = [ {file = "boto3-1.39.10-py3-none-any.whl", hash = "sha256:5b2aa5b7d075491c7c6cb539255a44f0ebd70ffc540e32dfd56eb8a361390e5e"}, {file = "boto3-1.39.10.tar.gz", hash = "sha256:62a1623c8c9495625c88778869b75b692a7ef226fcbb96ea58365b9f043a2ae2"}, @@ -154,7 +154,7 @@ version = "1.39.10" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">=3.9" -groups = ["main"] +groups = ["main", "dev"] files = [ {file = "botocore-1.39.10-py3-none-any.whl", hash = "sha256:d279f252a37bfa7d8a628404ea745c0163ed6260e037d813ce0aef2996ffea28"}, {file = "botocore-1.39.10.tar.gz", hash = "sha256:7fe00007304fe4627d7dd7b8605c6666a560651e91100f47391ae40a310dc092"}, @@ -174,7 +174,7 @@ version = "2025.4.26" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" -groups = ["main", "docs"] +groups = ["main", "dev", "docs"] files = [ {file = "certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3"}, {file = "certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6"}, @@ -186,7 +186,7 @@ version = "1.17.1" description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.8" -groups = ["main"] +groups = ["main", "dev"] markers = "platform_python_implementation != \"PyPy\"" files = [ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, @@ -267,7 +267,7 @@ version = "3.4.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false python-versions = ">=3.7" -groups = ["main", "docs"] +groups = ["main", "dev", "docs"] files = [ {file = "charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941"}, {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd"}, @@ -389,7 +389,7 @@ files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] -markers = {main = "platform_system == \"Windows\" or sys_platform == \"win32\"", dev = "platform_system == \"Windows\"", docs = "sys_platform == \"win32\""} +markers = {main = "sys_platform == \"win32\" or platform_system == \"Windows\"", dev = "platform_system == \"Windows\"", docs = "sys_platform == \"win32\""} [[package]] name = "cryptography" @@ -397,7 +397,7 @@ version = "45.0.4" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = "!=3.9.0,!=3.9.1,>=3.7" -groups = ["main"] +groups = ["main", "dev"] files = [ {file = "cryptography-45.0.4-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:425a9a6ac2823ee6e46a76a21a4e8342d8fa5c01e08b823c1f19a8b74f096069"}, {file = "cryptography-45.0.4-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:680806cf63baa0039b920f4976f5f31b10e772de42f16310a6839d9f21a26b0d"}, @@ -503,7 +503,7 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" -groups = ["main", "docs"] +groups = ["main", "dev", "docs"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -542,7 +542,7 @@ version = "3.1.6" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" -groups = ["docs"] +groups = ["dev", "docs"] files = [ {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, @@ -560,7 +560,7 @@ version = "1.0.1" description = "JSON Matching Expressions" optional = false python-versions = ">=3.7" -groups = ["main"] +groups = ["main", "dev"] files = [ {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, @@ -634,7 +634,7 @@ version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false python-versions = ">=3.9" -groups = ["docs"] +groups = ["dev", "docs"] files = [ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, @@ -711,6 +711,52 @@ files = [ {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, ] +[[package]] +name = "moto" +version = "5.1.9" +description = "A library that allows you to easily mock out tests based on AWS infrastructure" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "moto-5.1.9-py3-none-any.whl", hash = "sha256:e9ba7e4764a6088ccc34e3cc846ae719861ca202409fa865573de40a3e805b9b"}, + {file = "moto-5.1.9.tar.gz", hash = "sha256:0c4f0387b06b5d24c0ce90f8f89f31a565cc05789189c5d59b5df02594f2e371"}, +] + +[package.dependencies] +boto3 = ">=1.9.201" +botocore = ">=1.20.88,<1.35.45 || >1.35.45,<1.35.46 || >1.35.46" +cryptography = ">=35.0.0" +Jinja2 = ">=2.10.1" +python-dateutil = ">=2.1,<3.0.0" +requests = ">=2.5" +responses = ">=0.15.0,<0.25.5 || >0.25.5" +werkzeug = ">=0.5,<2.2.0 || >2.2.0,<2.2.1 || >2.2.1" +xmltodict = "*" + +[package.extras] +all = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsonpath_ng", "jsonschema", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.6.1)", "pyparsing (>=3.0.7)", "setuptools"] +apigateway = ["PyYAML (>=5.1)", "joserfc (>=0.9.0)", "openapi-spec-validator (>=0.5.0)"] +apigatewayv2 = ["PyYAML (>=5.1)", "openapi-spec-validator (>=0.5.0)"] +appsync = ["graphql-core"] +awslambda = ["docker (>=3.0.0)"] +batch = ["docker (>=3.0.0)"] +cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint 
(>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.6.1)", "pyparsing (>=3.0.7)", "setuptools"] +cognitoidp = ["joserfc (>=0.9.0)"] +dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.6.1)"] +dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.6.1)"] +events = ["jsonpath_ng"] +glue = ["pyparsing (>=3.0.7)"] +proxy = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "graphql-core", "joserfc (>=0.9.0)", "jsonpath_ng", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.6.1)", "pyparsing (>=3.0.7)", "setuptools"] +quicksight = ["jsonschema"] +resourcegroupstaggingapi = ["PyYAML (>=5.1)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.6.1)", "pyparsing (>=3.0.7)"] +s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.6.1)"] +s3crc32c = ["PyYAML (>=5.1)", "crc32c", "py-partiql-parser (==0.6.1)"] +server = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "joserfc (>=0.9.0)", "jsonpath_ng", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.6.1)", "pyparsing (>=3.0.7)", "setuptools"] +ssm = ["PyYAML (>=5.1)"] +stepfunctions = ["antlr4-python3-runtime", "jsonpath_ng"] +xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] + [[package]] name = "mypy-extensions" version = "1.1.0" @@ -843,7 +889,7 @@ version = "2.22" description = "C parser in Python" optional = false python-versions = ">=3.8" -groups = ["main"] +groups = ["main", "dev"] markers = "platform_python_implementation != \"PyPy\"" files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, @@ -893,7 +939,7 @@ version = "2.9.0.post0" 
description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -groups = ["main"] +groups = ["main", "dev"] files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, @@ -1002,7 +1048,7 @@ version = "2.32.4" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" -groups = ["main", "docs"] +groups = ["main", "dev", "docs"] files = [ {file = "requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c"}, {file = "requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422"}, @@ -1018,6 +1064,26 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "responses" +version = "0.25.7" +description = "A utility library for mocking out the `requests` Python library." 
+optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "responses-0.25.7-py3-none-any.whl", hash = "sha256:92ca17416c90fe6b35921f52179bff29332076bb32694c0df02dcac2c6bc043c"}, + {file = "responses-0.25.7.tar.gz", hash = "sha256:8ebae11405d7a5df79ab6fd54277f6f2bc29b2d002d0dd2d5c632594d1ddcedb"}, +] + +[package.dependencies] +pyyaml = "*" +requests = ">=2.30.0,<3.0" +urllib3 = ">=1.25.10,<3.0" + +[package.extras] +tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli ; python_version < \"3.11\"", "tomli-w", "types-PyYAML", "types-requests"] + [[package]] name = "rich" version = "14.0.0" @@ -1170,7 +1236,7 @@ version = "0.13.0" description = "An Amazon S3 Transfer Manager" optional = false python-versions = ">=3.9" -groups = ["main"] +groups = ["main", "dev"] files = [ {file = "s3transfer-0.13.0-py3-none-any.whl", hash = "sha256:0148ef34d6dd964d0d8cf4311b2b21c474693e57c2e069ec708ce043d2b527be"}, {file = "s3transfer-0.13.0.tar.gz", hash = "sha256:f5e6db74eb7776a37208001113ea7aa97695368242b364d73e91c981ac522177"}, @@ -1209,7 +1275,7 @@ version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -groups = ["main"] +groups = ["main", "dev"] files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, @@ -1430,7 +1496,7 @@ version = "2.4.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.9" -groups = ["main", "docs"] +groups = ["main", "dev", "docs"] files = [ {file = "urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813"}, {file = "urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466"}, @@ -1442,7 +1508,37 @@ h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] +[[package]] +name = "werkzeug" +version = "3.1.3" +description = "The comprehensive WSGI web application library." +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e"}, + {file = "werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746"}, +] + +[package.dependencies] +MarkupSafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog (>=2.3)"] + +[[package]] +name = "xmltodict" +version = "0.14.2" +description = "Makes working with XML feel like you are working with JSON" +optional = false +python-versions = ">=3.6" +groups = ["dev"] +files = [ + {file = "xmltodict-0.14.2-py2.py3-none-any.whl", hash = "sha256:20cc7d723ed729276e808f26fb6b3599f786cbc37e06c65e192ba77c40f20aac"}, + {file = "xmltodict-0.14.2.tar.gz", hash = "sha256:201e7c28bb210e374999d1dde6382923ab0ed1a8a5faeece48ab525b7810a553"}, +] + [metadata] lock-version = "2.1" python-versions = "^3.13" -content-hash = "00e38dd1eac56b7b030310490d2e0da9aa380314d92834ce06deec4f2e5e3d85" +content-hash = "16d757c1d7526b62d923b28c642f55847ab6fad6505101a1313ca7fb3e6f0800" diff --git a/pyproject.toml b/pyproject.toml index aa6bd544..579c1acf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,6 +25,7 @@ pygments = "^2.19.1" [tool.poetry.group.dev.dependencies] bandit = "^1.8.3" black = "^25.1.0" +moto = "^5.1.9" [tool.poetry.group.docs.dependencies] 
sphinx-rtd-theme = "^3.0.2" diff --git a/src/gardenlinux/features/cname.py b/src/gardenlinux/features/cname.py index fcd90265..d3e54e2a 100644 --- a/src/gardenlinux/features/cname.py +++ b/src/gardenlinux/features/cname.py @@ -136,7 +136,7 @@ def platform(self) -> str: :return: (str) Flavor """ - return re.split("[_-]", self._flavor, 1)[0] + return re.split("[_-]", self._flavor, maxsplit=1)[0] @property def version(self) -> Optional[str]: diff --git a/tests/s3/__init__.py b/tests/s3/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/s3/conftest.py b/tests/s3/conftest.py new file mode 100644 index 00000000..734e7b76 --- /dev/null +++ b/tests/s3/conftest.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- + +from dataclasses import dataclass +from moto import mock_aws +from hashlib import md5, sha256 +import boto3 +import pytest + +from gardenlinux.features.cname import CName as RealCName + +BUCKET_NAME = "test-bucket" +REGION = "us-east-1" + + +@dataclass(frozen=True) +class S3Env: + s3: object + bucket_name: str + tmp_path: object + cname: str + + +def make_cname( + flavor: str = "testcname", + arch: str = "amd64", + version: str = "1234.1", + commit: str = "abc123", +) -> str: + """ + Helper function to build cname. Can be used to customize the cname. + """ + return f"{flavor}-{arch}-{version}-{commit}" + + +# Helpers to compute digests for fake files +def dummy_digest(data, algo: str): + """ + Dummy for file_digest(); reads a seekable binary stream and returns a hash object + """ + content = data.read() + data.seek(0) # Reset byte cursor to start for multiple uses + + if algo == "md5": + return md5(content) # nosec B324 + elif algo == "sha256": + return sha256(content) + else: + raise ValueError(f"Unsupported algo: {algo}") + + +@pytest.fixture(autouse=True) +def s3_setup(tmp_path, monkeypatch): + """ + Provides a clean S3 setup for each test. 
+ """ + with mock_aws(): + s3 = boto3.resource("s3", region_name=REGION) + s3.create_bucket(Bucket=BUCKET_NAME) + + monkeypatch.setattr("gardenlinux.s3.s3_artifacts.file_digest", dummy_digest) + + cname = make_cname() + yield S3Env(s3, BUCKET_NAME, tmp_path, cname) diff --git a/tests/s3/test_bucket.py b/tests/s3/test_bucket.py new file mode 100644 index 00000000..94b8118d --- /dev/null +++ b/tests/s3/test_bucket.py @@ -0,0 +1,127 @@ +# -*- coding: utf-8 -*- + +""" +Test the `Bucket` class in `src/gardenlinux/s3/bucket.py` by using +mock AWS interactions provided by the `moto` module. + +A mock AWS environment is provided by the `s3_setup` fixture found in `conftest.py`. +""" + +from pathlib import Path +import io +import pytest + +from gardenlinux.s3.bucket import Bucket + + +REGION = "us-east-1" + + +def test_objects_empty(s3_setup): + """ + List objects from empty bucket. + """ + # Arrange + env = s3_setup + + # Act + bucket = Bucket(env.bucket_name, s3_resource_config={"region_name": REGION}) + + # Assert + assert list(bucket.objects) == [] + + +def test_upload_file_and_list(s3_setup): + """ + Create a fake file in a temporary directory, upload and try + to list it + """ + # Arrange + env = s3_setup + + test_file = env.tmp_path / "example.txt" + test_file.write_text("hello moto") + + # Act + bucket = Bucket(env.bucket_name, s3_resource_config={"region_name": REGION}) + bucket.upload_file(str(test_file), "example.txt") + + all_keys = [obj.key for obj in bucket.objects] + + # Assert + assert "example.txt" in all_keys + + +def test_download_file(s3_setup): + """ + Try to download a file pre-existing in the bucket + """ + # Arrange + env = s3_setup + env.s3.Object(env.bucket_name, "file.txt").put(Body=b"some data") + + # Act + bucket = Bucket(env.bucket_name, s3_resource_config={"region_name": REGION}) + target_path = env.tmp_path / "downloaded.txt" + bucket.download_file("file.txt", str(target_path)) + + # Assert + assert target_path.read_text() == "some data" + + 
+def test_upload_fileobj(s3_setup): + """ + Upload a file-like in-memory object to the bucket + """ + # Arrange + env = s3_setup + + # Act + # Create in-memory binary stream (file content) + data = io.BytesIO(b"Test Data") + bucket = Bucket(env.bucket_name, s3_resource_config={"region_name": REGION}) + bucket.upload_fileobj(data, "binary.obj") + + obj = env.s3.Object(env.bucket_name, "binary.obj").get() + + # Assert + assert obj["Body"].read() == b"Test Data" + + +def test_download_fileobj(s3_setup): + """ + Download data into a in-memory object + """ + # Arange + env = s3_setup + # Put some object in the bucket + env.s3.Object(env.bucket_name, "somekey").put(Body=b"123abc") + + # Act + bucket = Bucket(env.bucket_name, s3_resource_config={"region_name": REGION}) + # Create empty in-memory bytestream to act as a writable file + output = io.BytesIO() + bucket.download_fileobj("somekey", output) + # Reset binary cursor to prepare for read + output.seek(0) + + # Assert + assert output.read() == b"123abc" + + +def test_getattr_delegates(s3_setup): + """ + Verify that attribute access is delegated to the underlying boto3 Bucket. + + This checks that accessing e.g. `.name` on our custom Bucket works by forwarding + the call to the real boto3 bucket. 
+ """ + # Arrange + env = s3_setup + + # Act + bucket = Bucket(env.bucket_name, s3_resource_config={"region_name": REGION}) + + # Assert + # __getattr__ should delegate this to the underlying boto3 Bucket object + assert bucket.name == env.bucket_name diff --git a/tests/s3/test_s3_artifacts.py b/tests/s3/test_s3_artifacts.py new file mode 100644 index 00000000..1cf6d845 --- /dev/null +++ b/tests/s3/test_s3_artifacts.py @@ -0,0 +1,127 @@ +# -*- coding: utf-8 -*- + +from pathlib import Path +from tempfile import TemporaryDirectory +import pytest + +from gardenlinux.s3.s3_artifacts import S3Artifacts + +RELEASE_DATA = """ + GARDENLINUX_VERSION = 1234.1 + GARDENLINUX_COMMIT_ID = abc123 + GARDENLINUX_COMMIT_ID_LONG = abc123long + GARDENLINUX_FEATURES = _usi,_trustedboot + """ + + +def test_s3artifacts_init_success(s3_setup): + # Arrange + env = s3_setup + + # Act + s3_artifacts = S3Artifacts(env.bucket_name) + + # Assert + assert s3_artifacts._bucket.name == env.bucket_name + + +def test_s3artifacts_invalid_bucket(): + # Act / Assert + with pytest.raises(Exception): + S3Artifacts("unknown-bucket") + + +def test_download_to_directory_success(s3_setup): + """ + Test download of multiple files to a directory on disk. 
+ """ + # Arrange + env = s3_setup + bucket = env.s3.Bucket(env.bucket_name) + + bucket.put_object(Key=f"meta/singles/{env.cname}", Body=b"metadata") + bucket.put_object(Key=f"objects/{env.cname}/file1", Body=b"data1") + bucket.put_object(Key=f"objects/{env.cname}/file2", Body=b"data2") + + with TemporaryDirectory() as tmpdir: + outdir = Path(tmpdir) + + # Act + artifacts = S3Artifacts(env.bucket_name) + artifacts.download_to_directory(env.cname, outdir) + + # Assert + assert (outdir / f"{env.cname}.s3_metadata.yaml").read_bytes() == b"metadata" + assert (outdir / "file1").read_bytes() == b"data1" + assert (outdir / "file2").read_bytes() == b"data2" + + +def test_download_to_directory_invalid_path(s3_setup): + """ + Test proper handling of download attempt to invalid path. + """ + # Arrange + env = s3_setup + artifacts = S3Artifacts(env.bucket_name) + + # Act / Assert + with pytest.raises(RuntimeError): + artifacts.download_to_directory(env.cname, "/invalid/path/does/not/exist") + + +def test_upload_from_directory_success(s3_setup): + """ + Test upload of multiple artifacts from disk to bucket + """ + # Arrange + env = s3_setup + + release_path = env.tmp_path / f"{env.cname}.release" + release_path.write_text(RELEASE_DATA) + + for filename in [f"{env.cname}-file1", f"{env.cname}-file2"]: + (env.tmp_path / filename).write_bytes(b"dummy content") + + # Act + artifacts = S3Artifacts(env.bucket_name) + artifacts.upload_from_directory(env.cname, env.tmp_path) + + # Assert + bucket = env.s3.Bucket(env.bucket_name) + keys = [obj.key for obj in bucket.objects.all()] + assert f"objects/{env.cname}/{env.cname}-file1" in keys + assert f"objects/{env.cname}/{env.cname}-file2" in keys + assert f"meta/singles/{env.cname}" in keys + + +def test_upload_from_directory_with_delete(s3_setup): + """ + Test that upload_from_directory deletes existing files before uploading + when delete_before_push=True. 
+ """ + env = s3_setup + bucket = env.s3.Bucket(env.bucket_name) + + # Arrange: create release and artifact files locally + release = env.tmp_path / f"{env.cname}.release" + release.write_text(RELEASE_DATA) + + artifact = env.tmp_path / f"{env.cname}.kernel" + artifact.write_bytes(b"fake") + + # Arrange: put dummy existing objects to be deleted + bucket.put_object(Key=f"objects/{env.cname}/{artifact.name}", Body=b"old data") + bucket.put_object(Key=f"meta/singles/{env.cname}", Body=b"old metadata") + + artifacts = S3Artifacts(env.bucket_name) + + # Act + artifacts.upload_from_directory(env.cname, env.tmp_path, delete_before_push=True) + + # Assert + keys = [obj.key for obj in bucket.objects.all()] + + # The old key should no longer be present as old data (no duplicates) + # but the new upload file key should exist (artifact uploaded) + assert f"objects/{env.cname}/{artifact.name}" in keys + assert f"meta/singles/{env.cname}" in keys