From b23a05aa3ce5ba16805aee6dc1c1f59249d58ca3 Mon Sep 17 00:00:00 2001 From: Tiara Lena Hock Date: Wed, 30 Jul 2025 12:05:43 +0200 Subject: [PATCH 01/12] Add moto dependency for s3 mocking --- poetry.lock | 132 ++++++++++++++++++++++++++++++++++++++++++------- pyproject.toml | 1 + 2 files changed, 115 insertions(+), 18 deletions(-) diff --git a/poetry.lock b/poetry.lock index 9c3cbffc..fab6068d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -134,7 +134,7 @@ version = "1.39.10" description = "The AWS SDK for Python" optional = false python-versions = ">=3.9" -groups = ["main"] +groups = ["main", "dev"] files = [ {file = "boto3-1.39.10-py3-none-any.whl", hash = "sha256:5b2aa5b7d075491c7c6cb539255a44f0ebd70ffc540e32dfd56eb8a361390e5e"}, {file = "boto3-1.39.10.tar.gz", hash = "sha256:62a1623c8c9495625c88778869b75b692a7ef226fcbb96ea58365b9f043a2ae2"}, @@ -154,7 +154,7 @@ version = "1.39.10" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">=3.9" -groups = ["main"] +groups = ["main", "dev"] files = [ {file = "botocore-1.39.10-py3-none-any.whl", hash = "sha256:d279f252a37bfa7d8a628404ea745c0163ed6260e037d813ce0aef2996ffea28"}, {file = "botocore-1.39.10.tar.gz", hash = "sha256:7fe00007304fe4627d7dd7b8605c6666a560651e91100f47391ae40a310dc092"}, @@ -174,7 +174,7 @@ version = "2025.4.26" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" -groups = ["main", "docs"] +groups = ["main", "dev", "docs"] files = [ {file = "certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3"}, {file = "certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6"}, @@ -186,7 +186,7 @@ version = "1.17.1" description = "Foreign Function Interface for Python calling C code." 
optional = false python-versions = ">=3.8" -groups = ["main"] +groups = ["main", "dev"] markers = "platform_python_implementation != \"PyPy\"" files = [ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, @@ -267,7 +267,7 @@ version = "3.4.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7" -groups = ["main", "docs"] +groups = ["main", "dev", "docs"] files = [ {file = "charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941"}, {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd"}, @@ -389,7 +389,7 @@ files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] -markers = {main = "platform_system == \"Windows\" or sys_platform == \"win32\"", dev = "platform_system == \"Windows\"", docs = "sys_platform == \"win32\""} +markers = {main = "sys_platform == \"win32\" or platform_system == \"Windows\"", dev = "platform_system == \"Windows\"", docs = "sys_platform == \"win32\""} [[package]] name = "cryptography" @@ -397,7 +397,7 @@ version = "45.0.4" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = "!=3.9.0,!=3.9.1,>=3.7" -groups = ["main"] +groups = ["main", "dev"] files = [ {file = "cryptography-45.0.4-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:425a9a6ac2823ee6e46a76a21a4e8342d8fa5c01e08b823c1f19a8b74f096069"}, {file = "cryptography-45.0.4-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:680806cf63baa0039b920f4976f5f31b10e772de42f16310a6839d9f21a26b0d"}, @@ -503,7 +503,7 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" -groups = ["main", "docs"] +groups = ["main", "dev", "docs"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -542,7 +542,7 @@ version = "3.1.6" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" -groups = ["docs"] +groups = ["dev", "docs"] files = [ {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, @@ -560,7 +560,7 @@ version = "1.0.1" description = "JSON Matching Expressions" optional = false python-versions = ">=3.7" -groups = ["main"] +groups = ["main", "dev"] files = [ {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, @@ -634,7 +634,7 @@ version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false python-versions = ">=3.9" -groups = ["docs"] +groups = ["dev", "docs"] files = [ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, @@ -711,6 +711,52 @@ files = [ {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, ] +[[package]] +name = "moto" +version = "5.1.9" +description = "A library that allows you to easily mock out tests based on AWS infrastructure" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "moto-5.1.9-py3-none-any.whl", hash = "sha256:e9ba7e4764a6088ccc34e3cc846ae719861ca202409fa865573de40a3e805b9b"}, + {file = "moto-5.1.9.tar.gz", hash = "sha256:0c4f0387b06b5d24c0ce90f8f89f31a565cc05789189c5d59b5df02594f2e371"}, +] + +[package.dependencies] +boto3 = ">=1.9.201" +botocore = ">=1.20.88,<1.35.45 || >1.35.45,<1.35.46 || >1.35.46" +cryptography = ">=35.0.0" +Jinja2 = ">=2.10.1" +python-dateutil = ">=2.1,<3.0.0" +requests = ">=2.5" +responses = ">=0.15.0,<0.25.5 || >0.25.5" +werkzeug = ">=0.5,<2.2.0 || >2.2.0,<2.2.1 || >2.2.1" +xmltodict = "*" + +[package.extras] +all = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsonpath_ng", "jsonschema", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.6.1)", "pyparsing (>=3.0.7)", "setuptools"] +apigateway = ["PyYAML (>=5.1)", "joserfc (>=0.9.0)", "openapi-spec-validator (>=0.5.0)"] +apigatewayv2 = ["PyYAML (>=5.1)", "openapi-spec-validator (>=0.5.0)"] +appsync = ["graphql-core"] +awslambda = ["docker (>=3.0.0)"] +batch = ["docker (>=3.0.0)"] +cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint 
(>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.6.1)", "pyparsing (>=3.0.7)", "setuptools"] +cognitoidp = ["joserfc (>=0.9.0)"] +dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.6.1)"] +dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.6.1)"] +events = ["jsonpath_ng"] +glue = ["pyparsing (>=3.0.7)"] +proxy = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "graphql-core", "joserfc (>=0.9.0)", "jsonpath_ng", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.6.1)", "pyparsing (>=3.0.7)", "setuptools"] +quicksight = ["jsonschema"] +resourcegroupstaggingapi = ["PyYAML (>=5.1)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.6.1)", "pyparsing (>=3.0.7)"] +s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.6.1)"] +s3crc32c = ["PyYAML (>=5.1)", "crc32c", "py-partiql-parser (==0.6.1)"] +server = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "joserfc (>=0.9.0)", "jsonpath_ng", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.6.1)", "pyparsing (>=3.0.7)", "setuptools"] +ssm = ["PyYAML (>=5.1)"] +stepfunctions = ["antlr4-python3-runtime", "jsonpath_ng"] +xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] + [[package]] name = "mypy-extensions" version = "1.1.0" @@ -843,7 +889,7 @@ version = "2.22" description = "C parser in Python" optional = false python-versions = ">=3.8" -groups = ["main"] +groups = ["main", "dev"] markers = "platform_python_implementation != \"PyPy\"" files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, @@ -893,7 +939,7 @@ version = "2.9.0.post0" 
description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -groups = ["main"] +groups = ["main", "dev"] files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, @@ -1002,7 +1048,7 @@ version = "2.32.4" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" -groups = ["main", "docs"] +groups = ["main", "dev", "docs"] files = [ {file = "requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c"}, {file = "requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422"}, @@ -1018,6 +1064,26 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "responses" +version = "0.25.7" +description = "A utility library for mocking out the `requests` Python library." 
+optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "responses-0.25.7-py3-none-any.whl", hash = "sha256:92ca17416c90fe6b35921f52179bff29332076bb32694c0df02dcac2c6bc043c"}, + {file = "responses-0.25.7.tar.gz", hash = "sha256:8ebae11405d7a5df79ab6fd54277f6f2bc29b2d002d0dd2d5c632594d1ddcedb"}, +] + +[package.dependencies] +pyyaml = "*" +requests = ">=2.30.0,<3.0" +urllib3 = ">=1.25.10,<3.0" + +[package.extras] +tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli ; python_version < \"3.11\"", "tomli-w", "types-PyYAML", "types-requests"] + [[package]] name = "rich" version = "14.0.0" @@ -1170,7 +1236,7 @@ version = "0.13.0" description = "An Amazon S3 Transfer Manager" optional = false python-versions = ">=3.9" -groups = ["main"] +groups = ["main", "dev"] files = [ {file = "s3transfer-0.13.0-py3-none-any.whl", hash = "sha256:0148ef34d6dd964d0d8cf4311b2b21c474693e57c2e069ec708ce043d2b527be"}, {file = "s3transfer-0.13.0.tar.gz", hash = "sha256:f5e6db74eb7776a37208001113ea7aa97695368242b364d73e91c981ac522177"}, @@ -1209,7 +1275,7 @@ version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -groups = ["main"] +groups = ["main", "dev"] files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, @@ -1430,7 +1496,7 @@ version = "2.4.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.9" -groups = ["main", "docs"] +groups = ["main", "dev", "docs"] files = [ {file = "urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813"}, {file = "urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466"}, @@ -1442,7 +1508,37 @@ h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] +[[package]] +name = "werkzeug" +version = "3.1.3" +description = "The comprehensive WSGI web application library." +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e"}, + {file = "werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746"}, +] + +[package.dependencies] +MarkupSafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog (>=2.3)"] + +[[package]] +name = "xmltodict" +version = "0.14.2" +description = "Makes working with XML feel like you are working with JSON" +optional = false +python-versions = ">=3.6" +groups = ["dev"] +files = [ + {file = "xmltodict-0.14.2-py2.py3-none-any.whl", hash = "sha256:20cc7d723ed729276e808f26fb6b3599f786cbc37e06c65e192ba77c40f20aac"}, + {file = "xmltodict-0.14.2.tar.gz", hash = "sha256:201e7c28bb210e374999d1dde6382923ab0ed1a8a5faeece48ab525b7810a553"}, +] + [metadata] lock-version = "2.1" python-versions = "^3.13" -content-hash = "00e38dd1eac56b7b030310490d2e0da9aa380314d92834ce06deec4f2e5e3d85" +content-hash = "16d757c1d7526b62d923b28c642f55847ab6fad6505101a1313ca7fb3e6f0800" diff --git a/pyproject.toml b/pyproject.toml index aa6bd544..579c1acf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,6 +25,7 @@ pygments = "^2.19.1" [tool.poetry.group.dev.dependencies] bandit = "^1.8.3" black = "^25.1.0" +moto = "^5.1.9" [tool.poetry.group.docs.dependencies] 
sphinx-rtd-theme = "^3.0.2" From 44b2c482d3521ace65204e71d1161e65ce10d749 Mon Sep 17 00:00:00 2001 From: Tiara Lena Hock Date: Wed, 30 Jul 2025 12:21:25 +0200 Subject: [PATCH 02/12] Test: Add tests for Bucket class --- tests/s3/__init__.py | 0 tests/s3/test_bucket.py | 145 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 145 insertions(+) create mode 100644 tests/s3/__init__.py create mode 100644 tests/s3/test_bucket.py diff --git a/tests/s3/__init__.py b/tests/s3/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/s3/test_bucket.py b/tests/s3/test_bucket.py new file mode 100644 index 00000000..6f5b81a4 --- /dev/null +++ b/tests/s3/test_bucket.py @@ -0,0 +1,145 @@ +""" +Test the `Bucket` class in `src/gardenlinux/s3/bucket.py` by using +mock AWS interactions provided by the `moto` module. + +The `@mock_aws` decorator `moto` provides ensures a fresh +but fake AWS-like environment. +""" + +import io +import pytest +import boto3 +from moto import mock_aws +from pathlib import Path + +from gardenlinux.s3.bucket import Bucket + + +BUCKET_NAME = "test-bucket" +REGION = "us-east-1" + + +@mock_aws +def test_objects_empty(): + """ + List objects from empty bucket. 
+ """ + # Arrange + s3 = boto3.resource("s3", region_name=REGION) + s3.create_bucket(Bucket=BUCKET_NAME) + + # Act + bucket = Bucket(bucket_name=BUCKET_NAME, s3_resource_config={"region_name": REGION}) + objects = list(bucket.objects) + + # Assert + assert objects == [] + + +@mock_aws +def test_upload_file_and_list(tmp_path): + """ + Create a fake file in a temporary directory, upload and try + to list it + """ + # Arrange + s3 = boto3.resource("s3", region_name=REGION) + s3.create_bucket(Bucket=BUCKET_NAME) + + test_file = tmp_path / "example.txt" + test_file.write_text("hello moto") + + # Act + bucket = Bucket(bucket_name=BUCKET_NAME, s3_resource_config={"region_name": REGION}) + bucket.upload_file(str(test_file), "example.txt") + + all_keys = [obj.key for obj in bucket.objects] + + # Assert + assert "example.txt" in all_keys + + +@mock_aws +def test_download_file(tmp_path): + """ + Try to download a file pre-existing in the bucket + """ + # Arrange + s3 = boto3.resource("s3", region_name=REGION) + s3.create_bucket(Bucket=BUCKET_NAME) + s3.Object(BUCKET_NAME, "file.txt").put(Body=b"some data") + + # Act + bucket = Bucket(bucket_name=BUCKET_NAME, s3_resource_config={"region_name": REGION}) + + target_path = tmp_path / "downloaded.txt" + bucket.download_file("file.txt", str(target_path)) + + # Assert + assert target_path.read_text() == "some data" + + +@mock_aws +def test_upload_fileobj(): + """ + Upload a file-like in-memory object to the bucket + """ + # Arrange + s3 = boto3.resource("s3", region_name=REGION) + s3.create_bucket(Bucket=BUCKET_NAME) + + # Act + # Create in-memory binary stream (file content) + data = io.BytesIO(b"Test Data") + bucket = Bucket(bucket_name=BUCKET_NAME, s3_resource_config={"region_name": REGION}) + bucket.upload_fileobj(data, "binary.obj") + + obj = s3.Object(BUCKET_NAME, "binary.obj").get() + + # Assert + assert obj["Body"].read() == b"Test Data" + + +@mock_aws +def test_download_fileobj(): + """ + Download data into a in-memory 
object + """ + # Arrange + s3 = boto3.resource("s3", region_name=REGION) + s3.create_bucket(Bucket=BUCKET_NAME) + # Put some object in the bucket + s3.Object(BUCKET_NAME, "somekey").put(Body=b"123abc") + + # Act + bucket = Bucket(bucket_name=BUCKET_NAME, s3_resource_config={"region_name": REGION}) + + # Create empty in-memory bytestream to act as a writable file + output = io.BytesIO() + bucket.download_fileobj("somekey", output) + + # Reset binary cursor to prepare for read + output.seek(0) + + # Assert + assert output.read() == b"123abc" + + +@mock_aws +def test_getattr_delegates(): + """ + Verify that attribute access is delegated to the underlying boto3 Bucket. + + This checks that accessing e.g. `.name` on our custom Bucket works by forwarding + the call to the real boto3 bucket. + """ + # Arrange + s3 = boto3.resource("s3", region_name=REGION) + s3.create_bucket(Bucket=BUCKET_NAME) + + # Act + bucket = Bucket(bucket_name=BUCKET_NAME, s3_resource_config={"region_name": REGION}) + + # Assert + # __getattr__ should delegate this to the underlying boto3 Bucket object + assert bucket.name == BUCKET_NAME From a1902837fb18253375fcc0a3522c42364ea8d866 Mon Sep 17 00:00:00 2001 From: Tiara Lena Hock Date: Wed, 30 Jul 2025 16:39:08 +0200 Subject: [PATCH 03/12] Test: Add tests for S3Artifact class --- tests/s3/test_s3_artifacts.py | 221 ++++++++++++++++++++++++++++++++++ 1 file changed, 221 insertions(+) create mode 100644 tests/s3/test_s3_artifacts.py diff --git a/tests/s3/test_s3_artifacts.py b/tests/s3/test_s3_artifacts.py new file mode 100644 index 00000000..6e44bd7f --- /dev/null +++ b/tests/s3/test_s3_artifacts.py @@ -0,0 +1,221 @@ +import pytest +from unittest.mock import MagicMock, patch +from pathlib import Path +from tempfile import TemporaryDirectory +from hashlib import md5, sha256 +from gardenlinux.s3.s3_artifacts import S3Artifacts + + +# Dummy CName replacement +class DummyCName: + def __init__(self, cname): + self.platform = "aws" + self.arch = "amd64" + 
self.version = "1234.1" + self.commit_id = "abc123" + + +# Helpers to compute digests for fake files +def dummy_digest(data: bytes, algo: str) -> str: + """ + Dummy for file_digest() to compute hashes for in-memory byte streams + """ + content = data.read() + data.seek(0) # Reset byte cursor to start for multiple uses + + if algo == "md5": + return md5(content) + elif algo == "sha256": + return sha256(content) + else: + raise ValueError(f"Unsupported algo: {algo}") + + +@patch("gardenlinux.s3.s3_artifacts.Bucket") +def test_s3artifacts_init_success(mock_bucket_class): + """ + Sanity test to assert correct instantiation of S3Artifacts object + """ + mock_bucket_instance = MagicMock() + mock_bucket_class.return_value = mock_bucket_instance + + s3 = S3Artifacts("my-bucket") + + mock_bucket_class.assert_called_once_with("my-bucket", None, None) + assert s3._bucket == mock_bucket_instance + + +@patch("gardenlinux.s3.s3_artifacts.Bucket") +def test_s3_artifacts_invalid_bucket(mock_bucket): + """ + Sanity test to check proper Error raising when using non-existing bucket + """ + # Simulate an exception being raised when trying to create the Bucket + mock_bucket.side_effect = RuntimeError("Bucket does not exist") + + with pytest.raises(RuntimeError, match="Bucket does not exist"): + S3Artifacts("invalid-bucket") + + +@patch("gardenlinux.s3.s3_artifacts.CName", new=DummyCName) +@patch("gardenlinux.s3.s3_artifacts.Bucket") +def test_download_to_directory_success(mock_bucket_class): + """ + Test download of mutliple files to directory on disk. 
+ """ + # Arrange + # Create mock bucket instance + mock_bucket = MagicMock() + + # Mock release object + release_object = MagicMock() + release_object.key = "meta/singles/testcname" + + # Mock objects to be downloaded + s3_obj1 = MagicMock() + s3_obj1.key = "objects/testcname/file1" + s3_obj2 = MagicMock() + s3_obj2.key = "objects/testcname/file2" + + # Mock return value of .filter().all() from boto3 + class MockFilterReturn: + def all(self): + return [s3_obj1, s3_obj2] + + # Mock teh behaviour of .objects.filter(Prefix=...) + # Lets us simulate different responses depending on prefix + def filter_side_effect(Prefix): + # When fetching metadata + if Prefix == "meta/singles/testcname": + return [release_object] # return list with release file + # When fetching actual artifact + elif Prefix == "objects/testcname": + return MockFilterReturn() # return mock object + return [] # Nothing found + + # Act + mock_bucket.objects.filter.side_effect = filter_side_effect + mock_bucket_class.return_value = mock_bucket + + with TemporaryDirectory() as tmpdir: + artifacts_dir = Path(tmpdir) + + s3 = S3Artifacts("test-bucket") + s3.download_to_directory("testcname", artifacts_dir) + + # Assert + # Validate download_file called with correct metadata path + mock_bucket.download_file.assert_any_call( + "meta/singles/testcname", + artifacts_dir / "testcname.s3_metadata.yaml", + ) + + # Validate files were downloaded from object keys + mock_bucket.download_file.assert_any_call( + "objects/testcname/file1", artifacts_dir / "file1" + ) + mock_bucket.download_file.assert_any_call( + "objects/testcname/file2", artifacts_dir / "file2" + ) + + assert mock_bucket.download_file.call_count == 3 + + +@patch("gardenlinux.s3.s3_artifacts.Bucket") +def test_download_to_directory_invalid_path(mock_bucket): + """ + Sanity Test to test behaviour on invalid paths + """ + s3 = S3Artifacts("bucket") + with pytest.raises(RuntimeError): + s3.download_to_directory("test-cname", 
"/invalid/path/does/not/exist") + + +@patch("gardenlinux.s3.s3_artifacts.file_digest", side_effect=dummy_digest) +@patch("gardenlinux.s3.s3_artifacts.CName", new=DummyCName) +@patch("gardenlinux.s3.s3_artifacts.Bucket") +def test_upload_from_directory_success(mock_bucket_class, mock_digest): + """ + Test upload of multiple artifacts from disk to bucket + """ + # Arrange + mock_bucket = MagicMock() + mock_bucket.name = "test-bucket" + mock_bucket_class.return_value = mock_bucket + + # Create a fake .release file + release_data = """ + GARDENLINUX_VERSION = 1234.1 + GARDENLINUX_COMMIT_ID = abc123 + GARDENLINUX_COMMIT_ID_LONG = abc123long + GARDENLINUX_FEATURES = _usi,_trustedboot + """ + + # Create a fake release file and two artifact files + with TemporaryDirectory() as tmpdir: + artifacts_dir = Path(tmpdir) + cname = "testcname" + + # Write .release file + release_path = artifacts_dir / f"{cname}.release" + release_path.write_text(release_data) + + # Create dummy files for upload + for name in [f"{cname}-file1", f"{cname}-file2"]: + (artifacts_dir / name).write_bytes(b"dummy content") + + s3 = S3Artifacts("test-bucket") + + # Act + s3.upload_from_directory(cname, artifacts_dir) + + # Assert + calls = mock_bucket.upload_file.call_args_list + + # Check that for each file, upload_file was called with ExtraArgs containing "Tagging" + for name in [f"{cname}-file1", f"{cname}-file2"]: + key = f"objects/{cname}/{name}" + path = artifacts_dir / name + + # Look for a call with matching positional args (path, key) + matching_calls = [ + call + for call in calls + if call.args[0] == path + and call.args[1] == key + and isinstance(call.kwargs.get("ExtraArgs"), dict) + and "Tagging" in call.kwargs["ExtraArgs"] + ] + assert matching_calls, f"upload_file was not called with Tagging for {name}" + + +@patch("gardenlinux.s3.s3_artifacts.file_digest", side_effect=dummy_digest) +@patch("gardenlinux.s3.s3_artifacts.CName", new=DummyCName) +@patch("gardenlinux.s3.s3_artifacts.Bucket") 
+def test_upload_from_directory_with_delete(mock_bucket_class, mock_digest, tmp_path): + """ + Test that upload_from_directory deletes existing files before uploading + when delete_before_push=True + """ + mock_bucket = MagicMock() + mock_bucket.name = "test-bucket" + mock_bucket_class.return_value = mock_bucket + + s3 = S3Artifacts("test-bucket") + cname = "test-cname" + + release = tmp_path / f"{cname}.release" + release.write_text( + "GARDENLINUX_VERSION = 1234.1\n" + "GARDENLINUX_COMMIT_ID = abc123\n" + "GARDENLINUX_COMMIT_ID_LONG = abc123long\n" + "GARDENLINUX_FEATURES = _usi,_trustedboot\n" + ) + + artifact = tmp_path / f"{cname}.kernel" + artifact.write_bytes(b"fake") + + s3.upload_from_directory(cname, tmp_path, delete_before_push=True) + + mock_bucket.delete_objects.assert_any_call(Delete={"Objects": [{"Key": f"objects/{cname}/{artifact.name}"}]}) + mock_bucket.delete_objects.assert_any_call(Delete={"Objects": [{"Key": f"meta/singles/{cname}"}]}) From 3d509073646f4e5a069f2fdf68136339b6adad3f Mon Sep 17 00:00:00 2001 From: Tiara Lena Hock Date: Wed, 30 Jul 2025 16:48:39 +0200 Subject: [PATCH 04/12] Format code and fix bandit error for md5 use in mock --- tests/s3/test_s3_artifacts.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/tests/s3/test_s3_artifacts.py b/tests/s3/test_s3_artifacts.py index 6e44bd7f..16a10dda 100644 --- a/tests/s3/test_s3_artifacts.py +++ b/tests/s3/test_s3_artifacts.py @@ -21,10 +21,10 @@ def dummy_digest(data: bytes, algo: str) -> str: Dummy for file_digest() to compute hashes for in-memory byte streams """ content = data.read() - data.seek(0) # Reset byte cursor to start for multiple uses + data.seek(0) # Reset byte cursor to start for multiple uses if algo == "md5": - return md5(content) + return md5(content) # nosec B324 elif algo == "sha256": return sha256(content) else: @@ -87,11 +87,11 @@ def all(self): def filter_side_effect(Prefix): # When fetching metadata if Prefix == 
"meta/singles/testcname": - return [release_object] # return list with release file + return [release_object] # return list with release file # When fetching actual artifact elif Prefix == "objects/testcname": - return MockFilterReturn() # return mock object - return [] # Nothing found + return MockFilterReturn() # return mock object + return [] # Nothing found # Act mock_bucket.objects.filter.side_effect = filter_side_effect @@ -217,5 +217,9 @@ def test_upload_from_directory_with_delete(mock_bucket_class, mock_digest, tmp_p s3.upload_from_directory(cname, tmp_path, delete_before_push=True) - mock_bucket.delete_objects.assert_any_call(Delete={"Objects": [{"Key": f"objects/{cname}/{artifact.name}"}]}) - mock_bucket.delete_objects.assert_any_call(Delete={"Objects": [{"Key": f"meta/singles/{cname}"}]}) + mock_bucket.delete_objects.assert_any_call( + Delete={"Objects": [{"Key": f"objects/{cname}/{artifact.name}"}]} + ) + mock_bucket.delete_objects.assert_any_call( + Delete={"Objects": [{"Key": f"meta/singles/{cname}"}]} + ) From a4190b9da714e9d2e7d012d8becb258430e7e25c Mon Sep 17 00:00:00 2001 From: Tiara Lena Hock Date: Thu, 31 Jul 2025 11:05:23 +0200 Subject: [PATCH 05/12] Tests: Rewrite S3Artifact tests * Use fixture with mock_aws context manager instead of directly patching Bucket --- tests/s3/test_s3_artifacts.py | 228 +++++++++++++--------------------- 1 file changed, 89 insertions(+), 139 deletions(-) diff --git a/tests/s3/test_s3_artifacts.py b/tests/s3/test_s3_artifacts.py index 16a10dda..f6d5a90c 100644 --- a/tests/s3/test_s3_artifacts.py +++ b/tests/s3/test_s3_artifacts.py @@ -1,14 +1,18 @@ import pytest -from unittest.mock import MagicMock, patch +import boto3 from pathlib import Path from tempfile import TemporaryDirectory from hashlib import md5, sha256 +from moto import mock_aws + from gardenlinux.s3.s3_artifacts import S3Artifacts +CNAME = "testcname" + # Dummy CName replacement class DummyCName: - def __init__(self, cname): + def __init__(self, cname): 
# pylint: disable=unused-argument self.platform = "aws" self.arch = "amd64" self.version = "1234.1" @@ -31,119 +35,83 @@ def dummy_digest(data: bytes, algo: str) -> str: raise ValueError(f"Unsupported algo: {algo}") -@patch("gardenlinux.s3.s3_artifacts.Bucket") -def test_s3artifacts_init_success(mock_bucket_class): +@pytest.fixture(autouse=True) +def s3_setup(tmp_path, monkeypatch): """ - Sanity test to assert correct instantiation of S3Artifacts object + Provides a clean S3 setup for each test. """ - mock_bucket_instance = MagicMock() - mock_bucket_class.return_value = mock_bucket_instance + with mock_aws(): + s3 = boto3.resource("s3", region_name="us-east-1") + bucket_name = "test-bucket" + s3.create_bucket(Bucket=bucket_name) - s3 = S3Artifacts("my-bucket") + monkeypatch.setattr("gardenlinux.s3.s3_artifacts.CName", DummyCName) + monkeypatch.setattr("gardenlinux.s3.s3_artifacts.file_digest", dummy_digest) - mock_bucket_class.assert_called_once_with("my-bucket", None, None) - assert s3._bucket == mock_bucket_instance + yield s3, bucket_name, tmp_path -@patch("gardenlinux.s3.s3_artifacts.Bucket") -def test_s3_artifacts_invalid_bucket(mock_bucket): - """ - Sanity test to check proper Error raising when using non-existing bucket - """ - # Simulate an exception being raised when trying to create the Bucket - mock_bucket.side_effect = RuntimeError("Bucket does not exist") +def test_s3artifacts_init_success(s3_setup): + # Arrange + _, bucket_name, _ = s3_setup + + # Act + s3_artifacts = S3Artifacts(bucket_name) - with pytest.raises(RuntimeError, match="Bucket does not exist"): - S3Artifacts("invalid-bucket") + # Assert + assert s3_artifacts._bucket.name == bucket_name -@patch("gardenlinux.s3.s3_artifacts.CName", new=DummyCName) -@patch("gardenlinux.s3.s3_artifacts.Bucket") -def test_download_to_directory_success(mock_bucket_class): +def test_s3artifacts_invalid_bucket(): + # Act / Assert + with pytest.raises(Exception): + S3Artifacts("unknown-bucket") + + +def 
test_download_to_directory_success(s3_setup): """ - Test download of mutliple files to directory on disk. + Test download of multiple files to a directory on disk. """ # Arrange - # Create mock bucket instance - mock_bucket = MagicMock() - - # Mock release object - release_object = MagicMock() - release_object.key = "meta/singles/testcname" - - # Mock objects to be downloaded - s3_obj1 = MagicMock() - s3_obj1.key = "objects/testcname/file1" - s3_obj2 = MagicMock() - s3_obj2.key = "objects/testcname/file2" - - # Mock return value of .filter().all() from boto3 - class MockFilterReturn: - def all(self): - return [s3_obj1, s3_obj2] - - # Mock teh behaviour of .objects.filter(Prefix=...) - # Lets us simulate different responses depending on prefix - def filter_side_effect(Prefix): - # When fetching metadata - if Prefix == "meta/singles/testcname": - return [release_object] # return list with release file - # When fetching actual artifact - elif Prefix == "objects/testcname": - return MockFilterReturn() # return mock object - return [] # Nothing found + s3, bucket_name, _ = s3_setup + bucket = s3.Bucket(bucket_name) - # Act - mock_bucket.objects.filter.side_effect = filter_side_effect - mock_bucket_class.return_value = mock_bucket + bucket.put_object(Key=f"meta/singles/{CNAME}", Body=b"metadata") + bucket.put_object(Key=f"objects/{CNAME}/file1", Body=b"data1") + bucket.put_object(Key=f"objects/{CNAME}/file2", Body=b"data2") with TemporaryDirectory() as tmpdir: - artifacts_dir = Path(tmpdir) + outdir = Path(tmpdir) - s3 = S3Artifacts("test-bucket") - s3.download_to_directory("testcname", artifacts_dir) + # Act + artifacts = S3Artifacts(bucket_name) + artifacts.download_to_directory(CNAME, outdir) # Assert - # Validate download_file called with correct metadata path - mock_bucket.download_file.assert_any_call( - "meta/singles/testcname", - artifacts_dir / "testcname.s3_metadata.yaml", - ) + assert (outdir / f"{CNAME}.s3_metadata.yaml").read_bytes() == b"metadata" + assert 
(outdir / "file1").read_bytes() == b"data1" + assert (outdir / "file2").read_bytes() == b"data2" - # Validate files were downloaded from object keys - mock_bucket.download_file.assert_any_call( - "objects/testcname/file1", artifacts_dir / "file1" - ) - mock_bucket.download_file.assert_any_call( - "objects/testcname/file2", artifacts_dir / "file2" - ) - assert mock_bucket.download_file.call_count == 3 - - -@patch("gardenlinux.s3.s3_artifacts.Bucket") -def test_download_to_directory_invalid_path(mock_bucket): +def test_download_to_directory_invalid_path(s3_setup): """ - Sanity Test to test behaviour on invalid paths + Test proper handling of download attempt to invalid path. """ - s3 = S3Artifacts("bucket") + # Arrange + _, bucket_name, _ = s3_setup + artifacts = S3Artifacts(bucket_name) + + # Act / Assert with pytest.raises(RuntimeError): - s3.download_to_directory("test-cname", "/invalid/path/does/not/exist") + artifacts.download_to_directory({CNAME}, "/invalid/path/does/not/exist") -@patch("gardenlinux.s3.s3_artifacts.file_digest", side_effect=dummy_digest) -@patch("gardenlinux.s3.s3_artifacts.CName", new=DummyCName) -@patch("gardenlinux.s3.s3_artifacts.Bucket") -def test_upload_from_directory_success(mock_bucket_class, mock_digest): +def test_upload_from_directory_success(s3_setup): """ Test upload of multiple artifacts from disk to bucket """ # Arrange - mock_bucket = MagicMock() - mock_bucket.name = "test-bucket" - mock_bucket_class.return_value = mock_bucket - - # Create a fake .release file + s3, bucket_name, tmp_path = s3_setup release_data = """ GARDENLINUX_VERSION = 1234.1 GARDENLINUX_COMMIT_ID = abc123 @@ -151,60 +119,34 @@ def test_upload_from_directory_success(mock_bucket_class, mock_digest): GARDENLINUX_FEATURES = _usi,_trustedboot """ - # Create a fake release file and two artifact files - with TemporaryDirectory() as tmpdir: - artifacts_dir = Path(tmpdir) - cname = "testcname" + release_path = tmp_path / f"{CNAME}.release" + 
release_path.write_text(release_data) - # Write .release file - release_path = artifacts_dir / f"{cname}.release" - release_path.write_text(release_data) + for filename in [f"{CNAME}-file1", f"{CNAME}-file2"]: + (tmp_path / filename).write_bytes(b"dummy content") - # Create dummy files for upload - for name in [f"{cname}-file1", f"{cname}-file2"]: - (artifacts_dir / name).write_bytes(b"dummy content") + # Act + artifacts = S3Artifacts(bucket_name) + artifacts.upload_from_directory(CNAME, tmp_path) - s3 = S3Artifacts("test-bucket") + # Assert + bucket = s3.Bucket(bucket_name) + keys = [obj.key for obj in bucket.objects.all()] + assert f"objects/{CNAME}/{CNAME}-file1" in keys + assert f"objects/{CNAME}/{CNAME}-file2" in keys + assert f"meta/singles/{CNAME}" in keys - # Act - s3.upload_from_directory(cname, artifacts_dir) - # Assert - calls = mock_bucket.upload_file.call_args_list - - # Check that for each file, upload_file was called with ExtraArgs containing "Tagging" - for name in [f"{cname}-file1", f"{cname}-file2"]: - key = f"objects/{cname}/{name}" - path = artifacts_dir / name - - # Look for a call with matching positional args (path, key) - matching_calls = [ - call - for call in calls - if call.args[0] == path - and call.args[1] == key - and isinstance(call.kwargs.get("ExtraArgs"), dict) - and "Tagging" in call.kwargs["ExtraArgs"] - ] - assert matching_calls, f"upload_file was not called with Tagging for {name}" - - -@patch("gardenlinux.s3.s3_artifacts.file_digest", side_effect=dummy_digest) -@patch("gardenlinux.s3.s3_artifacts.CName", new=DummyCName) -@patch("gardenlinux.s3.s3_artifacts.Bucket") -def test_upload_from_directory_with_delete(mock_bucket_class, mock_digest, tmp_path): +def test_upload_from_directory_with_delete(s3_setup): """ Test that upload_from_directory deletes existing files before uploading - when delete_before_push=True + when delete_before_push=True. 
""" - mock_bucket = MagicMock() - mock_bucket.name = "test-bucket" - mock_bucket_class.return_value = mock_bucket - - s3 = S3Artifacts("test-bucket") - cname = "test-cname" + s3, bucket_name, tmp_path = s3_setup + bucket = s3.Bucket(bucket_name) - release = tmp_path / f"{cname}.release" + # Arrange: create release and artifact files locally + release = tmp_path / f"{CNAME}.release" release.write_text( "GARDENLINUX_VERSION = 1234.1\n" "GARDENLINUX_COMMIT_ID = abc123\n" @@ -212,14 +154,22 @@ def test_upload_from_directory_with_delete(mock_bucket_class, mock_digest, tmp_p "GARDENLINUX_FEATURES = _usi,_trustedboot\n" ) - artifact = tmp_path / f"{cname}.kernel" + artifact = tmp_path / f"{CNAME}.kernel" artifact.write_bytes(b"fake") - s3.upload_from_directory(cname, tmp_path, delete_before_push=True) + # Arrange: put dummy existing objects to be deleted + bucket.put_object(Key=f"objects/{CNAME}/{artifact.name}", Body=b"old data") + bucket.put_object(Key=f"meta/singles/{CNAME}", Body=b"old metadata") - mock_bucket.delete_objects.assert_any_call( - Delete={"Objects": [{"Key": f"objects/{cname}/{artifact.name}"}]} - ) - mock_bucket.delete_objects.assert_any_call( - Delete={"Objects": [{"Key": f"meta/singles/{cname}"}]} - ) + artifacts = S3Artifacts(bucket_name) + + # Act + artifacts.upload_from_directory(CNAME, tmp_path, delete_before_push=True) + + # Assert + keys = [obj.key for obj in bucket.objects.all()] + + # The old key should no longer be present as old data (no duplicates) + # but the new upload file key should exist (artifact uploaded) + assert f"objects/{CNAME}/{artifact.name}" in keys + assert f"meta/singles/{CNAME}" in keys From 1c490eb7fbb7a92aad71cc758f0dbcb05b72bd4e Mon Sep 17 00:00:00 2001 From: Tiara Lena Hock Date: Thu, 31 Jul 2025 11:43:53 +0200 Subject: [PATCH 06/12] Tests: Split fixtures and mocks into conftest --- tests/s3/conftest.py | 46 +++++++++++++++++++++++++++++++++++ tests/s3/test_s3_artifacts.py | 44 --------------------------------- 2 files 
changed, 46 insertions(+), 44 deletions(-) create mode 100644 tests/s3/conftest.py diff --git a/tests/s3/conftest.py b/tests/s3/conftest.py new file mode 100644 index 00000000..23f5db6d --- /dev/null +++ b/tests/s3/conftest.py @@ -0,0 +1,46 @@ +import pytest +from pathlib import Path +from moto import mock_aws +from hashlib import md5, sha256 +import boto3 + + +# Dummy CName replacement +class DummyCName: + def __init__(self, cname): # pylint: disable=unused-argument + self.platform = "aws" + self.arch = "amd64" + self.version = "1234.1" + self.commit_id = "abc123" + + +# Helpers to compute digests for fake files +def dummy_digest(data: bytes, algo: str) -> str: + """ + Dummy for file_digest() to compute hashes for in-memory byte streams + """ + content = data.read() + data.seek(0) # Reset byte cursor to start for multiple uses + + if algo == "md5": + return md5(content) # nosec B324 + elif algo == "sha256": + return sha256(content) + else: + raise ValueError(f"Unsupported algo: {algo}") + + +@pytest.fixture(autouse=True) +def s3_setup(tmp_path, monkeypatch): + """ + Provides a clean S3 setup for each test. 
+ """ + with mock_aws(): + s3 = boto3.resource("s3", region_name="us-east-1") + bucket_name = "test-bucket" + s3.create_bucket(Bucket=bucket_name) + + monkeypatch.setattr("gardenlinux.s3.s3_artifacts.CName", DummyCName) + monkeypatch.setattr("gardenlinux.s3.s3_artifacts.file_digest", dummy_digest) + + yield s3, bucket_name, tmp_path diff --git a/tests/s3/test_s3_artifacts.py b/tests/s3/test_s3_artifacts.py index f6d5a90c..aa827c7e 100644 --- a/tests/s3/test_s3_artifacts.py +++ b/tests/s3/test_s3_artifacts.py @@ -1,56 +1,12 @@ import pytest -import boto3 from pathlib import Path from tempfile import TemporaryDirectory -from hashlib import md5, sha256 -from moto import mock_aws from gardenlinux.s3.s3_artifacts import S3Artifacts CNAME = "testcname" -# Dummy CName replacement -class DummyCName: - def __init__(self, cname): # pylint: disable=unused-argument - self.platform = "aws" - self.arch = "amd64" - self.version = "1234.1" - self.commit_id = "abc123" - - -# Helpers to compute digests for fake files -def dummy_digest(data: bytes, algo: str) -> str: - """ - Dummy for file_digest() to compute hashes for in-memory byte streams - """ - content = data.read() - data.seek(0) # Reset byte cursor to start for multiple uses - - if algo == "md5": - return md5(content) # nosec B324 - elif algo == "sha256": - return sha256(content) - else: - raise ValueError(f"Unsupported algo: {algo}") - - -@pytest.fixture(autouse=True) -def s3_setup(tmp_path, monkeypatch): - """ - Provides a clean S3 setup for each test. 
- """ - with mock_aws(): - s3 = boto3.resource("s3", region_name="us-east-1") - bucket_name = "test-bucket" - s3.create_bucket(Bucket=bucket_name) - - monkeypatch.setattr("gardenlinux.s3.s3_artifacts.CName", DummyCName) - monkeypatch.setattr("gardenlinux.s3.s3_artifacts.file_digest", dummy_digest) - - yield s3, bucket_name, tmp_path - - def test_s3artifacts_init_success(s3_setup): # Arrange _, bucket_name, _ = s3_setup From a3452fd343e9902302701aaee06cbbc108ec05d0 Mon Sep 17 00:00:00 2001 From: Tiara Lena Hock Date: Thu, 31 Jul 2025 11:54:38 +0200 Subject: [PATCH 07/12] Tests: Refactor Bucket tests to use fixtures --- tests/s3/test_bucket.py | 65 +++++++++++++++-------------------------- 1 file changed, 23 insertions(+), 42 deletions(-) diff --git a/tests/s3/test_bucket.py b/tests/s3/test_bucket.py index 6f5b81a4..9752a9e1 100644 --- a/tests/s3/test_bucket.py +++ b/tests/s3/test_bucket.py @@ -8,49 +8,41 @@ import io import pytest -import boto3 -from moto import mock_aws from pathlib import Path from gardenlinux.s3.bucket import Bucket -BUCKET_NAME = "test-bucket" REGION = "us-east-1" -@mock_aws -def test_objects_empty(): +def test_objects_empty(s3_setup): """ List objects from empty bucket. 
""" # Arrange - s3 = boto3.resource("s3", region_name=REGION) - s3.create_bucket(Bucket=BUCKET_NAME) + s3, bucket_name, _ = s3_setup # Act - bucket = Bucket(bucket_name=BUCKET_NAME, s3_resource_config={"region_name": REGION}) - objects = list(bucket.objects) + bucket = Bucket(bucket_name, s3_resource_config={"region_name": REGION}) # Assert - assert objects == [] + assert list(bucket.objects) == [] -@mock_aws -def test_upload_file_and_list(tmp_path): +def test_upload_file_and_list(s3_setup): """ Create a fake file in a temporary directory, upload and try to list it """ # Arrange - s3 = boto3.resource("s3", region_name=REGION) - s3.create_bucket(Bucket=BUCKET_NAME) + s3, bucket_name, tmp_path = s3_setup test_file = tmp_path / "example.txt" test_file.write_text("hello moto") # Act - bucket = Bucket(bucket_name=BUCKET_NAME, s3_resource_config={"region_name": REGION}) + bucket = Bucket(bucket_name, s3_resource_config={"region_name": REGION}) bucket.upload_file(str(test_file), "example.txt") all_keys = [obj.key for obj in bucket.objects] @@ -59,19 +51,16 @@ def test_upload_file_and_list(tmp_path): assert "example.txt" in all_keys -@mock_aws -def test_download_file(tmp_path): +def test_download_file(s3_setup): """ Try to download a file pre-existing in the bucket """ # Arrange - s3 = boto3.resource("s3", region_name=REGION) - s3.create_bucket(Bucket=BUCKET_NAME) - s3.Object(BUCKET_NAME, "file.txt").put(Body=b"some data") + s3, bucket_name, tmp_path = s3_setup + s3.Object(bucket_name, "file.txt").put(Body=b"some data") # Act - bucket = Bucket(bucket_name=BUCKET_NAME, s3_resource_config={"region_name": REGION}) - + bucket = Bucket(bucket_name, s3_resource_config={"region_name": REGION}) target_path = tmp_path / "downloaded.txt" bucket.download_file("file.txt", str(target_path)) @@ -79,45 +68,39 @@ def test_download_file(tmp_path): assert target_path.read_text() == "some data" -@mock_aws -def test_upload_fileobj(): +def test_upload_fileobj(s3_setup): """ Upload a file-like 
in-memory object to the bucket """ # Arrange - s3 = boto3.resource("s3", region_name=REGION) - s3.create_bucket(Bucket=BUCKET_NAME) + s3, bucket_name, _ = s3_setup # Act # Create in-memory binary stream (file content) data = io.BytesIO(b"Test Data") - bucket = Bucket(bucket_name=BUCKET_NAME, s3_resource_config={"region_name": REGION}) + bucket = Bucket(bucket_name, s3_resource_config={"region_name": REGION}) bucket.upload_fileobj(data, "binary.obj") - obj = s3.Object(BUCKET_NAME, "binary.obj").get() + obj = s3.Object(bucket_name, "binary.obj").get() # Assert assert obj["Body"].read() == b"Test Data" -@mock_aws -def test_download_fileobj(): +def test_download_fileobj(s3_setup): """ Download data into a in-memory object """ # Arange - s3 = boto3.resource("s3", region_name=REGION) - s3.create_bucket(Bucket=BUCKET_NAME) + s3, bucket_name, _ = s3_setup # Put some object in the bucket - s3.Object(BUCKET_NAME, "somekey").put(Body=b"123abc") + s3.Object(bucket_name, "somekey").put(Body=b"123abc") # Act - bucket = Bucket(bucket_name=BUCKET_NAME, s3_resource_config={"region_name": REGION}) - + bucket = Bucket(bucket_name, s3_resource_config={"region_name": REGION}) # Create empty in-memory bytestream to act as a writable file output = io.BytesIO() bucket.download_fileobj("somekey", output) - # Reset binary cursor to prepare for read output.seek(0) @@ -125,8 +108,7 @@ def test_download_fileobj(): assert output.read() == b"123abc" -@mock_aws -def test_getattr_delegates(): +def test_getattr_delegates(s3_setup): """ Verify that attribute access is delegated to the underlying boto3 Bucket. @@ -134,12 +116,11 @@ def test_getattr_delegates(): the call to the real boto3 bucket. 
""" # Arrange - s3 = boto3.resource("s3", region_name=REGION) - s3.create_bucket(Bucket=BUCKET_NAME) + _, bucket_name, _ = s3_setup # Act - bucket = Bucket(bucket_name=BUCKET_NAME, s3_resource_config={"region_name": REGION}) + bucket = Bucket(bucket_name, s3_resource_config={"region_name": REGION}) # Assert # __getattr__ should delegate this to the underlying boto3 Bucket object - assert bucket.name == BUCKET_NAME + assert bucket.name == bucket_name From 5a7dbaeafb8c18bfe306b8d5abcdbb778173fcb7 Mon Sep 17 00:00:00 2001 From: Tiara Lena Hock Date: Thu, 31 Jul 2025 15:34:21 +0200 Subject: [PATCH 08/12] Tests: Extract release data into constant --- tests/s3/test_s3_artifacts.py | 21 ++++++++------------- 1 file changed, 8 insertions(+), 13 deletions(-) diff --git a/tests/s3/test_s3_artifacts.py b/tests/s3/test_s3_artifacts.py index aa827c7e..4c006fa7 100644 --- a/tests/s3/test_s3_artifacts.py +++ b/tests/s3/test_s3_artifacts.py @@ -5,6 +5,12 @@ from gardenlinux.s3.s3_artifacts import S3Artifacts CNAME = "testcname" +RELEASE_DATA = """ + GARDENLINUX_VERSION = 1234.1 + GARDENLINUX_COMMIT_ID = abc123 + GARDENLINUX_COMMIT_ID_LONG = abc123long + GARDENLINUX_FEATURES = _usi,_trustedboot + """ def test_s3artifacts_init_success(s3_setup): @@ -68,15 +74,9 @@ def test_upload_from_directory_success(s3_setup): """ # Arrange s3, bucket_name, tmp_path = s3_setup - release_data = """ - GARDENLINUX_VERSION = 1234.1 - GARDENLINUX_COMMIT_ID = abc123 - GARDENLINUX_COMMIT_ID_LONG = abc123long - GARDENLINUX_FEATURES = _usi,_trustedboot - """ release_path = tmp_path / f"{CNAME}.release" - release_path.write_text(release_data) + release_path.write_text(RELEASE_DATA) for filename in [f"{CNAME}-file1", f"{CNAME}-file2"]: (tmp_path / filename).write_bytes(b"dummy content") @@ -103,12 +103,7 @@ def test_upload_from_directory_with_delete(s3_setup): # Arrange: create release and artifact files locally release = tmp_path / f"{CNAME}.release" - release.write_text( - "GARDENLINUX_VERSION = 1234.1\n" 
- "GARDENLINUX_COMMIT_ID = abc123\n" - "GARDENLINUX_COMMIT_ID_LONG = abc123long\n" - "GARDENLINUX_FEATURES = _usi,_trustedboot\n" - ) + release.write_text(RELEASE_DATA) artifact = tmp_path / f"{CNAME}.kernel" artifact.write_bytes(b"fake") From 445a4e43c90c6bbeeedc751739b30c6b823234e0 Mon Sep 17 00:00:00 2001 From: Tiara Lena Hock Date: Thu, 31 Jul 2025 15:34:38 +0200 Subject: [PATCH 09/12] Fix comment in test_bucket.py --- tests/s3/test_bucket.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/s3/test_bucket.py b/tests/s3/test_bucket.py index 9752a9e1..b1fdedbc 100644 --- a/tests/s3/test_bucket.py +++ b/tests/s3/test_bucket.py @@ -2,8 +2,7 @@ Test the `Bucket` class in `src/gardenlinux/s3/bucket.py` by using mock AWS interactions provided by the `moto` module. -The `@mock_aws` decorator `moto` provides ensures a fresh -but fake AWS-like environment. +A mock AWS environment is provided by the `s3_setup` fixture found in `conftest.py`. """ import io From 32caa13e378b39477cc718fe35c0bf3e0a364110 Mon Sep 17 00:00:00 2001 From: Tiara Lena Hock Date: Thu, 31 Jul 2025 16:09:17 +0200 Subject: [PATCH 10/12] Tests: Use original CName and use dataclass instead of tuple for fixture --- tests/s3/conftest.py | 44 ++++++++++++++++------ tests/s3/test_bucket.py | 36 +++++++++--------- tests/s3/test_s3_artifacts.py | 69 +++++++++++++++++------------------ 3 files changed, 85 insertions(+), 64 deletions(-) diff --git a/tests/s3/conftest.py b/tests/s3/conftest.py index 23f5db6d..949429ee 100644 --- a/tests/s3/conftest.py +++ b/tests/s3/conftest.py @@ -1,17 +1,39 @@ import pytest -from pathlib import Path +from dataclasses import dataclass from moto import mock_aws from hashlib import md5, sha256 import boto3 +from gardenlinux.features.cname import CName as RealCName + +BUCKET_NAME = "test-bucket" +REGION = "us-east-1" + # Dummy CName replacement -class DummyCName: - def __init__(self, cname): # pylint: disable=unused-argument - self.platform = "aws" - 
self.arch = "amd64" - self.version = "1234.1" - self.commit_id = "abc123" +class DummyCName(RealCName): + def __init__(self, cname: str): # pylint: disable=unused-argument + super().__init__(cname) + + +@dataclass(frozen=True) +class S3Env: + s3: object + bucket_name: str + tmp_path: str + cname: str + + +def make_cname( + flavor: str = "testcname", + arch: str = "amd64", + version: str = "1234.1", + commit: str = "abc123", +) -> str: + """ + Helper function to build cname. Can be used to customize the cname. + """ + return f"{flavor}-{arch}-{version}-{commit}" # Helpers to compute digests for fake files @@ -36,11 +58,11 @@ def s3_setup(tmp_path, monkeypatch): Provides a clean S3 setup for each test. """ with mock_aws(): - s3 = boto3.resource("s3", region_name="us-east-1") - bucket_name = "test-bucket" - s3.create_bucket(Bucket=bucket_name) + s3 = boto3.resource("s3", region_name=REGION) + s3.create_bucket(Bucket=BUCKET_NAME) monkeypatch.setattr("gardenlinux.s3.s3_artifacts.CName", DummyCName) monkeypatch.setattr("gardenlinux.s3.s3_artifacts.file_digest", dummy_digest) - yield s3, bucket_name, tmp_path + cname = make_cname() + yield S3Env(s3, BUCKET_NAME, tmp_path, cname) diff --git a/tests/s3/test_bucket.py b/tests/s3/test_bucket.py index b1fdedbc..bcf14e0a 100644 --- a/tests/s3/test_bucket.py +++ b/tests/s3/test_bucket.py @@ -20,10 +20,10 @@ def test_objects_empty(s3_setup): List objects from empty bucket. 
""" # Arrange - s3, bucket_name, _ = s3_setup + env = s3_setup # Act - bucket = Bucket(bucket_name, s3_resource_config={"region_name": REGION}) + bucket = Bucket(env.bucket_name, s3_resource_config={"region_name": REGION}) # Assert assert list(bucket.objects) == [] @@ -35,13 +35,13 @@ def test_upload_file_and_list(s3_setup): to list it """ # Arrange - s3, bucket_name, tmp_path = s3_setup + env = s3_setup - test_file = tmp_path / "example.txt" + test_file = env.tmp_path / "example.txt" test_file.write_text("hello moto") # Act - bucket = Bucket(bucket_name, s3_resource_config={"region_name": REGION}) + bucket = Bucket(env.bucket_name, s3_resource_config={"region_name": REGION}) bucket.upload_file(str(test_file), "example.txt") all_keys = [obj.key for obj in bucket.objects] @@ -55,12 +55,12 @@ def test_download_file(s3_setup): Try to download a file pre-existing in the bucket """ # Arrange - s3, bucket_name, tmp_path = s3_setup - s3.Object(bucket_name, "file.txt").put(Body=b"some data") + env = s3_setup + env.s3.Object(env.bucket_name, "file.txt").put(Body=b"some data") # Act - bucket = Bucket(bucket_name, s3_resource_config={"region_name": REGION}) - target_path = tmp_path / "downloaded.txt" + bucket = Bucket(env.bucket_name, s3_resource_config={"region_name": REGION}) + target_path = env.tmp_path / "downloaded.txt" bucket.download_file("file.txt", str(target_path)) # Assert @@ -72,15 +72,15 @@ def test_upload_fileobj(s3_setup): Upload a file-like in-memory object to the bucket """ # Arrange - s3, bucket_name, _ = s3_setup + env = s3_setup # Act # Create in-memory binary stream (file content) data = io.BytesIO(b"Test Data") - bucket = Bucket(bucket_name, s3_resource_config={"region_name": REGION}) + bucket = Bucket(env.bucket_name, s3_resource_config={"region_name": REGION}) bucket.upload_fileobj(data, "binary.obj") - obj = s3.Object(bucket_name, "binary.obj").get() + obj = env.s3.Object(env.bucket_name, "binary.obj").get() # Assert assert obj["Body"].read() == 
b"Test Data" @@ -91,12 +91,12 @@ def test_download_fileobj(s3_setup): Download data into a in-memory object """ # Arange - s3, bucket_name, _ = s3_setup + env = s3_setup # Put some object in the bucket - s3.Object(bucket_name, "somekey").put(Body=b"123abc") + env.s3.Object(env.bucket_name, "somekey").put(Body=b"123abc") # Act - bucket = Bucket(bucket_name, s3_resource_config={"region_name": REGION}) + bucket = Bucket(env.bucket_name, s3_resource_config={"region_name": REGION}) # Create empty in-memory bytestream to act as a writable file output = io.BytesIO() bucket.download_fileobj("somekey", output) @@ -115,11 +115,11 @@ def test_getattr_delegates(s3_setup): the call to the real boto3 bucket. """ # Arrange - _, bucket_name, _ = s3_setup + env = s3_setup # Act - bucket = Bucket(bucket_name, s3_resource_config={"region_name": REGION}) + bucket = Bucket(env.bucket_name, s3_resource_config={"region_name": REGION}) # Assert # __getattr__ should delegate this to the underlying boto3 Bucket object - assert bucket.name == bucket_name + assert bucket.name == env.bucket_name diff --git a/tests/s3/test_s3_artifacts.py b/tests/s3/test_s3_artifacts.py index 4c006fa7..21750a70 100644 --- a/tests/s3/test_s3_artifacts.py +++ b/tests/s3/test_s3_artifacts.py @@ -4,7 +4,6 @@ from gardenlinux.s3.s3_artifacts import S3Artifacts -CNAME = "testcname" RELEASE_DATA = """ GARDENLINUX_VERSION = 1234.1 GARDENLINUX_COMMIT_ID = abc123 @@ -15,13 +14,13 @@ def test_s3artifacts_init_success(s3_setup): # Arrange - _, bucket_name, _ = s3_setup + env = s3_setup # Act - s3_artifacts = S3Artifacts(bucket_name) + s3_artifacts = S3Artifacts(env.bucket_name) # Assert - assert s3_artifacts._bucket.name == bucket_name + assert s3_artifacts._bucket.name == env.bucket_name def tets_s3artifacts_invalid_bucket(): @@ -35,22 +34,22 @@ def test_download_to_directory_success(s3_setup): Test download of multiple files to a directory on disk. 
""" # Arrange - s3, bucket_name, _ = s3_setup - bucket = s3.Bucket(bucket_name) + env = s3_setup + bucket = env.s3.Bucket(env.bucket_name) - bucket.put_object(Key=f"meta/singles/{CNAME}", Body=b"metadata") - bucket.put_object(Key=f"objects/{CNAME}/file1", Body=b"data1") - bucket.put_object(Key=f"objects/{CNAME}/file2", Body=b"data2") + bucket.put_object(Key=f"meta/singles/{env.cname}", Body=b"metadata") + bucket.put_object(Key=f"objects/{env.cname}/file1", Body=b"data1") + bucket.put_object(Key=f"objects/{env.cname}/file2", Body=b"data2") with TemporaryDirectory() as tmpdir: outdir = Path(tmpdir) # Act - artifacts = S3Artifacts(bucket_name) - artifacts.download_to_directory(CNAME, outdir) + artifacts = S3Artifacts(env.bucket_name) + artifacts.download_to_directory(env.cname, outdir) # Assert - assert (outdir / f"{CNAME}.s3_metadata.yaml").read_bytes() == b"metadata" + assert (outdir / f"{env.cname}.s3_metadata.yaml").read_bytes() == b"metadata" assert (outdir / "file1").read_bytes() == b"data1" assert (outdir / "file2").read_bytes() == b"data2" @@ -60,12 +59,12 @@ def test_download_to_directory_invalid_path(s3_setup): Test proper handling of download attempt to invalid path. 
""" # Arrange - _, bucket_name, _ = s3_setup - artifacts = S3Artifacts(bucket_name) + env = s3_setup + artifacts = S3Artifacts(env.bucket_name) # Act / Assert with pytest.raises(RuntimeError): - artifacts.download_to_directory({CNAME}, "/invalid/path/does/not/exist") + artifacts.download_to_directory({env.cname}, "/invalid/path/does/not/exist") def test_upload_from_directory_success(s3_setup): @@ -73,24 +72,24 @@ def test_upload_from_directory_success(s3_setup): Test upload of multiple artifacts from disk to bucket """ # Arrange - s3, bucket_name, tmp_path = s3_setup + env = s3_setup - release_path = tmp_path / f"{CNAME}.release" + release_path = env.tmp_path / f"{env.cname}.release" release_path.write_text(RELEASE_DATA) - for filename in [f"{CNAME}-file1", f"{CNAME}-file2"]: - (tmp_path / filename).write_bytes(b"dummy content") + for filename in [f"{env.cname}-file1", f"{env.cname}-file2"]: + (env.tmp_path / filename).write_bytes(b"dummy content") # Act - artifacts = S3Artifacts(bucket_name) - artifacts.upload_from_directory(CNAME, tmp_path) + artifacts = S3Artifacts(env.bucket_name) + artifacts.upload_from_directory(env.cname, env.tmp_path) # Assert - bucket = s3.Bucket(bucket_name) + bucket = env.s3.Bucket(env.bucket_name) keys = [obj.key for obj in bucket.objects.all()] - assert f"objects/{CNAME}/{CNAME}-file1" in keys - assert f"objects/{CNAME}/{CNAME}-file2" in keys - assert f"meta/singles/{CNAME}" in keys + assert f"objects/{env.cname}/{env.cname}-file1" in keys + assert f"objects/{env.cname}/{env.cname}-file2" in keys + assert f"meta/singles/{env.cname}" in keys def test_upload_from_directory_with_delete(s3_setup): @@ -98,29 +97,29 @@ def test_upload_from_directory_with_delete(s3_setup): Test that upload_from_directory deletes existing files before uploading when delete_before_push=True. 
""" - s3, bucket_name, tmp_path = s3_setup - bucket = s3.Bucket(bucket_name) + env = s3_setup + bucket = env.s3.Bucket(env.bucket_name) # Arrange: create release and artifact files locally - release = tmp_path / f"{CNAME}.release" + release = env.tmp_path / f"{env.cname}.release" release.write_text(RELEASE_DATA) - artifact = tmp_path / f"{CNAME}.kernel" + artifact = env.tmp_path / f"{env.cname}.kernel" artifact.write_bytes(b"fake") # Arrange: put dummy existing objects to be deleted - bucket.put_object(Key=f"objects/{CNAME}/{artifact.name}", Body=b"old data") - bucket.put_object(Key=f"meta/singles/{CNAME}", Body=b"old metadata") + bucket.put_object(Key=f"objects/{env.cname}/{artifact.name}", Body=b"old data") + bucket.put_object(Key=f"meta/singles/{env.cname}", Body=b"old metadata") - artifacts = S3Artifacts(bucket_name) + artifacts = S3Artifacts(env.bucket_name) # Act - artifacts.upload_from_directory(CNAME, tmp_path, delete_before_push=True) + artifacts.upload_from_directory(env.cname, env.tmp_path, delete_before_push=True) # Assert keys = [obj.key for obj in bucket.objects.all()] # The old key should no longer be present as old data (no duplicates) # but the new upload file key should exist (artifact uploaded) - assert f"objects/{CNAME}/{artifact.name}" in keys - assert f"meta/singles/{CNAME}" in keys + assert f"objects/{env.cname}/{artifact.name}" in keys + assert f"meta/singles/{env.cname}" in keys From f3f8c2f3ab1adeb3069c28be3578d65e6d001966 Mon Sep 17 00:00:00 2001 From: Tobias Wolf Date: Thu, 31 Jul 2025 17:35:13 +0200 Subject: [PATCH 11/12] Fix `DeprecationWarning: 'maxsplit'` for `re.split()` Signed-off-by: Tobias Wolf --- src/gardenlinux/features/cname.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/gardenlinux/features/cname.py b/src/gardenlinux/features/cname.py index fcd90265..d3e54e2a 100644 --- a/src/gardenlinux/features/cname.py +++ b/src/gardenlinux/features/cname.py @@ -136,7 +136,7 @@ def platform(self) -> str: :return: 
(str) Flavor """ - return re.split("[_-]", self._flavor, 1)[0] + return re.split("[_-]", self._flavor, maxsplit=1)[0] @property def version(self) -> Optional[str]: From 6b57f2f3507b10ff068c3764bcf4d1a16cbad1db Mon Sep 17 00:00:00 2001 From: Tobias Wolf Date: Thu, 31 Jul 2025 17:35:48 +0200 Subject: [PATCH 12/12] Code cleanup Signed-off-by: Tobias Wolf --- tests/s3/conftest.py | 11 +++-------- tests/s3/test_bucket.py | 4 +++- tests/s3/test_s3_artifacts.py | 4 +++- 3 files changed, 9 insertions(+), 10 deletions(-) diff --git a/tests/s3/conftest.py b/tests/s3/conftest.py index 949429ee..734e7b76 100644 --- a/tests/s3/conftest.py +++ b/tests/s3/conftest.py @@ -1,8 +1,10 @@ -import pytest +# -*- coding: utf-8 -*- + from dataclasses import dataclass from moto import mock_aws from hashlib import md5, sha256 import boto3 +import pytest from gardenlinux.features.cname import CName as RealCName @@ -10,12 +12,6 @@ REGION = "us-east-1" -# Dummy CName replacement -class DummyCName(RealCName): - def __init__(self, cname: str): # pylint: disable=unused-argument - super().__init__(cname) - - @dataclass(frozen=True) class S3Env: s3: object @@ -61,7 +57,6 @@ def s3_setup(tmp_path, monkeypatch): s3 = boto3.resource("s3", region_name=REGION) s3.create_bucket(Bucket=BUCKET_NAME) - monkeypatch.setattr("gardenlinux.s3.s3_artifacts.CName", DummyCName) monkeypatch.setattr("gardenlinux.s3.s3_artifacts.file_digest", dummy_digest) cname = make_cname() diff --git a/tests/s3/test_bucket.py b/tests/s3/test_bucket.py index bcf14e0a..94b8118d 100644 --- a/tests/s3/test_bucket.py +++ b/tests/s3/test_bucket.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- + """ Test the `Bucket` class in `src/gardenlinux/s3/bucket.py` by using mock AWS interactions provided by the `moto` module. @@ -5,9 +7,9 @@ A mock AWS environment is provided by the `s3_setup` fixture found in `conftest.py`. 
""" +from pathlib import Path import io import pytest -from pathlib import Path from gardenlinux.s3.bucket import Bucket diff --git a/tests/s3/test_s3_artifacts.py b/tests/s3/test_s3_artifacts.py index 21750a70..1cf6d845 100644 --- a/tests/s3/test_s3_artifacts.py +++ b/tests/s3/test_s3_artifacts.py @@ -1,6 +1,8 @@ -import pytest +# -*- coding: utf-8 -*- + from pathlib import Path from tempfile import TemporaryDirectory +import pytest from gardenlinux.s3.s3_artifacts import S3Artifacts