Skip to content

Commit b4720af

Browse files
[PRMP-1527] Include the ingestion location in the metadata outputs (#1155)
Co-authored-by: Robert Gaskin <106234256+robg-test@users.noreply.github.com>
1 parent 116bab2 commit b4720af

2 files changed

Lines changed: 65 additions & 17 deletions

File tree

lambdas/services/bulk_upload_metadata_processor_service.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
def copy_metadata_to_dated_folder(self):
    """Archive the processed metadata CSV under a dated key in S3, then delete the original.

    The destination key embeds both the source file's parent directory (the
    ingestion location) and the current timestamp, so repeated ingestions from
    different locations do not overwrite one another.
    """
    logger.info("Copying metadata CSV to dated folder")
    # Minute-level timestamp is sufficient granularity for the archive name.
    timestamp = datetime.now().strftime("%Y-%m-%d_%H-%M")
    # Parent directory of the ingested file identifies where it came from.
    source_directory = str(Path(self.file_key).parent)
    logger.info(f"Original file key is {self.file_key}")
    archive_key = f"metadata/{source_directory}_{timestamp}.csv"
    self.s3_service.copy_across_bucket(
        self.staging_bucket_name,
        self.file_key,
        self.staging_bucket_name,
        archive_key,
    )
    # Copy-then-delete: the staging object is removed only after the archive copy.
    self.s3_service.delete_object(self.staging_bucket_name, self.file_key)
425428

Lines changed: 61 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -1,21 +1,66 @@
1-
import pytest
2-
from handlers.bulk_upload_metadata_handler import lambda_handler
3-
from models.staging_metadata import METADATA_FILENAME
4-
from services.bulk_upload_metadata_service import BulkUploadMetadataService
1+
from unittest.mock import Mock
52

3+
from services.bulk_upload_metadata_processor_service import (
4+
BulkUploadMetadataProcessorService,
5+
)
66

7-
def test_lambda_call_process_metadata_of_service_class(
8-
set_env, event, context, mock_metadata_service
9-
):
10-
lambda_handler(event, context)
117

12-
mock_metadata_service.process_metadata.assert_called_once_with(METADATA_FILENAME)
8+
def test_copy_metadata_to_dated_folder_copies_and_deletes(mocker, monkeypatch):
    """copy_metadata_to_dated_folder archives the CSV under a dated, location-aware key and deletes the source."""
    # Environment the service constructor reads.
    monkeypatch.setenv("STAGING_STORE_BUCKET_NAME", "staging-bucket")
    monkeypatch.setenv("METADATA_SQS_QUEUE_URL", "https://example.com/metadata-queue")
    monkeypatch.setenv("EXPEDITE_SQS_QUEUE_URL", "https://example.com/expedite-queue")

    # Stub out every collaborator the service wires up on construction.
    for target in (
        "services.bulk_upload_metadata_processor_service.S3Service",
        "services.bulk_upload_metadata_processor_service.SQSService",
        "services.bulk_upload_metadata_processor_service.BulkUploadDynamoRepository",
        "services.bulk_upload_metadata_processor_service.BulkUploadSqsRepository",
        "services.bulk_upload_metadata_processor_service.BulkUploadS3Repository",
        "services.bulk_upload_metadata_processor_service.get_virus_scan_service",
    ):
        mocker.patch(target, autospec=True)

    # Pin the timestamp so the destination key is deterministic.
    mocked_datetime = mocker.patch(
        "services.bulk_upload_metadata_processor_service.datetime",
    )
    mocked_datetime.now.return_value.strftime.return_value = "2026-03-05_12-34"

    formatter_service = Mock()
    service = BulkUploadMetadataProcessorService(
        metadata_formatter_service=formatter_service,
        metadata_heading_remap={},
        input_file_location="some/dir/metadata.csv",
    )
    service.s3_service = Mock()

    service.copy_metadata_to_dated_folder()

    # The archive key includes the source directory and the pinned timestamp.
    expected_destination_key = "metadata/some/dir_2026-03-05_12-34.csv"
    service.s3_service.copy_across_bucket.assert_called_once_with(
        "staging-bucket",
        "some/dir/metadata.csv",
        "staging-bucket",
        expected_destination_key,
    )
    # The original staging object is deleted after the copy.
    service.s3_service.delete_object.assert_called_once_with(
        "staging-bucket",
        "some/dir/metadata.csv",
    )

0 commit comments

Comments (0)