|
1 | | -import pytest |
2 | | -from handlers.bulk_upload_metadata_handler import lambda_handler |
3 | | -from models.staging_metadata import METADATA_FILENAME |
4 | | -from services.bulk_upload_metadata_service import BulkUploadMetadataService |
| 1 | +from unittest.mock import Mock |
5 | 2 |
|
| 3 | +from services.bulk_upload_metadata_processor_service import ( |
| 4 | + BulkUploadMetadataProcessorService, |
| 5 | +) |
6 | 6 |
|
def test_copy_metadata_to_dated_folder_copies_and_deletes(mocker, monkeypatch):
    """copy_metadata_to_dated_folder copies the metadata file to a dated
    destination key in the staging bucket and deletes the original object."""
    monkeypatch.setenv("STAGING_STORE_BUCKET_NAME", "staging-bucket")
    monkeypatch.setenv("METADATA_SQS_QUEUE_URL", "https://example.com/metadata-queue")
    monkeypatch.setenv("EXPEDITE_SQS_QUEUE_URL", "https://example.com/expedite-queue")

    # Stub out every collaborator the service constructor wires up, so the
    # test exercises only the copy/delete behaviour.
    module_path = "services.bulk_upload_metadata_processor_service"
    for collaborator in (
        "S3Service",
        "SQSService",
        "BulkUploadDynamoRepository",
        "BulkUploadSqsRepository",
        "BulkUploadS3Repository",
        "get_virus_scan_service",
    ):
        mocker.patch(f"{module_path}.{collaborator}", autospec=True)

    # Freeze the timestamp that ends up embedded in the destination key.
    frozen_datetime = mocker.patch(f"{module_path}.datetime")
    frozen_datetime.now.return_value.strftime.return_value = "2026-03-05_12-34"

    subject = BulkUploadMetadataProcessorService(
        metadata_formatter_service=Mock(),
        metadata_heading_remap={},
        input_file_location="some/dir/metadata.csv",
    )
    subject.s3_service = Mock()

    subject.copy_metadata_to_dated_folder()

    # The file must be copied within the same bucket to the dated key ...
    subject.s3_service.copy_across_bucket.assert_called_once_with(
        "staging-bucket",
        "some/dir/metadata.csv",
        "staging-bucket",
        "metadata/some/dir_2026-03-05_12-34.csv",
    )
    # ... and the original object removed afterwards.
    subject.s3_service.delete_object.assert_called_once_with(
        "staging-bucket",
        "some/dir/metadata.csv",
    )
0 commit comments