From 328626a004b439ee9dd29113228b0ef0d8ae0471 Mon Sep 17 00:00:00 2001 From: Daniel Yip Date: Thu, 12 Feb 2026 09:32:22 +0000 Subject: [PATCH 01/14] VED-981 Add base infrastructure for the MNS Publisher feature (#1191) --- .github/workflows/quality-checks.yml | 11 + infrastructure/instance/dynamodb.tf | 2 + .../instance/mns_outbound_events_eb_pipe.tf | 118 +++++++++ .../instance/mns_publisher_lambda.tf | 238 ++++++++++++++++++ .../instance/sqs_mns_outbound_events.tf | 33 +++ lambdas/mns_publisher/Dockerfile | 34 +++ lambdas/mns_publisher/Makefile | 22 ++ lambdas/mns_publisher/README.md | 3 + lambdas/mns_publisher/poetry.lock | 137 ++++++++++ lambdas/mns_publisher/pyproject.toml | 19 ++ lambdas/mns_publisher/src/__init__.py | 0 lambdas/mns_publisher/src/lambda_handler.py | 10 + lambdas/mns_publisher/tests/__init__.py | 0 .../tests/test_lambda_handler.py | 9 + sonar-project.properties | 2 +- 15 files changed, 637 insertions(+), 1 deletion(-) create mode 100644 infrastructure/instance/mns_outbound_events_eb_pipe.tf create mode 100644 infrastructure/instance/mns_publisher_lambda.tf create mode 100644 infrastructure/instance/sqs_mns_outbound_events.tf create mode 100644 lambdas/mns_publisher/Dockerfile create mode 100644 lambdas/mns_publisher/Makefile create mode 100644 lambdas/mns_publisher/README.md create mode 100644 lambdas/mns_publisher/poetry.lock create mode 100644 lambdas/mns_publisher/pyproject.toml create mode 100644 lambdas/mns_publisher/src/__init__.py create mode 100644 lambdas/mns_publisher/src/lambda_handler.py create mode 100644 lambdas/mns_publisher/tests/__init__.py create mode 100644 lambdas/mns_publisher/tests/test_lambda_handler.py diff --git a/.github/workflows/quality-checks.yml b/.github/workflows/quality-checks.yml index a9ecd81089..c4916e5cd4 100644 --- a/.github/workflows/quality-checks.yml +++ b/.github/workflows/quality-checks.yml @@ -173,6 +173,17 @@ jobs: poetry run coverage run --source=src -m unittest discover || echo "mesh_processor 
tests failed" >> ../../failed_tests.txt poetry run coverage xml -o ../../mesh_processor-coverage.xml + - name: Run unittest with mns_publisher + working-directory: lambdas/mns_publisher + id: mnspublisher + env: + PYTHONPATH: ${{ env.LAMBDA_PATH }}/mns_publisher/src:${{ env.LAMBDA_PATH }}/mns_publisher/tests:${{ env.SHARED_PATH }}/src + continue-on-error: true + run: | + poetry install + poetry run coverage run --source=src -m unittest discover || echo "mns_publisher tests failed" >> ../../failed_tests.txt + poetry run coverage xml -o ../../mns_publisher-coverage.xml + - name: Run unittest with coverage-mns-subscription working-directory: lambdas/mns_subscription id: mns_subscription diff --git a/infrastructure/instance/dynamodb.tf b/infrastructure/instance/dynamodb.tf index 0609357b35..41d1827fe1 100644 --- a/infrastructure/instance/dynamodb.tf +++ b/infrastructure/instance/dynamodb.tf @@ -68,6 +68,8 @@ resource "aws_dynamodb_table" "delta-dynamodb-table" { name = "imms-${local.resource_scope}-delta" billing_mode = "PAY_PER_REQUEST" hash_key = "PK" + stream_enabled = true + stream_view_type = "NEW_IMAGE" deletion_protection_enabled = !local.is_temp attribute { diff --git a/infrastructure/instance/mns_outbound_events_eb_pipe.tf b/infrastructure/instance/mns_outbound_events_eb_pipe.tf new file mode 100644 index 0000000000..d68b282c11 --- /dev/null +++ b/infrastructure/instance/mns_outbound_events_eb_pipe.tf @@ -0,0 +1,118 @@ +# IAM Role for EventBridge Pipe +resource "aws_iam_role" "mns_outbound_events_eb_pipe" { + name = "${local.resource_scope}-mns-outbound-eventbridge-pipe-role" + assume_role_policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Action = "sts:AssumeRole" + Effect = "Allow" + Principal = { + Service = "pipes.amazonaws.com" + } + Condition = { + StringEquals = { + "aws:SourceAccount" = var.immunisation_account_id + } + } + } + ] + }) +} + +resource "aws_iam_role_policy" "mns_outbound_events_eb_pipe_source_policy" { + role = 
aws_iam_role.mns_outbound_events_eb_pipe.id + policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + "Effect" : "Allow", + "Action" : [ + "dynamodb:DescribeStream", + "dynamodb:GetRecords", + "dynamodb:GetShardIterator", + "dynamodb:ListStreams" + ], + "Resource" : aws_dynamodb_table.delta-dynamodb-table.stream_arn + }, + { + "Effect" : "Allow", + "Action" : [ + "kms:Decrypt", + "kms:GenerateDataKey" + ], + "Resource" : data.aws_kms_key.existing_dynamo_encryption_key.arn + }, + ] + }) +} + +resource "aws_iam_role_policy" "mns_outbound_events_eb_pipe_target_policy" { + role = aws_iam_role.mns_outbound_events_eb_pipe.id + policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Effect = "Allow" + Action = [ + "sqs:GetQueueAttributes", + "sqs:SendMessage", + ], + Resource = [ + aws_sqs_queue.mns_outbound_events.arn, + ] + }, + ] + }) +} + +resource "aws_iam_role_policy" "mns_outbound_events_eb_pipe_cw_log_policy" { + role = aws_iam_role.mns_outbound_events_eb_pipe.id + policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Effect = "Allow" + Action = [ + "logs:CreateLogGroup", + "logs:CreateLogStream", + "logs:PutLogEvents" + ], + Resource = [ + "arn:aws:logs:${var.aws_region}:${var.immunisation_account_id}:log-group:/aws/vendedlogs/pipes/${local.resource_scope}-mns-outbound-event-pipe-logs:*", + ] + }, + ] + }) +} + +resource "aws_cloudwatch_log_group" "mns_outbound_events_eb_pipe" { + name = "/aws/vendedlogs/pipes/${local.resource_scope}-mns-outbound-event-pipe-logs" + retention_in_days = 30 +} + +resource "aws_pipes_pipe" "mns_outbound_events" { + depends_on = [ + aws_iam_role_policy.mns_outbound_events_eb_pipe_source_policy, + aws_iam_role_policy.mns_outbound_events_eb_pipe_target_policy, + aws_iam_role_policy.mns_outbound_events_eb_pipe_cw_log_policy, + ] + name = "${local.resource_scope}-mns-outbound-events" + role_arn = aws_iam_role.mns_outbound_events_eb_pipe.arn + source = 
aws_dynamodb_table.delta-dynamodb-table.stream_arn + target = aws_sqs_queue.mns_outbound_events.arn + + source_parameters { + dynamodb_stream_parameters { + starting_position = "TRIM_HORIZON" + } + } + + log_configuration { + include_execution_data = ["ALL"] + level = "ERROR" + cloudwatch_logs_log_destination { + log_group_arn = aws_cloudwatch_log_group.pipe_log_group.arn + } + } +} diff --git a/infrastructure/instance/mns_publisher_lambda.tf b/infrastructure/instance/mns_publisher_lambda.tf new file mode 100644 index 0000000000..ff803a0ae5 --- /dev/null +++ b/infrastructure/instance/mns_publisher_lambda.tf @@ -0,0 +1,238 @@ +locals { + mns_publisher_lambda_dir = abspath("${path.root}/../../lambdas/mns_publisher") + mns_publisher_lambda_files = fileset(local.mns_publisher_lambda_dir, "**") + mns_publisher_lambda_dir_sha = sha1(join("", [for f in local.mns_publisher_lambda_files : filesha1("${local.mns_publisher_lambda_dir}/${f}")])) + mns_publisher_lambda_name = "${local.short_prefix}-mns-publisher-lambda" +} + +resource "aws_ecr_repository" "mns_publisher_lambda_repository" { + image_scanning_configuration { + scan_on_push = true + } + name = "${local.short_prefix}-mns-publisher-repo" + force_delete = local.is_temp +} + +# Module for building and pushing Docker image to ECR +module "mns_publisher_docker_image" { + source = "terraform-aws-modules/lambda/aws//modules/docker-build" + version = "8.5.0" + docker_file_path = "./mns_publisher/Dockerfile" + + create_ecr_repo = false + ecr_repo = aws_ecr_repository.mns_publisher_lambda_repository.name + ecr_repo_lifecycle_policy = jsonencode({ + "rules" : [ + { + "rulePriority" : 1, + "description" : "Keep only the last 2 images", + "selection" : { + "tagStatus" : "any", + "countType" : "imageCountMoreThan", + "countNumber" : 2 + }, + "action" : { + "type" : "expire" + } + } + ] + }) + + platform = "linux/amd64" + use_image_tag = false + source_path = abspath("${path.root}/../../lambdas") + triggers = { + dir_sha = 
local.mns_publisher_lambda_dir_sha + shared_dir_sha = local.shared_dir_sha + } +} + +resource "aws_ecr_repository_policy" "mns_publisher_lambda_ecr_image_retrieval_policy" { + repository = aws_ecr_repository.mns_publisher_lambda_repository.name + + policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + "Sid" : "LambdaECRImageRetrievalPolicy", + "Effect" : "Allow", + "Principal" : { + "Service" : "lambda.amazonaws.com" + }, + "Action" : [ + "ecr:BatchGetImage", + "ecr:DeleteRepositoryPolicy", + "ecr:GetDownloadUrlForLayer", + "ecr:GetRepositoryPolicy", + "ecr:SetRepositoryPolicy" + ], + "Condition" : { + "StringLike" : { + "aws:sourceArn" : "arn:aws:lambda:${var.aws_region}:${var.immunisation_account_id}:function:${local.mns_publisher_lambda_name}" + } + } + } + ] + }) +} + +# IAM Role for Lambda +resource "aws_iam_role" "mns_publisher_lambda_exec_role" { + name = "${local.mns_publisher_lambda_name}-exec-role" + assume_role_policy = jsonencode({ + Version = "2012-10-17", + Statement = [{ + Effect = "Allow", + Sid = "", + Principal = { + Service = "lambda.amazonaws.com" + }, + Action = "sts:AssumeRole" + }] + }) +} + +# Policy for Lambda execution role +resource "aws_iam_policy" "mns_publisher_lambda_exec_policy" { + name = "${local.mns_publisher_lambda_name}-exec-policy" + policy = jsonencode({ + Version = "2012-10-17", + Statement = [ + { + Effect = "Allow" + Action = [ + "logs:CreateLogGroup", + "logs:CreateLogStream", + "logs:PutLogEvents" + ] + Resource = "arn:aws:logs:${var.aws_region}:${var.immunisation_account_id}:log-group:/aws/lambda/${local.mns_publisher_lambda_name}:*" + }, + { + Effect = "Allow", + Action = [ + "ec2:CreateNetworkInterface", + "ec2:DescribeNetworkInterfaces", + "ec2:DeleteNetworkInterface" + ], + Resource = "*" + }, + { + "Effect" : "Allow", + "Action" : [ + "firehose:PutRecord", + "firehose:PutRecordBatch" + ], + "Resource" : "arn:aws:firehose:*:*:deliverystream/${module.splunk.firehose_stream_name}" + }, + { + Effect = 
"Allow", + Action = [ + "sqs:ReceiveMessage", + "sqs:DeleteMessage", + "sqs:GetQueueAttributes" + ], + Resource = aws_sqs_queue.mns_outbound_events.arn + } + ] + }) +} + +resource "aws_iam_policy" "mns_publisher_lambda_kms_access_policy" { + name = "${local.mns_publisher_lambda_name}-kms-policy" + description = "Allow Lambda to decrypt environment variables" + + policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Effect = "Allow" + Action = [ + "kms:Decrypt" + ] + Resource = data.aws_kms_key.existing_lambda_encryption_key.arn + } + ] + }) +} + +# Attach the execution policy to the Lambda role +resource "aws_iam_role_policy_attachment" "mns_publisher_lambda_exec_policy_attachment" { + role = aws_iam_role.mns_publisher_lambda_exec_role.name + policy_arn = aws_iam_policy.mns_publisher_lambda_exec_policy.arn +} + +# Attach the kms policy to the Lambda role +resource "aws_iam_role_policy_attachment" "mns_publisher_lambda_kms_policy_attachment" { + role = aws_iam_role.mns_publisher_lambda_exec_role.name + policy_arn = aws_iam_policy.mns_publisher_lambda_kms_access_policy.arn +} + +# Lambda Function with Security Group and VPC. +resource "aws_lambda_function" "mns_publisher_lambda" { + function_name = local.mns_publisher_lambda_name + role = aws_iam_role.mns_publisher_lambda_exec_role.arn + package_type = "Image" + image_uri = module.mns_publisher_docker_image.image_uri + architectures = ["x86_64"] + timeout = 120 + + vpc_config { + subnet_ids = local.private_subnet_ids + security_group_ids = [data.aws_security_group.existing_securitygroup.id] + } + + environment { + variables = { + SPLUNK_FIREHOSE_NAME = module.splunk.firehose_stream_name + } + } + + kms_key_arn = data.aws_kms_key.existing_lambda_encryption_key.arn + reserved_concurrent_executions = local.is_temp ? 
-1 : 20 + depends_on = [ + aws_cloudwatch_log_group.mns_publisher_lambda_log_group, + aws_iam_policy.mns_publisher_lambda_exec_policy + ] +} + +resource "aws_cloudwatch_log_group" "mns_publisher_lambda_log_group" { + name = "/aws/lambda/${local.mns_publisher_lambda_name}" + retention_in_days = 30 +} + +resource "aws_lambda_event_source_mapping" "mns_outbound_event_sqs_to_lambda" { + event_source_arn = aws_sqs_queue.mns_outbound_events.arn + function_name = aws_lambda_function.mns_publisher_lambda.arn + batch_size = 10 + enabled = true +} + +resource "aws_cloudwatch_log_metric_filter" "mns_publisher_error_logs" { + count = var.error_alarm_notifications_enabled ? 1 : 0 + + name = "${local.short_prefix}-MnsPublisherErrorLogsFilter" + pattern = "%\\[ERROR\\]%" + log_group_name = aws_cloudwatch_log_group.mns_publisher_lambda_log_group.name + + metric_transformation { + name = "${local.short_prefix}-MnsPublisherErrorLogs" + namespace = "${local.short_prefix}-MnsPublisherLambda" + value = "1" + } +} + +resource "aws_cloudwatch_metric_alarm" "mns_publisher_error_alarm" { + count = var.error_alarm_notifications_enabled ? 
1 : 0 + + alarm_name = "${local.mns_publisher_lambda_name}-error" + comparison_operator = "GreaterThanOrEqualToThreshold" + evaluation_periods = 1 + metric_name = "${local.short_prefix}-MnsPublisherErrorLogs" + namespace = "${local.short_prefix}-MnsPublisherLambda" + period = 120 + statistic = "Sum" + threshold = 1 + alarm_description = "This sets off an alarm for any error logs found in the MNS Publisher Lambda function" + alarm_actions = [data.aws_sns_topic.imms_system_alert_errors.arn] + treat_missing_data = "notBreaching" +} diff --git a/infrastructure/instance/sqs_mns_outbound_events.tf b/infrastructure/instance/sqs_mns_outbound_events.tf new file mode 100644 index 0000000000..039ca00840 --- /dev/null +++ b/infrastructure/instance/sqs_mns_outbound_events.tf @@ -0,0 +1,33 @@ +resource "aws_sqs_queue" "mns_outbound_events" { + name = "${local.resource_scope}-mns-outbound-events" + fifo_queue = false + visibility_timeout_seconds = 180 +} + +data "aws_iam_policy_document" "mns_outbound_events_sqs_policy" { + statement { + sid = "mns-outbound-allow-eb-pipe-access" + effect = "Allow" + + principals { + type = "AWS" + identifiers = [aws_iam_role.mns_outbound_events_eb_pipe.arn] + } + + actions = [ + "sqs:SendMessage", + ] + + resources = [ + aws_sqs_queue.mns_outbound_events.arn + ] + } +} + +resource "aws_sqs_queue_policy" "mns_outbound_events_sqs" { + queue_url = aws_sqs_queue.mns_outbound_events.id + policy = data.aws_iam_policy_document.mns_outbound_events_sqs_policy.json +} + +# TODO - (follow on once we have basics set up so Lambda coding can start) +# Add KMS encryption to queue, add DLQ and redrive diff --git a/lambdas/mns_publisher/Dockerfile b/lambdas/mns_publisher/Dockerfile new file mode 100644 index 0000000000..ade0dbb58f --- /dev/null +++ b/lambdas/mns_publisher/Dockerfile @@ -0,0 +1,34 @@ +FROM public.ecr.aws/lambda/python:3.11 AS base + +RUN mkdir -p /home/appuser && \ + echo 'appuser:x:1001:1001::/home/appuser:/sbin/nologin' >> /etc/passwd && \ + 
echo 'appuser:x:1001:' >> /etc/group && \ + chown -R 1001:1001 /home/appuser && pip install "poetry~=2.1.4" + +# Copy mns_publisher Poetry files +COPY ./mns_publisher/poetry.lock ./mns_publisher/pyproject.toml ./ + +# Install mns_publisher dependencies +WORKDIR /var/task +RUN poetry config virtualenvs.create false && poetry install --no-interaction --no-ansi --no-root --only main + +# ----------------------------- +FROM base AS build + +# Set working directory back to Lambda task root +WORKDIR /var/task + +# Copy shared source code +COPY ./shared/src/common ./common + +# Copy mns_publisher source code +COPY ./mns_publisher/src . + +# Set correct permissions +RUN chmod 644 $(find . -type f) && chmod 755 $(find . -type d) + +# Switch to the non-root user for running the container +USER 1001:1001 + +# Set the Lambda handler +CMD ["lambda_handler.lambda_handler"] diff --git a/lambdas/mns_publisher/Makefile b/lambdas/mns_publisher/Makefile new file mode 100644 index 0000000000..d50de37aaf --- /dev/null +++ b/lambdas/mns_publisher/Makefile @@ -0,0 +1,22 @@ +TEST_ENV := @PYTHONPATH=src:tests:../shared/src + +build: + docker build -t mnspublisher-lambda-build -f Dockerfile . 
+ +package: build + mkdir -p build + docker run --rm -v $(shell pwd)/build:/build mnspublisher-lambda-build + +test: + $(TEST_ENV) python -m unittest + +coverage-run: + $(TEST_ENV) coverage run --source=src -m unittest discover + +coverage-report: + $(TEST_ENV) coverage report -m + +coverage-html: + $(TEST_ENV) coverage html + +.PHONY: build package test diff --git a/lambdas/mns_publisher/README.md b/lambdas/mns_publisher/README.md new file mode 100644 index 0000000000..dea5a78fa4 --- /dev/null +++ b/lambdas/mns_publisher/README.md @@ -0,0 +1,3 @@ +# mns-publisher + +Add description - TODO diff --git a/lambdas/mns_publisher/poetry.lock b/lambdas/mns_publisher/poetry.lock new file mode 100644 index 0000000000..dd85f09247 --- /dev/null +++ b/lambdas/mns_publisher/poetry.lock @@ -0,0 +1,137 @@ +# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand. + +[[package]] +name = "aws-lambda-typing" +version = "2.20.0" +description = "A package that provides type hints for AWS Lambda event, context and response objects" +optional = false +python-versions = "<4.0,>=3.6" +groups = ["main"] +files = [ + {file = "aws-lambda-typing-2.20.0.tar.gz", hash = "sha256:78b0d8ebab73b3a6b0da98a7969f4e9c4bb497298ec50f3217da8a8dfba17154"}, + {file = "aws_lambda_typing-2.20.0-py3-none-any.whl", hash = "sha256:1d44264cabfeab5ac38e67ddd0c874e677b2cbbae77a42d0519df470e6bbb49b"}, +] + +[[package]] +name = "coverage" +version = "7.13.4" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "coverage-7.13.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0fc31c787a84f8cd6027eba44010517020e0d18487064cd3d8968941856d1415"}, + {file = "coverage-7.13.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a32ebc02a1805adf637fc8dec324b5cdacd2e493515424f70ee33799573d661b"}, + {file = "coverage-7.13.4-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = 
"sha256:e24f9156097ff9dc286f2f913df3a7f63c0e333dcafa3c196f2c18b4175ca09a"}, + {file = "coverage-7.13.4-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8041b6c5bfdc03257666e9881d33b1abc88daccaf73f7b6340fb7946655cd10f"}, + {file = "coverage-7.13.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2a09cfa6a5862bc2fc6ca7c3def5b2926194a56b8ab78ffcf617d28911123012"}, + {file = "coverage-7.13.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:296f8b0af861d3970c2a4d8c91d48eb4dd4771bcef9baedec6a9b515d7de3def"}, + {file = "coverage-7.13.4-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e101609bcbbfb04605ea1027b10dc3735c094d12d40826a60f897b98b1c30256"}, + {file = "coverage-7.13.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:aa3feb8db2e87ff5e6d00d7e1480ae241876286691265657b500886c98f38bda"}, + {file = "coverage-7.13.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4fc7fa81bbaf5a02801b65346c8b3e657f1d93763e58c0abdf7c992addd81a92"}, + {file = "coverage-7.13.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:33901f604424145c6e9c2398684b92e176c0b12df77d52db81c20abd48c3794c"}, + {file = "coverage-7.13.4-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:bb28c0f2cf2782508a40cec377935829d5fcc3ad9a3681375af4e84eb34b6b58"}, + {file = "coverage-7.13.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9d107aff57a83222ddbd8d9ee705ede2af2cc926608b57abed8ef96b50b7e8f9"}, + {file = "coverage-7.13.4-cp310-cp310-win32.whl", hash = "sha256:a6f94a7d00eb18f1b6d403c91a88fd58cfc92d4b16080dfdb774afc8294469bf"}, + {file = "coverage-7.13.4-cp310-cp310-win_amd64.whl", hash = "sha256:2cb0f1e000ebc419632bbe04366a8990b6e32c4e0b51543a6484ffe15eaeda95"}, + {file = "coverage-7.13.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d490ba50c3f35dd7c17953c68f3270e7ccd1c6642e2d2afe2d8e720b98f5a053"}, + 
{file = "coverage-7.13.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:19bc3c88078789f8ef36acb014d7241961dbf883fd2533d18cb1e7a5b4e28b11"}, + {file = "coverage-7.13.4-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3998e5a32e62fdf410c0dbd3115df86297995d6e3429af80b8798aad894ca7aa"}, + {file = "coverage-7.13.4-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8e264226ec98e01a8e1054314af91ee6cde0eacac4f465cc93b03dbe0bce2fd7"}, + {file = "coverage-7.13.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a3aa4e7b9e416774b21797365b358a6e827ffadaaca81b69ee02946852449f00"}, + {file = "coverage-7.13.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:71ca20079dd8f27fcf808817e281e90220475cd75115162218d0e27549f95fef"}, + {file = "coverage-7.13.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e2f25215f1a359ab17320b47bcdaca3e6e6356652e8256f2441e4ef972052903"}, + {file = "coverage-7.13.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d65b2d373032411e86960604dc4edac91fdfb5dca539461cf2cbe78327d1e64f"}, + {file = "coverage-7.13.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94eb63f9b363180aff17de3e7c8760c3ba94664ea2695c52f10111244d16a299"}, + {file = "coverage-7.13.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e856bf6616714c3a9fbc270ab54103f4e685ba236fa98c054e8f87f266c93505"}, + {file = "coverage-7.13.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:65dfcbe305c3dfe658492df2d85259e0d79ead4177f9ae724b6fb245198f55d6"}, + {file = "coverage-7.13.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b507778ae8a4c915436ed5c2e05b4a6cecfa70f734e19c22a005152a11c7b6a9"}, + {file = "coverage-7.13.4-cp311-cp311-win32.whl", hash = "sha256:784fc3cf8be001197b652d51d3fd259b1e2262888693a4636e18879f613a62a9"}, + {file = 
"coverage-7.13.4-cp311-cp311-win_amd64.whl", hash = "sha256:2421d591f8ca05b308cf0092807308b2facbefe54af7c02ac22548b88b95c98f"}, + {file = "coverage-7.13.4-cp311-cp311-win_arm64.whl", hash = "sha256:79e73a76b854d9c6088fe5d8b2ebe745f8681c55f7397c3c0a016192d681045f"}, + {file = "coverage-7.13.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:02231499b08dabbe2b96612993e5fc34217cdae907a51b906ac7fca8027a4459"}, + {file = "coverage-7.13.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40aa8808140e55dc022b15d8aa7f651b6b3d68b365ea0398f1441e0b04d859c3"}, + {file = "coverage-7.13.4-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5b856a8ccf749480024ff3bd7310adaef57bf31fd17e1bfc404b7940b6986634"}, + {file = "coverage-7.13.4-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c048ea43875fbf8b45d476ad79f179809c590ec7b79e2035c662e7afa3192e3"}, + {file = "coverage-7.13.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b7b38448866e83176e28086674fe7368ab8590e4610fb662b44e345b86d63ffa"}, + {file = "coverage-7.13.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:de6defc1c9badbf8b9e67ae90fd00519186d6ab64e5cc5f3d21359c2a9b2c1d3"}, + {file = "coverage-7.13.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7eda778067ad7ffccd23ecffce537dface96212576a07924cbf0d8799d2ded5a"}, + {file = "coverage-7.13.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e87f6c587c3f34356c3759f0420693e35e7eb0e2e41e4c011cb6ec6ecbbf1db7"}, + {file = "coverage-7.13.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8248977c2e33aecb2ced42fef99f2d319e9904a36e55a8a68b69207fb7e43edc"}, + {file = "coverage-7.13.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:25381386e80ae727608e662474db537d4df1ecd42379b5ba33c84633a2b36d47"}, + {file = 
"coverage-7.13.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:ee756f00726693e5ba94d6df2bdfd64d4852d23b09bb0bc700e3b30e6f333985"}, + {file = "coverage-7.13.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fdfc1e28e7c7cdce44985b3043bc13bbd9c747520f94a4d7164af8260b3d91f0"}, + {file = "coverage-7.13.4-cp312-cp312-win32.whl", hash = "sha256:01d4cbc3c283a17fc1e42d614a119f7f438eabb593391283adca8dc86eff1246"}, + {file = "coverage-7.13.4-cp312-cp312-win_amd64.whl", hash = "sha256:9401ebc7ef522f01d01d45532c68c5ac40fb27113019b6b7d8b208f6e9baa126"}, + {file = "coverage-7.13.4-cp312-cp312-win_arm64.whl", hash = "sha256:b1ec7b6b6e93255f952e27ab58fbc68dcc468844b16ecbee881aeb29b6ab4d8d"}, + {file = "coverage-7.13.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b66a2da594b6068b48b2692f043f35d4d3693fb639d5ea8b39533c2ad9ac3ab9"}, + {file = "coverage-7.13.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3599eb3992d814d23b35c536c28df1a882caa950f8f507cef23d1cbf334995ac"}, + {file = "coverage-7.13.4-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:93550784d9281e374fb5a12bf1324cc8a963fd63b2d2f223503ef0fd4aa339ea"}, + {file = "coverage-7.13.4-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b720ce6a88a2755f7c697c23268ddc47a571b88052e6b155224347389fdf6a3b"}, + {file = "coverage-7.13.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7b322db1284a2ed3aa28ffd8ebe3db91c929b7a333c0820abec3d838ef5b3525"}, + {file = "coverage-7.13.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f4594c67d8a7c89cf922d9df0438c7c7bb022ad506eddb0fdb2863359ff78242"}, + {file = "coverage-7.13.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:53d133df809c743eb8bce33b24bcababb371f4441340578cd406e084d94a6148"}, + {file = "coverage-7.13.4-cp313-cp313-musllinux_1_2_aarch64.whl", 
hash = "sha256:76451d1978b95ba6507a039090ba076105c87cc76fc3efd5d35d72093964d49a"}, + {file = "coverage-7.13.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7f57b33491e281e962021de110b451ab8a24182589be17e12a22c79047935e23"}, + {file = "coverage-7.13.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:1731dc33dc276dafc410a885cbf5992f1ff171393e48a21453b78727d090de80"}, + {file = "coverage-7.13.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:bd60d4fe2f6fa7dff9223ca1bbc9f05d2b6697bc5961072e5d3b952d46e1b1ea"}, + {file = "coverage-7.13.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9181a3ccead280b828fae232df12b16652702b49d41e99d657f46cc7b1f6ec7a"}, + {file = "coverage-7.13.4-cp313-cp313-win32.whl", hash = "sha256:f53d492307962561ac7de4cd1de3e363589b000ab69617c6156a16ba7237998d"}, + {file = "coverage-7.13.4-cp313-cp313-win_amd64.whl", hash = "sha256:e6f70dec1cc557e52df5306d051ef56003f74d56e9c4dd7ddb07e07ef32a84dd"}, + {file = "coverage-7.13.4-cp313-cp313-win_arm64.whl", hash = "sha256:fb07dc5da7e849e2ad31a5d74e9bece81f30ecf5a42909d0a695f8bd1874d6af"}, + {file = "coverage-7.13.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:40d74da8e6c4b9ac18b15331c4b5ebc35a17069410cad462ad4f40dcd2d50c0d"}, + {file = "coverage-7.13.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4223b4230a376138939a9173f1bdd6521994f2aff8047fae100d6d94d50c5a12"}, + {file = "coverage-7.13.4-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1d4be36a5114c499f9f1f9195e95ebf979460dbe2d88e6816ea202010ba1c34b"}, + {file = "coverage-7.13.4-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:200dea7d1e8095cc6e98cdabe3fd1d21ab17d3cee6dab00cadbb2fe35d9c15b9"}, + {file = "coverage-7.13.4-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8eb931ee8e6d8243e253e5ed7336deea6904369d2fd8ae6e43f68abbf167092"}, + {file = 
"coverage-7.13.4-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:75eab1ebe4f2f64d9509b984f9314d4aa788540368218b858dad56dc8f3e5eb9"}, + {file = "coverage-7.13.4-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c35eb28c1d085eb7d8c9b3296567a1bebe03ce72962e932431b9a61f28facf26"}, + {file = "coverage-7.13.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb88b316ec33760714a4720feb2816a3a59180fd58c1985012054fa7aebee4c2"}, + {file = "coverage-7.13.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7d41eead3cc673cbd38a4417deb7fd0b4ca26954ff7dc6078e33f6ff97bed940"}, + {file = "coverage-7.13.4-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:fb26a934946a6afe0e326aebe0730cdff393a8bc0bbb65a2f41e30feddca399c"}, + {file = "coverage-7.13.4-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:dae88bc0fc77edaa65c14be099bd57ee140cf507e6bfdeea7938457ab387efb0"}, + {file = "coverage-7.13.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:845f352911777a8e722bfce168958214951e07e47e5d5d9744109fa5fe77f79b"}, + {file = "coverage-7.13.4-cp313-cp313t-win32.whl", hash = "sha256:2fa8d5f8de70688a28240de9e139fa16b153cc3cbb01c5f16d88d6505ebdadf9"}, + {file = "coverage-7.13.4-cp313-cp313t-win_amd64.whl", hash = "sha256:9351229c8c8407645840edcc277f4a2d44814d1bc34a2128c11c2a031d45a5dd"}, + {file = "coverage-7.13.4-cp313-cp313t-win_arm64.whl", hash = "sha256:30b8d0512f2dc8c8747557e8fb459d6176a2c9e5731e2b74d311c03b78451997"}, + {file = "coverage-7.13.4-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:300deaee342f90696ed186e3a00c71b5b3d27bffe9e827677954f4ee56969601"}, + {file = "coverage-7.13.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:29e3220258d682b6226a9b0925bc563ed9a1ebcff3cad30f043eceea7eaf2689"}, + {file = "coverage-7.13.4-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = 
"sha256:391ee8f19bef69210978363ca930f7328081c6a0152f1166c91f0b5fdd2a773c"}, + {file = "coverage-7.13.4-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0dd7ab8278f0d58a0128ba2fca25824321f05d059c1441800e934ff2efa52129"}, + {file = "coverage-7.13.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:78cdf0d578b15148b009ccf18c686aa4f719d887e76e6b40c38ffb61d264a552"}, + {file = "coverage-7.13.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:48685fee12c2eb3b27c62f2658e7ea21e9c3239cba5a8a242801a0a3f6a8c62a"}, + {file = "coverage-7.13.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:4e83efc079eb39480e6346a15a1bcb3e9b04759c5202d157e1dd4303cd619356"}, + {file = "coverage-7.13.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ecae9737b72408d6a950f7e525f30aca12d4bd8dd95e37342e5beb3a2a8c4f71"}, + {file = "coverage-7.13.4-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:ae4578f8528569d3cf303fef2ea569c7f4c4059a38c8667ccef15c6e1f118aa5"}, + {file = "coverage-7.13.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:6fdef321fdfbb30a197efa02d48fcd9981f0d8ad2ae8903ac318adc653f5df98"}, + {file = "coverage-7.13.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b0f6ccf3dbe577170bebfce1318707d0e8c3650003cb4b3a9dd744575daa8b5"}, + {file = "coverage-7.13.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75fcd519f2a5765db3f0e391eb3b7d150cce1a771bf4c9f861aeab86c767a3c0"}, + {file = "coverage-7.13.4-cp314-cp314-win32.whl", hash = "sha256:8e798c266c378da2bd819b0677df41ab46d78065fb2a399558f3f6cae78b2fbb"}, + {file = "coverage-7.13.4-cp314-cp314-win_amd64.whl", hash = "sha256:245e37f664d89861cf2329c9afa2c1fe9e6d4e1a09d872c947e70718aeeac505"}, + {file = "coverage-7.13.4-cp314-cp314-win_arm64.whl", hash = "sha256:ad27098a189e5838900ce4c2a99f2fe42a0bf0c2093c17c69b45a71579e8d4a2"}, + {file = 
"coverage-7.13.4-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:85480adfb35ffc32d40918aad81b89c69c9cc5661a9b8a81476d3e645321a056"}, + {file = "coverage-7.13.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:79be69cf7f3bf9b0deeeb062eab7ac7f36cd4cc4c4dd694bd28921ba4d8596cc"}, + {file = "coverage-7.13.4-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:caa421e2684e382c5d8973ac55e4f36bed6821a9bad5c953494de960c74595c9"}, + {file = "coverage-7.13.4-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:14375934243ee05f56c45393fe2ce81fe5cc503c07cee2bdf1725fb8bef3ffaf"}, + {file = "coverage-7.13.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:25a41c3104d08edb094d9db0d905ca54d0cd41c928bb6be3c4c799a54753af55"}, + {file = "coverage-7.13.4-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6f01afcff62bf9a08fb32b2c1d6e924236c0383c02c790732b6537269e466a72"}, + {file = "coverage-7.13.4-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:eb9078108fbf0bcdde37c3f4779303673c2fa1fe8f7956e68d447d0dd426d38a"}, + {file = "coverage-7.13.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0e086334e8537ddd17e5f16a344777c1ab8194986ec533711cbe6c41cde841b6"}, + {file = "coverage-7.13.4-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:725d985c5ab621268b2edb8e50dfe57633dc69bda071abc470fed55a14935fd3"}, + {file = "coverage-7.13.4-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:3c06f0f1337c667b971ca2f975523347e63ec5e500b9aa5882d91931cd3ef750"}, + {file = "coverage-7.13.4-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:590c0ed4bf8e85f745e6b805b2e1c457b2e33d5255dd9729743165253bc9ad39"}, + {file = "coverage-7.13.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:eb30bf180de3f632cd043322dad5751390e5385108b2807368997d1a92a509d0"}, + {file = 
"coverage-7.13.4-cp314-cp314t-win32.whl", hash = "sha256:c4240e7eded42d131a2d2c4dec70374b781b043ddc79a9de4d55ca71f8e98aea"}, + {file = "coverage-7.13.4-cp314-cp314t-win_amd64.whl", hash = "sha256:4c7d3cc01e7350f2f0f6f7036caaf5673fb56b6998889ccfe9e1c1fe75a9c932"}, + {file = "coverage-7.13.4-cp314-cp314t-win_arm64.whl", hash = "sha256:23e3f687cf945070d1c90f85db66d11e3025665d8dafa831301a0e0038f3db9b"}, + {file = "coverage-7.13.4-py3-none-any.whl", hash = "sha256:1af1641e57cf7ba1bd67d677c9abdbcd6cc2ab7da3bca7fa1e2b7e50e65f2ad0"}, + {file = "coverage-7.13.4.tar.gz", hash = "sha256:e5c8f6ed1e61a8b2dcdf31eb0b9bbf0130750ca79c1c49eb898e2ad86f5ccc91"}, +] + +[package.extras] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] + +[metadata] +lock-version = "2.1" +python-versions = "~3.11" +content-hash = "0a2b2f2ca62bb0da43789e3a2c1c8c943545f1461b988de1c012d9de64cae545" diff --git a/lambdas/mns_publisher/pyproject.toml b/lambdas/mns_publisher/pyproject.toml new file mode 100644 index 0000000000..009b8bc9a0 --- /dev/null +++ b/lambdas/mns_publisher/pyproject.toml @@ -0,0 +1,19 @@ +[tool.poetry] +name = "mns_publisher" +version = "1.0.0" +description = "Lambda function to publish immunisation events to MNS" +authors = ["VED Team "] +readme = "README.md" +packages = [ + {include = "src"}, + {include = "common", from = "../shared/src"} +] + +[tool.poetry.dependencies] +python = "~3.11" +aws-lambda-typing = "~2.20.0" +coverage = "^7.13.2" + +[build-system] +requires = ["poetry-core >= 1.5.0"] +build-backend = "poetry.core.masonry.api" diff --git a/lambdas/mns_publisher/src/__init__.py b/lambdas/mns_publisher/src/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/lambdas/mns_publisher/src/lambda_handler.py b/lambdas/mns_publisher/src/lambda_handler.py new file mode 100644 index 0000000000..0dbec66812 --- /dev/null +++ b/lambdas/mns_publisher/src/lambda_handler.py @@ -0,0 +1,10 @@ +from aws_lambda_typing import context, events + + +def 
lambda_handler(event: events.SQSEvent, _: context.Context) -> bool: + event_records = event.get("Records", []) + + for record in event_records: + print(record) + + return True diff --git a/lambdas/mns_publisher/tests/__init__.py b/lambdas/mns_publisher/tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/lambdas/mns_publisher/tests/test_lambda_handler.py b/lambdas/mns_publisher/tests/test_lambda_handler.py new file mode 100644 index 0000000000..d6409ff2a4 --- /dev/null +++ b/lambdas/mns_publisher/tests/test_lambda_handler.py @@ -0,0 +1,9 @@ +from unittest import TestCase +from unittest.mock import Mock + +from lambda_handler import lambda_handler + + +class TestLambdaHandler(TestCase): + def test_lambda_handler_returns_true(self): + lambda_handler({"Records": [{"messageId": "1234"}]}, Mock()) diff --git a/sonar-project.properties b/sonar-project.properties index c09d09b026..281ab9433e 100644 --- a/sonar-project.properties +++ b/sonar-project.properties @@ -4,7 +4,7 @@ sonar.organization=nhsdigital sonar.host.url=https://sonarcloud.io sonar.python.version=3.11 sonar.exclusions=**/proxies/**,**/utilities/scripts/**,**/infrastructure/account/**,**/infrastructure/instance/**,**/terraform_aws_backup/**,**/tests/** -sonar.python.coverage.reportPaths=backend-coverage.xml,delta-coverage.xml,ack-lambda-coverage.xml,filenameprocessor-coverage.xml,recordforwarder-coverage.xml,recordprocessor-coverage.xml,mesh_processor-coverage.xml,redis_sync-coverage.xml,mns_subscription-coverage.xml,id_sync-coverage.xml,shared-coverage.xml,batchprocessorfilter-coverage.xml +sonar.python.coverage.reportPaths=backend-coverage.xml,delta-coverage.xml,ack-lambda-coverage.xml,filenameprocessor-coverage.xml,recordforwarder-coverage.xml,recordprocessor-coverage.xml,mesh_processor-coverage.xml,redis_sync-coverage.xml,mns_subscription-coverage.xml,id_sync-coverage.xml,shared-coverage.xml,batchprocessorfilter-coverage.xml,mns_publisher-coverage.xml 
sonar.cpd.exclusions=**/Dockerfile sonar.issue.ignore.multicriteria=exclude_http_urls,exclude_writable_dirs,exclude_force_dict sonar.issue.ignore.multicriteria.exclude_http_urls.ruleKey=python:S5332 From 605eb2f47fba6a3b39a62f0dda8b4da4295f8530 Mon Sep 17 00:00:00 2001 From: Daniel Yip Date: Mon, 16 Feb 2026 13:29:03 +0000 Subject: [PATCH 02/14] VED-981 (Part 2) Add DLQ, redrive policy, and encryption to queues (#1203) --- infrastructure/instance/dynamodb.tf | 4 +- .../dev/internal-qa/variables.tfvars | 1 + .../environments/dev/pr/variables.tfvars | 1 + .../preprod/int-blue/variables.tfvars | 1 + .../preprod/int-green/variables.tfvars | 1 + .../environments/prod/blue/variables.tfvars | 1 + .../environments/prod/green/variables.tfvars | 1 + infrastructure/instance/mns_publisher.tf | 22 +++++ .../mns_outbound_events_eb_pipe.tf | 16 ++-- .../mns_outbound_events_kms_key.tf | 87 +++++++++++++++++++ .../mns_publisher}/mns_publisher_lambda.tf | 46 ++++++---- .../mns_publisher/sqs_mns_outbound_events.tf | 49 +++++++++++ .../modules/mns_publisher/variables.tf | 74 ++++++++++++++++ .../instance/sqs_mns_outbound_events.tf | 33 ------- infrastructure/instance/variables.tf | 7 +- 15 files changed, 281 insertions(+), 63 deletions(-) create mode 100644 infrastructure/instance/mns_publisher.tf rename infrastructure/instance/{ => modules/mns_publisher}/mns_outbound_events_eb_pipe.tf (81%) create mode 100644 infrastructure/instance/modules/mns_publisher/mns_outbound_events_kms_key.tf rename infrastructure/instance/{ => modules/mns_publisher}/mns_publisher_lambda.tf (83%) create mode 100644 infrastructure/instance/modules/mns_publisher/sqs_mns_outbound_events.tf create mode 100644 infrastructure/instance/modules/mns_publisher/variables.tf delete mode 100644 infrastructure/instance/sqs_mns_outbound_events.tf diff --git a/infrastructure/instance/dynamodb.tf b/infrastructure/instance/dynamodb.tf index 41d1827fe1..0cfcbbbf06 100644 --- a/infrastructure/instance/dynamodb.tf +++ 
b/infrastructure/instance/dynamodb.tf @@ -68,8 +68,8 @@ resource "aws_dynamodb_table" "delta-dynamodb-table" { name = "imms-${local.resource_scope}-delta" billing_mode = "PAY_PER_REQUEST" hash_key = "PK" - stream_enabled = true - stream_view_type = "NEW_IMAGE" + stream_enabled = var.mns_publisher_feature_enabled + stream_view_type = var.mns_publisher_feature_enabled ? "NEW_IMAGE" : null deletion_protection_enabled = !local.is_temp attribute { diff --git a/infrastructure/instance/environments/dev/internal-qa/variables.tfvars b/infrastructure/instance/environments/dev/internal-qa/variables.tfvars index c90d226b74..d671f09c6d 100644 --- a/infrastructure/instance/environments/dev/internal-qa/variables.tfvars +++ b/infrastructure/instance/environments/dev/internal-qa/variables.tfvars @@ -3,5 +3,6 @@ immunisation_account_id = "345594581768" dspp_core_account_id = "603871901111" pds_environment = "int" error_alarm_notifications_enabled = false +mns_publisher_feature_enabled = true create_mesh_processor = false has_sub_environment_scope = true diff --git a/infrastructure/instance/environments/dev/pr/variables.tfvars b/infrastructure/instance/environments/dev/pr/variables.tfvars index c90d226b74..7d17c90f95 100644 --- a/infrastructure/instance/environments/dev/pr/variables.tfvars +++ b/infrastructure/instance/environments/dev/pr/variables.tfvars @@ -3,5 +3,6 @@ immunisation_account_id = "345594581768" dspp_core_account_id = "603871901111" pds_environment = "int" error_alarm_notifications_enabled = false +mns_publisher_feature_enabled = true # Switch this off once tested fully e2e in Lambda branch create_mesh_processor = false has_sub_environment_scope = true diff --git a/infrastructure/instance/environments/preprod/int-blue/variables.tfvars b/infrastructure/instance/environments/preprod/int-blue/variables.tfvars index f246d30e0e..afd512ca36 100644 --- a/infrastructure/instance/environments/preprod/int-blue/variables.tfvars +++ 
b/infrastructure/instance/environments/preprod/int-blue/variables.tfvars @@ -3,6 +3,7 @@ immunisation_account_id = "084828561157" dspp_core_account_id = "603871901111" pds_environment = "int" error_alarm_notifications_enabled = true +mns_publisher_feature_enabled = true # mesh no invocation period metric set to 3 days (in seconds) for preprod environment i.e 3 * 24 * 60 * 60 mesh_no_invocation_period_seconds = 259200 diff --git a/infrastructure/instance/environments/preprod/int-green/variables.tfvars b/infrastructure/instance/environments/preprod/int-green/variables.tfvars index f246d30e0e..afd512ca36 100644 --- a/infrastructure/instance/environments/preprod/int-green/variables.tfvars +++ b/infrastructure/instance/environments/preprod/int-green/variables.tfvars @@ -3,6 +3,7 @@ immunisation_account_id = "084828561157" dspp_core_account_id = "603871901111" pds_environment = "int" error_alarm_notifications_enabled = true +mns_publisher_feature_enabled = true # mesh no invocation period metric set to 3 days (in seconds) for preprod environment i.e 3 * 24 * 60 * 60 mesh_no_invocation_period_seconds = 259200 diff --git a/infrastructure/instance/environments/prod/blue/variables.tfvars b/infrastructure/instance/environments/prod/blue/variables.tfvars index aa33184631..ff6518b348 100644 --- a/infrastructure/instance/environments/prod/blue/variables.tfvars +++ b/infrastructure/instance/environments/prod/blue/variables.tfvars @@ -4,6 +4,7 @@ dspp_core_account_id = "232116723729" mns_account_id = "758334270304" pds_environment = "prod" error_alarm_notifications_enabled = true +mns_publisher_feature_enabled = true # mesh no invocation period metric set to 1 day (in seconds) for prod environment i.e 1 * 24 * 60 * 60 mesh_no_invocation_period_seconds = 86400 diff --git a/infrastructure/instance/environments/prod/green/variables.tfvars b/infrastructure/instance/environments/prod/green/variables.tfvars index aa33184631..ff6518b348 100644 --- 
a/infrastructure/instance/environments/prod/green/variables.tfvars +++ b/infrastructure/instance/environments/prod/green/variables.tfvars @@ -4,6 +4,7 @@ dspp_core_account_id = "232116723729" mns_account_id = "758334270304" pds_environment = "prod" error_alarm_notifications_enabled = true +mns_publisher_feature_enabled = true # mesh no invocation period metric set to 1 day (in seconds) for prod environment i.e 1 * 24 * 60 * 60 mesh_no_invocation_period_seconds = 86400 diff --git a/infrastructure/instance/mns_publisher.tf b/infrastructure/instance/mns_publisher.tf new file mode 100644 index 0000000000..f705abdf92 --- /dev/null +++ b/infrastructure/instance/mns_publisher.tf @@ -0,0 +1,22 @@ +module "mns_publisher" { + source = "./modules/mns_publisher" + count = var.mns_publisher_feature_enabled ? 1 : 0 + + ddb_delta_stream_arn = aws_dynamodb_table.delta-dynamodb-table.stream_arn + dynamo_kms_encryption_key_arn = data.aws_kms_key.existing_dynamo_encryption_key.arn + enable_lambda_alarm = var.error_alarm_notifications_enabled # consider just INT and PROD + immunisation_account_id = var.immunisation_account_id + is_temp = local.is_temp + lambda_kms_encryption_key_arn = data.aws_kms_key.existing_lambda_encryption_key.arn + mns_publisher_resource_name_prefix = "${local.resource_scope}-mns-outbound-events" + + private_subnet_ids = local.private_subnet_ids + security_group_id = data.aws_security_group.existing_securitygroup.id + + shared_dir_sha = local.shared_dir_sha + splunk_firehose_stream_name = module.splunk.firehose_stream_name + + short_prefix = local.short_prefix + + system_alarm_sns_topic_arn = data.aws_sns_topic.imms_system_alert_errors.arn +} diff --git a/infrastructure/instance/mns_outbound_events_eb_pipe.tf b/infrastructure/instance/modules/mns_publisher/mns_outbound_events_eb_pipe.tf similarity index 81% rename from infrastructure/instance/mns_outbound_events_eb_pipe.tf rename to infrastructure/instance/modules/mns_publisher/mns_outbound_events_eb_pipe.tf 
index d68b282c11..e3cc604574 100644 --- a/infrastructure/instance/mns_outbound_events_eb_pipe.tf +++ b/infrastructure/instance/modules/mns_publisher/mns_outbound_events_eb_pipe.tf @@ -1,6 +1,6 @@ # IAM Role for EventBridge Pipe resource "aws_iam_role" "mns_outbound_events_eb_pipe" { - name = "${local.resource_scope}-mns-outbound-eventbridge-pipe-role" + name = "${var.mns_publisher_resource_name_prefix}-eventbridge-pipe-role" assume_role_policy = jsonencode({ Version = "2012-10-17" Statement = [ @@ -33,7 +33,7 @@ resource "aws_iam_role_policy" "mns_outbound_events_eb_pipe_source_policy" { "dynamodb:GetShardIterator", "dynamodb:ListStreams" ], - "Resource" : aws_dynamodb_table.delta-dynamodb-table.stream_arn + "Resource" : var.ddb_delta_stream_arn }, { "Effect" : "Allow", @@ -41,7 +41,7 @@ resource "aws_iam_role_policy" "mns_outbound_events_eb_pipe_source_policy" { "kms:Decrypt", "kms:GenerateDataKey" ], - "Resource" : data.aws_kms_key.existing_dynamo_encryption_key.arn + "Resource" : var.dynamo_kms_encryption_key_arn }, ] }) @@ -79,7 +79,7 @@ resource "aws_iam_role_policy" "mns_outbound_events_eb_pipe_cw_log_policy" { "logs:PutLogEvents" ], Resource = [ - "arn:aws:logs:${var.aws_region}:${var.immunisation_account_id}:log-group:/aws/vendedlogs/pipes/${local.resource_scope}-mns-outbound-event-pipe-logs:*", + "arn:aws:logs:${var.aws_region}:${var.immunisation_account_id}:log-group:/aws/vendedlogs/pipes/${var.mns_publisher_resource_name_prefix}-pipe-logs:*", ] }, ] @@ -87,7 +87,7 @@ resource "aws_iam_role_policy" "mns_outbound_events_eb_pipe_cw_log_policy" { } resource "aws_cloudwatch_log_group" "mns_outbound_events_eb_pipe" { - name = "/aws/vendedlogs/pipes/${local.resource_scope}-mns-outbound-event-pipe-logs" + name = "/aws/vendedlogs/pipes/${var.mns_publisher_resource_name_prefix}-pipe-logs" retention_in_days = 30 } @@ -97,9 +97,9 @@ resource "aws_pipes_pipe" "mns_outbound_events" { aws_iam_role_policy.mns_outbound_events_eb_pipe_target_policy, 
aws_iam_role_policy.mns_outbound_events_eb_pipe_cw_log_policy, ] - name = "${local.resource_scope}-mns-outbound-events" + name = "${var.mns_publisher_resource_name_prefix}-pipe" role_arn = aws_iam_role.mns_outbound_events_eb_pipe.arn - source = aws_dynamodb_table.delta-dynamodb-table.stream_arn + source = var.ddb_delta_stream_arn target = aws_sqs_queue.mns_outbound_events.arn source_parameters { @@ -112,7 +112,7 @@ resource "aws_pipes_pipe" "mns_outbound_events" { include_execution_data = ["ALL"] level = "ERROR" cloudwatch_logs_log_destination { - log_group_arn = aws_cloudwatch_log_group.pipe_log_group.arn + log_group_arn = aws_cloudwatch_log_group.mns_outbound_events_eb_pipe.arn } } } diff --git a/infrastructure/instance/modules/mns_publisher/mns_outbound_events_kms_key.tf b/infrastructure/instance/modules/mns_publisher/mns_outbound_events_kms_key.tf new file mode 100644 index 0000000000..cf03f775f8 --- /dev/null +++ b/infrastructure/instance/modules/mns_publisher/mns_outbound_events_kms_key.tf @@ -0,0 +1,87 @@ +resource "aws_kms_key" "mns_outbound_events" { + description = "KMS key for encrypting MNS outbound immunisation events in SQS" + key_usage = "ENCRYPT_DECRYPT" + enable_key_rotation = true + + policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Sid = "EnableRootPermissions" + Effect = "Allow" + Principal = { + AWS = "arn:aws:iam::${var.immunisation_account_id}:root" + }, + Action = [ + "kms:Create*", + "kms:Describe*", + "kms:Enable*", + "kms:List*", + "kms:Put*", + "kms:Update*", + "kms:Revoke*", + "kms:Disable*", + "kms:Get*", + "kms:Delete*", + "kms:ScheduleKeyDeletion", + "kms:CancelKeyDeletion", + "kms:GenerateDataKey*", + "kms:Decrypt", + "kms:Tag*" + ], + Resource = "*" + }, + { + Sid = "AllowSQSUseOfKey" + Effect = "Allow" + Principal = { + Service = "sqs.amazonaws.com" + } + Action = [ + "kms:GenerateDataKey", + "kms:Decrypt" + ] + Resource = "*" + Condition = { + StringEquals = { + "kms:EncryptionContext:aws:sqs:queue_arn" = [ + 
"arn:aws:sqs:${var.aws_region}:${var.immunisation_account_id}:${var.mns_publisher_resource_name_prefix}-queue", + "arn:aws:sqs:${var.aws_region}:${var.immunisation_account_id}:${var.mns_publisher_resource_name_prefix}-dead-letter-queue" + ] + } + } + }, + { + Sid = "AllowLambdaToDecrypt" + Effect = "Allow" + Principal = { + AWS = "arn:aws:iam::${var.immunisation_account_id}:role/${var.short_prefix}-mns-publisher-lambda-exec-role" + } + Action = [ + "kms:Decrypt", + "kms:GenerateDataKey" + ] + Resource = "*" + }, + { + Sid = "AllowEventBridgePipesUseOfKey" + Effect = "Allow" + Principal = { + AWS = "arn:aws:iam::${var.immunisation_account_id}:role/${var.mns_publisher_resource_name_prefix}-eventbridge-pipe-role" + } + Action = [ + "kms:GenerateDataKey", + "kms:Encrypt", + "kms:DescribeKey" + + ] + Resource = "*" + } + ] + }) +} + +resource "aws_kms_alias" "mns_outbound_events_key" { + name = "alias/${var.mns_publisher_resource_name_prefix}-key" + target_key_id = aws_kms_key.mns_outbound_events.id +} diff --git a/infrastructure/instance/mns_publisher_lambda.tf b/infrastructure/instance/modules/mns_publisher/mns_publisher_lambda.tf similarity index 83% rename from infrastructure/instance/mns_publisher_lambda.tf rename to infrastructure/instance/modules/mns_publisher/mns_publisher_lambda.tf index ff803a0ae5..2c131ad712 100644 --- a/infrastructure/instance/mns_publisher_lambda.tf +++ b/infrastructure/instance/modules/mns_publisher/mns_publisher_lambda.tf @@ -2,15 +2,15 @@ locals { mns_publisher_lambda_dir = abspath("${path.root}/../../lambdas/mns_publisher") mns_publisher_lambda_files = fileset(local.mns_publisher_lambda_dir, "**") mns_publisher_lambda_dir_sha = sha1(join("", [for f in local.mns_publisher_lambda_files : filesha1("${local.mns_publisher_lambda_dir}/${f}")])) - mns_publisher_lambda_name = "${local.short_prefix}-mns-publisher-lambda" + mns_publisher_lambda_name = "${var.short_prefix}-mns-publisher-lambda" } resource "aws_ecr_repository" 
"mns_publisher_lambda_repository" { image_scanning_configuration { scan_on_push = true } - name = "${local.short_prefix}-mns-publisher-repo" - force_delete = local.is_temp + name = "${var.short_prefix}-mns-publisher-repo" + force_delete = var.is_temp } # Module for building and pushing Docker image to ECR @@ -43,7 +43,7 @@ module "mns_publisher_docker_image" { source_path = abspath("${path.root}/../../lambdas") triggers = { dir_sha = local.mns_publisher_lambda_dir_sha - shared_dir_sha = local.shared_dir_sha + shared_dir_sha = var.shared_dir_sha } } @@ -122,7 +122,7 @@ resource "aws_iam_policy" "mns_publisher_lambda_exec_policy" { "firehose:PutRecord", "firehose:PutRecordBatch" ], - "Resource" : "arn:aws:firehose:*:*:deliverystream/${module.splunk.firehose_stream_name}" + "Resource" : "arn:aws:firehose:*:*:deliverystream/${var.splunk_firehose_stream_name}" }, { Effect = "Allow", @@ -149,7 +149,15 @@ resource "aws_iam_policy" "mns_publisher_lambda_kms_access_policy" { Action = [ "kms:Decrypt" ] - Resource = data.aws_kms_key.existing_lambda_encryption_key.arn + Resource = var.dynamo_kms_encryption_key_arn + }, + { + Effect = "Allow" + Action = [ + "kms:Decrypt", + "kms:GenerateDataKey" + ] + Resource = aws_kms_key.mns_outbound_events.arn } ] }) @@ -177,18 +185,18 @@ resource "aws_lambda_function" "mns_publisher_lambda" { timeout = 120 vpc_config { - subnet_ids = local.private_subnet_ids - security_group_ids = [data.aws_security_group.existing_securitygroup.id] + subnet_ids = var.private_subnet_ids + security_group_ids = [var.security_group_id] } environment { variables = { - SPLUNK_FIREHOSE_NAME = module.splunk.firehose_stream_name + SPLUNK_FIREHOSE_NAME = var.splunk_firehose_stream_name } } - kms_key_arn = data.aws_kms_key.existing_lambda_encryption_key.arn - reserved_concurrent_executions = local.is_temp ? -1 : 20 + kms_key_arn = var.lambda_kms_encryption_key_arn + reserved_concurrent_executions = var.is_temp ? 
-1 : 20 depends_on = [ aws_cloudwatch_log_group.mns_publisher_lambda_log_group, aws_iam_policy.mns_publisher_lambda_exec_policy @@ -208,31 +216,31 @@ resource "aws_lambda_event_source_mapping" "mns_outbound_event_sqs_to_lambda" { } resource "aws_cloudwatch_log_metric_filter" "mns_publisher_error_logs" { - count = var.error_alarm_notifications_enabled ? 1 : 0 + count = var.enable_lambda_alarm ? 1 : 0 - name = "${local.short_prefix}-MnsPublisherErrorLogsFilter" + name = "${var.short_prefix}-MnsPublisherErrorLogsFilter" pattern = "%\\[ERROR\\]%" log_group_name = aws_cloudwatch_log_group.mns_publisher_lambda_log_group.name metric_transformation { - name = "${local.short_prefix}-MnsPublisherErrorLogs" - namespace = "${local.short_prefix}-MnsPublisherLambda" + name = "${var.short_prefix}-MnsPublisherErrorLogs" + namespace = "${var.short_prefix}-MnsPublisherLambda" value = "1" } } resource "aws_cloudwatch_metric_alarm" "mns_publisher_error_alarm" { - count = var.error_alarm_notifications_enabled ? 1 : 0 + count = var.enable_lambda_alarm ? 
1 : 0 alarm_name = "${local.mns_publisher_lambda_name}-error" comparison_operator = "GreaterThanOrEqualToThreshold" evaluation_periods = 1 - metric_name = "${local.short_prefix}-MnsPublisherErrorLogs" - namespace = "${local.short_prefix}-MnsPublisherLambda" + metric_name = "${var.short_prefix}-MnsPublisherErrorLogs" + namespace = "${var.short_prefix}-MnsPublisherLambda" period = 120 statistic = "Sum" threshold = 1 alarm_description = "This sets off an alarm for any error logs found in the MNS Publisher Lambda function" - alarm_actions = [data.aws_sns_topic.imms_system_alert_errors.arn] + alarm_actions = [var.system_alarm_sns_topic_arn] treat_missing_data = "notBreaching" } diff --git a/infrastructure/instance/modules/mns_publisher/sqs_mns_outbound_events.tf b/infrastructure/instance/modules/mns_publisher/sqs_mns_outbound_events.tf new file mode 100644 index 0000000000..074d06e94f --- /dev/null +++ b/infrastructure/instance/modules/mns_publisher/sqs_mns_outbound_events.tf @@ -0,0 +1,49 @@ +resource "aws_sqs_queue" "mns_outbound_events" { + name = "${var.mns_publisher_resource_name_prefix}-queue" + fifo_queue = false + kms_master_key_id = aws_kms_key.mns_outbound_events.arn + visibility_timeout_seconds = 180 + redrive_policy = jsonencode({ + deadLetterTargetArn = aws_sqs_queue.mns_outbound_events_dlq.arn + maxReceiveCount = 2 + }) +} + +resource "aws_sqs_queue" "mns_outbound_events_dlq" { + name = "${var.mns_publisher_resource_name_prefix}-dead-letter-queue" + kms_master_key_id = aws_kms_key.mns_outbound_events.arn +} + +resource "aws_sqs_queue_redrive_allow_policy" "terraform_queue_redrive_allow_policy" { + queue_url = aws_sqs_queue.mns_outbound_events_dlq.id + + redrive_allow_policy = jsonencode({ + redrivePermission = "byQueue", + sourceQueueArns = [aws_sqs_queue.mns_outbound_events.arn] + }) +} + +data "aws_iam_policy_document" "mns_outbound_events_sqs_policy" { + statement { + sid = "mns-outbound-allow-eb-pipe-access" + effect = "Allow" + + principals { + type = 
"AWS" + identifiers = [aws_iam_role.mns_outbound_events_eb_pipe.arn] + } + + actions = [ + "sqs:SendMessage", + ] + + resources = [ + aws_sqs_queue.mns_outbound_events.arn + ] + } +} + +resource "aws_sqs_queue_policy" "mns_outbound_events_sqs" { + queue_url = aws_sqs_queue.mns_outbound_events.id + policy = data.aws_iam_policy_document.mns_outbound_events_sqs_policy.json +} diff --git a/infrastructure/instance/modules/mns_publisher/variables.tf b/infrastructure/instance/modules/mns_publisher/variables.tf new file mode 100644 index 0000000000..4ffe2c5e7d --- /dev/null +++ b/infrastructure/instance/modules/mns_publisher/variables.tf @@ -0,0 +1,74 @@ +variable "aws_region" { + type = string + default = "eu-west-2" + description = "The AWS region to deploy the module into. Only accept eu-west-2." + + validation { + condition = var.aws_region == "eu-west-2" + error_message = "AWS Region must be set to eu-west-2." + } +} + +variable "ddb_delta_stream_arn" { + type = string + description = "The ARN of the Delta Dynamo DB Stream which the feature consumes from." +} + +variable "dynamo_kms_encryption_key_arn" { + type = string + description = "The ARN of the KMS encryption key used on data in Dynamo DB." +} + +variable "enable_lambda_alarm" { + type = bool + description = "Switch to enable an error alarm for the MNS Publisher Lambda function." +} + +variable "immunisation_account_id" { + type = string + description = "Immunisation AWS Account ID." +} + +variable "is_temp" { + type = bool + description = "Flag to state if this is a temporary environment. E.g. PR environment. Used for deletion logic." +} + +variable "lambda_kms_encryption_key_arn" { + type = string + description = "The ARN of the KMS encryption key used to encrypt Lambda function environment variables." +} + +variable "mns_publisher_resource_name_prefix" { + type = string + description = "The prefix for the name of resources within the mns_publisher feature." 
+} + +variable "private_subnet_ids" { + type = list(string) +} + +variable "security_group_id" { + type = string +} + +variable "shared_dir_sha" { + type = string + description = "The SHA of the shared Lambda code directory. Used for determining if a change occurred there so the Lambda function needs rebuilding." +} + +variable "splunk_firehose_stream_name" { + type = string + description = "The name of the Splunk delivery stream." +} + +variable "short_prefix" { + type = string + description = "The short prefix used for the Lambda function. Constructed and defined by the calling module, but is typically imms-internal-qa, imms-int-green etc." +} + +variable "system_alarm_sns_topic_arn" { + type = string + description = "The ARN of the SNS Topic used for raising alerts to Slack for CW alarms." +} + diff --git a/infrastructure/instance/sqs_mns_outbound_events.tf b/infrastructure/instance/sqs_mns_outbound_events.tf deleted file mode 100644 index 039ca00840..0000000000 --- a/infrastructure/instance/sqs_mns_outbound_events.tf +++ /dev/null @@ -1,33 +0,0 @@ -resource "aws_sqs_queue" "mns_outbound_events" { - name = "${local.resource_scope}-mns-outbound-events" - fifo_queue = false - visibility_timeout_seconds = 180 -} - -data "aws_iam_policy_document" "mns_outbound_events_sqs_policy" { - statement { - sid = "mns-outbound-allow-eb-pipe-access" - effect = "Allow" - - principals { - type = "AWS" - identifiers = [aws_iam_role.mns_outbound_events_eb_pipe.arn] - } - - actions = [ - "sqs:SendMessage", - ] - - resources = [ - aws_sqs_queue.mns_outbound_events.arn - ] - } -} - -resource "aws_sqs_queue_policy" "mns_outbound_events_sqs" { - queue_url = aws_sqs_queue.mns_outbound_events.id - policy = data.aws_iam_policy_document.mns_outbound_events_sqs_policy.json -} - -# TODO - (follow on once we have basics set up so Lambda coding can start) -# Add KMS encryption to queue, add DLQ and redrive diff --git a/infrastructure/instance/variables.tf b/infrastructure/instance/variables.tf 
index af88630698..456a5e64a2 100644 --- a/infrastructure/instance/variables.tf +++ b/infrastructure/instance/variables.tf @@ -80,13 +80,18 @@ variable "mesh_no_invocation_period_seconds" { default = 300 } -# Remember to switch off in PR envs after testing variable "error_alarm_notifications_enabled" { default = true description = "Switch to enable error alarm notifications to Slack" type = bool } +variable "mns_publisher_feature_enabled" { + default = false + description = "Switch to the MNS Publisher feature which allows us to publish Immunisation events." + type = bool +} + variable "has_sub_environment_scope" { description = "True if the sub-environment is a standalone environment, e.g. internal-dev. False if it is part of a blue-green split, e.g. int-green." type = bool From 7dff1b2c299223daf639d6628de3fba4916c264d Mon Sep 17 00:00:00 2001 From: Akinola Olutola Date: Fri, 6 Mar 2026 15:59:23 +0000 Subject: [PATCH 03/14] VED-982: Create POST MNS Notification (#1211) * build base schema --- infrastructure/instance/.terraform.lock.hcl | 27 +- .../dev/internal-dev/variables.tfvars | 1 + .../dev/internal-qa/variables.tfvars | 1 + .../environments/dev/pr/variables.tfvars | 1 + .../environments/dev/ref/variables.tfvars | 1 + .../preprod/int-blue/variables.tfvars | 1 + .../preprod/int-green/variables.tfvars | 1 + .../environments/prod/blue/variables.tfvars | 1 + .../environments/prod/green/variables.tfvars | 1 + infrastructure/instance/mns_publisher.tf | 8 + .../mns_publisher/mns_publisher_lambda.tf | 34 +- .../mns_publisher/sqs_test_publish_mns.tf | 45 + .../modules/mns_publisher/variables.tf | 42 + infrastructure/instance/outputs.tf | 10 + infrastructure/instance/variables.tf | 6 +- .../backend/src/controller/fhir_controller.py | 3 +- lambdas/backend/src/service/fhir_service.py | 3 +- .../backend/src/service/search_url_helper.py | 25 +- .../tests/service/test_search_url_helper.py | 24 +- lambdas/id_sync/src/record_processor.py | 3 +- lambdas/mns_publisher/poetry.lock 
| 829 +++++++++++++++++- lambdas/mns_publisher/pyproject.toml | 11 + lambdas/mns_publisher/src/constants.py | 5 + .../mns_publisher/src/create_notification.py | 130 +++ lambdas/mns_publisher/src/lambda_handler.py | 9 +- lambdas/mns_publisher/src/process_records.py | 88 ++ .../tests/sample_data/sqs_event.json | 118 +++ .../tests/test_create_notification.py | 362 ++++++++ .../tests/test_lambda_handler.py | 382 +++++++- .../tests/test_sqs_dynamo_utils.py | 0 lambdas/mns_publisher/tests/test_utils.py | 32 + lambdas/mns_subscription/src/mns_setup.py | 26 - lambdas/mns_subscription/src/subscribe_mns.py | 2 +- .../mns_subscription/src/unsubscribe_mns.py | 2 +- .../src/common/api_clients/constants.py | 29 + .../shared/src/common/api_clients/errors.py | 8 + .../src/common/api_clients/get_pds_details.py | 34 + .../src/common/api_clients/mns_service.py | 80 +- .../src/common/api_clients/mns_setup.py | 32 + .../common/api_clients/mock_mns_service.py | 34 + lambdas/shared/src/common/constants.py | 2 + lambdas/shared/src/common/get_service_url.py | 26 + .../api_clients/test_mns_service.py | 75 +- .../api_clients}/test_mns_setup.py | 8 +- .../api_clients/test_mock_mns_service.py | 96 ++ .../api_clients}/test_pds_details.py | 35 +- .../tests/test_common/test_get_service_url.py | 29 + 47 files changed, 2558 insertions(+), 164 deletions(-) create mode 100644 infrastructure/instance/modules/mns_publisher/sqs_test_publish_mns.tf create mode 100644 lambdas/mns_publisher/src/constants.py create mode 100644 lambdas/mns_publisher/src/create_notification.py create mode 100644 lambdas/mns_publisher/src/process_records.py create mode 100644 lambdas/mns_publisher/tests/sample_data/sqs_event.json create mode 100644 lambdas/mns_publisher/tests/test_create_notification.py create mode 100644 lambdas/mns_publisher/tests/test_sqs_dynamo_utils.py create mode 100644 lambdas/mns_publisher/tests/test_utils.py delete mode 100644 lambdas/mns_subscription/src/mns_setup.py create mode 100644 
lambdas/shared/src/common/api_clients/get_pds_details.py create mode 100644 lambdas/shared/src/common/api_clients/mns_setup.py create mode 100644 lambdas/shared/src/common/api_clients/mock_mns_service.py create mode 100644 lambdas/shared/src/common/constants.py create mode 100644 lambdas/shared/src/common/get_service_url.py rename lambdas/{mns_subscription/tests => shared/tests/test_common/api_clients}/test_mns_setup.py (79%) create mode 100644 lambdas/shared/tests/test_common/api_clients/test_mock_mns_service.py rename lambdas/{id_sync/tests => shared/tests/test_common/api_clients}/test_pds_details.py (86%) create mode 100644 lambdas/shared/tests/test_common/test_get_service_url.py diff --git a/infrastructure/instance/.terraform.lock.hcl b/infrastructure/instance/.terraform.lock.hcl index b0b61f7360..fe4aea8b38 100644 --- a/infrastructure/instance/.terraform.lock.hcl +++ b/infrastructure/instance/.terraform.lock.hcl @@ -46,23 +46,22 @@ provider "registry.terraform.io/hashicorp/external" { } provider "registry.terraform.io/hashicorp/local" { - version = "2.5.3" + version = "2.7.0" constraints = ">= 1.0.0" hashes = [ - "h1:1Nkh16jQJMp0EuDmvP/96f5Unnir0z12WyDuoR6HjMo=", - "h1:MCzg+hs1/ZQ32u56VzJMWP9ONRQPAAqAjuHuzbyshvI=", - "zh:284d4b5b572eacd456e605e94372f740f6de27b71b4e1fd49b63745d8ecd4927", - "zh:40d9dfc9c549e406b5aab73c023aa485633c1b6b730c933d7bcc2fa67fd1ae6e", - "zh:6243509bb208656eb9dc17d3c525c89acdd27f08def427a0dce22d5db90a4c8b", + "h1:sSwlfp2etjCaE9hIF7bJBDjRIhDCVFglEOVyiCI7vgs=", + "zh:261fec71bca13e0a7812dc0d8ae9af2b4326b24d9b2e9beab3d2400fab5c5f9a", + "zh:308da3b5376a9ede815042deec5af1050ec96a5a5410a2206ae847d82070a23e", + "zh:3d056924c420464dc8aba10e1915956b2e5c4d55b11ffff79aa8be563fbfe298", + "zh:643256547b155459c45e0a3e8aab0570db59923c68daf2086be63c444c8c445b", "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", - "zh:885d85869f927853b6fe330e235cd03c337ac3b933b0d9ae827ec32fa1fdcdbf", - 
"zh:bab66af51039bdfcccf85b25fe562cbba2f54f6b3812202f4873ade834ec201d", - "zh:c505ff1bf9442a889ac7dca3ac05a8ee6f852e0118dd9a61796a2f6ff4837f09", - "zh:d36c0b5770841ddb6eaf0499ba3de48e5d4fc99f4829b6ab66b0fab59b1aaf4f", - "zh:ddb6a407c7f3ec63efb4dad5f948b54f7f4434ee1a2607a49680d494b1776fe1", - "zh:e0dafdd4500bec23d3ff221e3a9b60621c5273e5df867bc59ef6b7e41f5c91f6", - "zh:ece8742fd2882a8fc9d6efd20e2590010d43db386b920b2a9c220cfecc18de47", - "zh:f4c6b3eb8f39105004cf720e202f04f57e3578441cfb76ca27611139bc116a82", + "zh:7aa4d0b853f84205e8cf79f30c9b2c562afbfa63592f7231b6637e5d7a6b5b27", + "zh:7dc251bbc487d58a6ab7f5b07ec9edc630edb45d89b761dba28e0e2ba6b1c11f", + "zh:7ee0ca546cd065030039168d780a15cbbf1765a4c70cd56d394734ab112c93da", + "zh:b1d5d80abb1906e6c6b3685a52a0192b4ca6525fe090881c64ec6f67794b1300", + "zh:d81ea9856d61db3148a4fc6c375bf387a721d78fc1fea7a8823a027272a47a78", + "zh:df0a1f0afc947b8bfc88617c1ad07a689ce3bd1a29fd97318392e6bdd32b230b", + "zh:dfbcad800240e0c68c43e0866f2a751cff09777375ec701918881acf67a268da", ] } diff --git a/infrastructure/instance/environments/dev/internal-dev/variables.tfvars b/infrastructure/instance/environments/dev/internal-dev/variables.tfvars index 188bc51e18..e3a6521f9f 100644 --- a/infrastructure/instance/environments/dev/internal-dev/variables.tfvars +++ b/infrastructure/instance/environments/dev/internal-dev/variables.tfvars @@ -2,6 +2,7 @@ environment = "dev" immunisation_account_id = "345594581768" dspp_core_account_id = "603871901111" pds_environment = "int" +mns_environment = "dev" error_alarm_notifications_enabled = true create_mesh_processor = false has_sub_environment_scope = true diff --git a/infrastructure/instance/environments/dev/internal-qa/variables.tfvars b/infrastructure/instance/environments/dev/internal-qa/variables.tfvars index d671f09c6d..409096620f 100644 --- a/infrastructure/instance/environments/dev/internal-qa/variables.tfvars +++ b/infrastructure/instance/environments/dev/internal-qa/variables.tfvars @@ -2,6 +2,7 @@ 
environment = "dev" immunisation_account_id = "345594581768" dspp_core_account_id = "603871901111" pds_environment = "int" +mns_environment = "dev" error_alarm_notifications_enabled = false mns_publisher_feature_enabled = true create_mesh_processor = false diff --git a/infrastructure/instance/environments/dev/pr/variables.tfvars b/infrastructure/instance/environments/dev/pr/variables.tfvars index 7d17c90f95..26f288fef0 100644 --- a/infrastructure/instance/environments/dev/pr/variables.tfvars +++ b/infrastructure/instance/environments/dev/pr/variables.tfvars @@ -2,6 +2,7 @@ environment = "dev" immunisation_account_id = "345594581768" dspp_core_account_id = "603871901111" pds_environment = "int" +mns_environment = "dev" error_alarm_notifications_enabled = false mns_publisher_feature_enabled = true # Switch this off once tested fully e2e in Lambda branch create_mesh_processor = false diff --git a/infrastructure/instance/environments/dev/ref/variables.tfvars b/infrastructure/instance/environments/dev/ref/variables.tfvars index 6b3124455a..e6256cc114 100644 --- a/infrastructure/instance/environments/dev/ref/variables.tfvars +++ b/infrastructure/instance/environments/dev/ref/variables.tfvars @@ -2,6 +2,7 @@ environment = "dev" immunisation_account_id = "345594581768" dspp_core_account_id = "603871901111" pds_environment = "ref" +mns_environment = "dev" error_alarm_notifications_enabled = true create_mesh_processor = false has_sub_environment_scope = true diff --git a/infrastructure/instance/environments/preprod/int-blue/variables.tfvars b/infrastructure/instance/environments/preprod/int-blue/variables.tfvars index afd512ca36..3d5c79af36 100644 --- a/infrastructure/instance/environments/preprod/int-blue/variables.tfvars +++ b/infrastructure/instance/environments/preprod/int-blue/variables.tfvars @@ -2,6 +2,7 @@ environment = "preprod" immunisation_account_id = "084828561157" dspp_core_account_id = "603871901111" pds_environment = "int" +mns_environment = "int" 
error_alarm_notifications_enabled = true mns_publisher_feature_enabled = true diff --git a/infrastructure/instance/environments/preprod/int-green/variables.tfvars b/infrastructure/instance/environments/preprod/int-green/variables.tfvars index afd512ca36..3d5c79af36 100644 --- a/infrastructure/instance/environments/preprod/int-green/variables.tfvars +++ b/infrastructure/instance/environments/preprod/int-green/variables.tfvars @@ -2,6 +2,7 @@ environment = "preprod" immunisation_account_id = "084828561157" dspp_core_account_id = "603871901111" pds_environment = "int" +mns_environment = "int" error_alarm_notifications_enabled = true mns_publisher_feature_enabled = true diff --git a/infrastructure/instance/environments/prod/blue/variables.tfvars b/infrastructure/instance/environments/prod/blue/variables.tfvars index ff6518b348..447e9972d4 100644 --- a/infrastructure/instance/environments/prod/blue/variables.tfvars +++ b/infrastructure/instance/environments/prod/blue/variables.tfvars @@ -3,6 +3,7 @@ immunisation_account_id = "664418956997" dspp_core_account_id = "232116723729" mns_account_id = "758334270304" pds_environment = "prod" +mns_environment = "prod" error_alarm_notifications_enabled = true mns_publisher_feature_enabled = true diff --git a/infrastructure/instance/environments/prod/green/variables.tfvars b/infrastructure/instance/environments/prod/green/variables.tfvars index ff6518b348..447e9972d4 100644 --- a/infrastructure/instance/environments/prod/green/variables.tfvars +++ b/infrastructure/instance/environments/prod/green/variables.tfvars @@ -3,6 +3,7 @@ immunisation_account_id = "664418956997" dspp_core_account_id = "232116723729" mns_account_id = "758334270304" pds_environment = "prod" +mns_environment = "prod" error_alarm_notifications_enabled = true mns_publisher_feature_enabled = true diff --git a/infrastructure/instance/mns_publisher.tf b/infrastructure/instance/mns_publisher.tf index f705abdf92..7b6cc9f94d 100644 --- 
a/infrastructure/instance/mns_publisher.tf +++ b/infrastructure/instance/mns_publisher.tf @@ -7,8 +7,16 @@ module "mns_publisher" { enable_lambda_alarm = var.error_alarm_notifications_enabled # consider just INT and PROD immunisation_account_id = var.immunisation_account_id is_temp = local.is_temp + enable_mns_test_queue = var.mns_environment == "dev" + resource_scope = local.resource_scope + imms_base_path = strcontains(var.sub_environment, "pr-") ? "immunisation-fhir-api/FHIR/R4-${var.sub_environment}" : "immunisation-fhir-api/FHIR/R4" lambda_kms_encryption_key_arn = data.aws_kms_key.existing_lambda_encryption_key.arn mns_publisher_resource_name_prefix = "${local.resource_scope}-mns-outbound-events" + mns_test_notification_name_prefix = "${local.resource_scope}-mns-test-notification" + secrets_manager_policy_path = "${local.policy_path}/secret_manager.json" + account_id = data.aws_caller_identity.current.account_id + pds_environment = var.pds_environment + mns_environment = var.mns_environment private_subnet_ids = local.private_subnet_ids security_group_id = data.aws_security_group.existing_securitygroup.id diff --git a/infrastructure/instance/modules/mns_publisher/mns_publisher_lambda.tf b/infrastructure/instance/modules/mns_publisher/mns_publisher_lambda.tf index 2c131ad712..7c4d9f169d 100644 --- a/infrastructure/instance/modules/mns_publisher/mns_publisher_lambda.tf +++ b/infrastructure/instance/modules/mns_publisher/mns_publisher_lambda.tf @@ -191,7 +191,12 @@ resource "aws_lambda_function" "mns_publisher_lambda" { environment { variables = { - SPLUNK_FIREHOSE_NAME = var.splunk_firehose_stream_name + SPLUNK_FIREHOSE_NAME = var.splunk_firehose_stream_name + MNS_TEST_QUEUE_URL = var.enable_mns_test_queue ? 
aws_sqs_queue.mns_test_notification[0].url : "" + IMMUNIZATION_ENV = var.resource_scope, + IMMUNIZATION_BASE_PATH = var.imms_base_path + PDS_ENV = var.pds_environment + MNS_ENV = var.mns_environment } } @@ -203,6 +208,30 @@ resource "aws_lambda_function" "mns_publisher_lambda" { ] } + +data "aws_iam_policy_document" "mns_publisher_secrets_policy_document" { + source_policy_documents = [ + templatefile("${var.secrets_manager_policy_path}", { + "account_id" : var.account_id, + "pds_environment" : var.pds_environment + }), + ] +} + +resource "aws_iam_policy" "mns_publisher_lambda_secrets_policy" { + name = "${local.mns_publisher_lambda_name}-secrets-policy" + description = "Allow Lambda to access Secrets Manager" + policy = data.aws_iam_policy_document.mns_publisher_secrets_policy_document.json +} + + +# Attach the secrets/dynamodb access policy to the Lambda role +resource "aws_iam_role_policy_attachment" "mns_publisher_lambda_secrets_policy_attachment" { + role = aws_iam_role.mns_publisher_lambda_exec_role.name + policy_arn = aws_iam_policy.mns_publisher_lambda_secrets_policy.arn +} + + resource "aws_cloudwatch_log_group" "mns_publisher_lambda_log_group" { name = "/aws/lambda/${local.mns_publisher_lambda_name}" retention_in_days = 30 @@ -213,6 +242,9 @@ resource "aws_lambda_event_source_mapping" "mns_outbound_event_sqs_to_lambda" { function_name = aws_lambda_function.mns_publisher_lambda.arn batch_size = 10 enabled = true + + # Enables partial batch responses using `batchItemFailures` + function_response_types = ["ReportBatchItemFailures"] } resource "aws_cloudwatch_log_metric_filter" "mns_publisher_error_logs" { diff --git a/infrastructure/instance/modules/mns_publisher/sqs_test_publish_mns.tf b/infrastructure/instance/modules/mns_publisher/sqs_test_publish_mns.tf new file mode 100644 index 0000000000..a7cffcb32b --- /dev/null +++ b/infrastructure/instance/modules/mns_publisher/sqs_test_publish_mns.tf @@ -0,0 +1,45 @@ +resource "aws_sqs_queue" 
"mns_test_notification" { + count = var.enable_mns_test_queue ? 1 : 0 + name = "${var.mns_test_notification_name_prefix}-queue" + fifo_queue = false + message_retention_seconds = 86400 + visibility_timeout_seconds = 300 +} + + +data "aws_iam_policy_document" "mns_test_notification_sqs_policy" { + count = var.enable_mns_test_queue ? 1 : 0 + statement { + sid = "mns-test-notification-allow-lambda-access" + effect = "Allow" + + principals { + type = "AWS" + identifiers = [aws_iam_role.mns_publisher_lambda_exec_role.arn] + } + + actions = [ + "sqs:SendMessage", + ] + + resources = [ + aws_sqs_queue.mns_test_notification[0].arn + ] + } +} + +resource "aws_sqs_queue_policy" "mns_test_notification_sqs" { + count = var.enable_mns_test_queue ? 1 : 0 + queue_url = aws_sqs_queue.mns_test_notification[0].id + policy = data.aws_iam_policy_document.mns_test_notification_sqs_policy[0].json +} + +output "mns_test_queue_url" { + value = var.enable_mns_test_queue ? aws_sqs_queue.mns_test_notification[0].url : null + description = "URL of the MNS test notifications queue" +} + +output "mns_test_queue_arn" { + value = var.enable_mns_test_queue ? aws_sqs_queue.mns_test_notification[0].arn : null + description = "ARN of the MNS test notifications queue" +} \ No newline at end of file diff --git a/infrastructure/instance/modules/mns_publisher/variables.tf b/infrastructure/instance/modules/mns_publisher/variables.tf index 4ffe2c5e7d..3857b9b13f 100644 --- a/infrastructure/instance/modules/mns_publisher/variables.tf +++ b/infrastructure/instance/modules/mns_publisher/variables.tf @@ -72,3 +72,45 @@ variable "system_alarm_sns_topic_arn" { description = "The ARN of the SNS Topic used for raising alerts to Slack for CW alarms." } +variable "resource_scope" { + type = string + description = < str: - """Sets the service URL based on service parameters derived from env vars. PR environments use internal-dev while - we also default to this environment. 
The only other exceptions are preprod which maps to the Apigee int environment - and prod which does not have a subdomain.""" - if not service_base_path: - service_base_path = DEFAULT_BASE_PATH - - if service_env is None or is_pr_env(service_env): - subdomain = "internal-dev." - elif service_env == "preprod": - subdomain = "int." - elif service_env == "prod": - subdomain = "" - else: - subdomain = f"{service_env}." - - return f"https://{subdomain}api.service.nhs.uk/{service_base_path}" - - -def is_pr_env(service_env: str | None) -> bool: - return service_env is not None and service_env.startswith(PR_ENV_PREFIX) def create_url_for_bundle_link( diff --git a/lambdas/backend/tests/service/test_search_url_helper.py b/lambdas/backend/tests/service/test_search_url_helper.py index cd37dc2a86..624f638882 100644 --- a/lambdas/backend/tests/service/test_search_url_helper.py +++ b/lambdas/backend/tests/service/test_search_url_helper.py @@ -3,32 +3,10 @@ import datetime import unittest -from service.search_url_helper import create_url_for_bundle_link, get_service_url +from service.search_url_helper import create_url_for_bundle_link class TestServiceUrl(unittest.TestCase): - def test_get_service_url(self): - """it should create service url""" - test_cases = [ - ("pr-123", "https://internal-dev.api.service.nhs.uk/immunisation-fhir-api/FHIR/R4"), - (None, "https://internal-dev.api.service.nhs.uk/immunisation-fhir-api/FHIR/R4"), - ("preprod", "https://int.api.service.nhs.uk/immunisation-fhir-api/FHIR/R4"), - ("prod", "https://api.service.nhs.uk/immunisation-fhir-api/FHIR/R4"), - ("ref", "https://ref.api.service.nhs.uk/immunisation-fhir-api/FHIR/R4"), - ("internal-dev", "https://internal-dev.api.service.nhs.uk/immunisation-fhir-api/FHIR/R4"), - ("internal-qa", "https://internal-qa.api.service.nhs.uk/immunisation-fhir-api/FHIR/R4"), - ] - mock_base_path = "immunisation-fhir-api/FHIR/R4" - - for mock_env, expected in test_cases: - with self.subTest(mock_env=mock_env, 
expected=expected): - self.assertEqual(get_service_url(mock_env, mock_base_path), expected) - - def test_get_service_url_uses_default_path_when_not_provided(self): - self.assertEqual( - get_service_url(None, None), "https://internal-dev.api.service.nhs.uk/immunisation-fhir-api/FHIR/R4" - ) - def test_create_url_for_bundle_link_with_target_disease_uses_target_disease_param(self): url = create_url_for_bundle_link( immunization_targets=set(), diff --git a/lambdas/id_sync/src/record_processor.py b/lambdas/id_sync/src/record_processor.py index 615443fc38..3851422c1f 100644 --- a/lambdas/id_sync/src/record_processor.py +++ b/lambdas/id_sync/src/record_processor.py @@ -1,6 +1,7 @@ import json from typing import Any +from common.api_clients.get_pds_details import pds_get_patient_details from common.clients import logger from exceptions.id_sync_exception import IdSyncException from ieds_db_operations import ( @@ -9,7 +10,7 @@ get_items_from_patient_id, ieds_update_patient_id, ) -from pds_details import get_nhs_number_from_pds_resource, pds_get_patient_details +from pds_details import get_nhs_number_from_pds_resource from utils import make_status diff --git a/lambdas/mns_publisher/poetry.lock b/lambdas/mns_publisher/poetry.lock index dd85f09247..69634eb57c 100644 --- a/lambdas/mns_publisher/poetry.lock +++ b/lambdas/mns_publisher/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand. 
[[package]] name = "aws-lambda-typing" @@ -12,6 +12,290 @@ files = [ {file = "aws_lambda_typing-2.20.0-py3-none-any.whl", hash = "sha256:1d44264cabfeab5ac38e67ddd0c874e677b2cbbae77a42d0519df470e6bbb49b"}, ] +[[package]] +name = "boto3" +version = "1.42.51" +description = "The AWS SDK for Python" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "boto3-1.42.51-py3-none-any.whl", hash = "sha256:c3e75ab1c4df6b1049aecfae56d15f5ff99d68ec6a05f24741bab08ad5d5406e"}, + {file = "boto3-1.42.51.tar.gz", hash = "sha256:a010376cdc2432faa6c3338f04591142a1374da1b7eba94b80c0c7f1b525eff7"}, +] + +[package.dependencies] +botocore = ">=1.42.51,<1.43.0" +jmespath = ">=0.7.1,<2.0.0" +s3transfer = ">=0.16.0,<0.17.0" + +[package.extras] +crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] + +[[package]] +name = "botocore" +version = "1.42.51" +description = "Low-level, data-driven core of boto 3." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "botocore-1.42.51-py3-none-any.whl", hash = "sha256:216c4c148f37f882c7239fce1d8023acdc664643952ce1d6827c7edc829903d3"}, + {file = "botocore-1.42.51.tar.gz", hash = "sha256:d7b03905b8066c25dd5bde1b7dc4af15ebdbaa313abbb2543db179b1d5efae3d"}, +] + +[package.dependencies] +jmespath = ">=0.7.1,<2.0.0" +python-dateutil = ">=2.1,<3.0.0" +urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""} + +[package.extras] +crt = ["awscrt (==0.31.2)"] + +[[package]] +name = "cache" +version = "1.0.3" +description = "caching for humans" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "cache-1.0.3.tar.gz", hash = "sha256:ac063f2490c0794d5cf482bfff10b6339c441a6658f8f00fe653bd65b3ce85fb"}, +] + +[[package]] +name = "certifi" +version = "2026.1.4" +description = "Python package for providing Mozilla's CA Bundle." 
+optional = false +python-versions = ">=3.7" +groups = ["main", "dev"] +files = [ + {file = "certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c"}, + {file = "certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120"}, +] + +[[package]] +name = "cffi" +version = "2.0.0" +description = "Foreign Function Interface for Python calling C code." +optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "platform_python_implementation != \"PyPy\"" +files = [ + {file = "cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44"}, + {file = "cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:53f77cbe57044e88bbd5ed26ac1d0514d2acf0591dd6bb02a3ae37f76811b80c"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3e837e369566884707ddaf85fc1744b47575005c0a229de3327f8f9a20f4efeb"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5eda85d6d1879e692d546a078b44251cdd08dd1cfb98dfb77b670c97cee49ea0"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9332088d75dc3241c702d852d4671613136d90fa6881da7d770a483fd05248b4"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc7de24befaeae77ba923797c7c87834c73648a05a4bde34b3b7e5588973a453"}, + {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf364028c016c03078a23b503f02058f1814320a56ad535686f90565636a9495"}, + {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = 
"sha256:e11e82b744887154b182fd3e7e8512418446501191994dbf9c9fc1f32cc8efd5"}, + {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8ea985900c5c95ce9db1745f7933eeef5d314f0565b27625d9a10ec9881e1bfb"}, + {file = "cffi-2.0.0-cp310-cp310-win32.whl", hash = "sha256:1f72fb8906754ac8a2cc3f9f5aaa298070652a0ffae577e0ea9bd480dc3c931a"}, + {file = "cffi-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:b18a3ed7d5b3bd8d9ef7a8cb226502c6bf8308df1525e1cc676c3680e7176739"}, + {file = "cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe"}, + {file = "cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26"}, + {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9"}, + {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414"}, + {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743"}, + {file = "cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5"}, + {file = "cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5"}, + {file = "cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d"}, + {file = "cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d"}, + {file = "cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba"}, + {file = "cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94"}, + {file = "cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187"}, + {file = "cffi-2.0.0-cp312-cp312-win32.whl", hash = 
"sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18"}, + {file = "cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5"}, + {file = "cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6"}, + {file = "cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb"}, + {file = "cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26"}, + {file = "cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c"}, + {file = "cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b"}, + {file = "cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27"}, + {file = "cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = 
"sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75"}, + {file = "cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91"}, + {file = "cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5"}, + {file = "cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775"}, + {file = "cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205"}, + {file = "cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1"}, + {file = "cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f"}, + {file = "cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25"}, + {file = "cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad"}, + {file = "cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9"}, + {file = 
"cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592"}, + {file = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512"}, + {file = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4"}, + {file = "cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e"}, + {file = "cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6"}, + {file = "cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9"}, + {file = "cffi-2.0.0-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:fe562eb1a64e67dd297ccc4f5addea2501664954f2692b69a76449ec7913ecbf"}, + {file = "cffi-2.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:de8dad4425a6ca6e4e5e297b27b5c824ecc7581910bf9aee86cb6835e6812aa7"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:4647afc2f90d1ddd33441e5b0e85b16b12ddec4fca55f0d9671fef036ecca27c"}, + {file = 
"cffi-2.0.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3f4d46d8b35698056ec29bca21546e1551a205058ae1a181d871e278b0b28165"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:e6e73b9e02893c764e7e8d5bb5ce277f1a009cd5243f8228f75f842bf937c534"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:cb527a79772e5ef98fb1d700678fe031e353e765d1ca2d409c92263c6d43e09f"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:61d028e90346df14fedc3d1e5441df818d095f3b87d286825dfcbd6459b7ef63"}, + {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0f6084a0ea23d05d20c3edcda20c3d006f9b6f3fefeac38f59262e10cef47ee2"}, + {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1cd13c99ce269b3ed80b417dcd591415d3372bcac067009b6e0f59c7d4015e65"}, + {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:89472c9762729b5ae1ad974b777416bfda4ac5642423fa93bd57a09204712322"}, + {file = "cffi-2.0.0-cp39-cp39-win32.whl", hash = "sha256:2081580ebb843f759b9f617314a24ed5738c51d2aee65d31e02f6f7a2b97707a"}, + {file = "cffi-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:b882b3df248017dba09d6b16defe9b5c407fe32fc7c65a9c69798e6175601be9"}, + {file = "cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529"}, +] + +[package.dependencies] +pycparser = {version = "*", markers = "implementation_name != \"PyPy\""} + +[[package]] +name = "charset-normalizer" +version = "3.4.4" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7" +groups = ["main", "dev"] +files = [ + {file = "charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e824f1492727fa856dd6eda4f7cee25f8518a12f3c4a56a74e8095695089cf6d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4bd5d4137d500351a30687c2d3971758aac9a19208fc110ccb9d7188fbe709e8"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:027f6de494925c0ab2a55eab46ae5129951638a49a34d87f4c3eda90f696b4ad"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f820802628d2694cb7e56db99213f930856014862f3fd943d290ea8438d07ca8"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:798d75d81754988d2565bff1b97ba5a44411867c0cf32b77a7e8f8d84796b10d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d1bb833febdff5c8927f922386db610b49db6e0d4f4ee29601d71e7c2694313"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9cd98cdc06614a2f768d2b7286d66805f94c48cde050acdbbb7db2600ab3197e"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:077fbb858e903c73f6c9db43374fd213b0b6a778106bc7032446a8e8b5b38b93"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:244bfb999c71b35de57821b8ea746b24e863398194a4014e4c76adc2bbdfeff0"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:64b55f9dce520635f018f907ff1b0df1fdc31f2795a922fb49dd14fbcdf48c84"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_riscv64.whl", hash = 
"sha256:faa3a41b2b66b6e50f84ae4a68c64fcd0c44355741c6374813a800cd6695db9e"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6515f3182dbe4ea06ced2d9e8666d97b46ef4c75e326b79bb624110f122551db"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc00f04ed596e9dc0da42ed17ac5e596c6ccba999ba6bd92b0e0aef2f170f2d6"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win32.whl", hash = "sha256:f34be2938726fc13801220747472850852fe6b1ea75869a048d6f896838c896f"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:a61900df84c667873b292c3de315a786dd8dac506704dea57bc957bd31e22c7d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win_arm64.whl", hash = "sha256:cead0978fc57397645f12578bfd2d5ea9138ea0fac82b2f63f7f7c6877986a69"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash 
= "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = 
"sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ce8a0633f41a967713a59c4139d29110c07e826d131a316b50ce11b1d79b4f84"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaabd426fe94daf8fd157c32e571c85cb12e66692f15516a83a03264b08d06c3"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c4ef880e27901b6cc782f1b95f82da9313c0eb95c3af699103088fa0ac3ce9ac"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2aaba3b0819274cc41757a1da876f810a3e4d7b6eb25699253a4effef9e8e4af"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:778d2e08eda00f4256d7f672ca9fef386071c9202f5e4607920b86d7803387f2"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f155a433c2ec037d4e8df17d18922c3a0d9b3232a396690f17175d2946f0218d"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a8bf8d0f749c5757af2142fe7903a9df1d2e8aa3841559b2bad34b08d0e2bcf3"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:194f08cbb32dc406d6e1aea671a68be0823673db2832b38405deba2fb0d88f63"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:6aee717dcfead04c6eb1ce3bd29ac1e22663cdea57f943c87d1eab9a025438d7"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:cd4b7ca9984e5e7985c12bc60a6f173f3c958eae74f3ef6624bb6b26e2abbae4"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_riscv64.whl", hash = 
"sha256:b7cf1017d601aa35e6bb650b6ad28652c9cd78ee6caff19f3c28d03e1c80acbf"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:e912091979546adf63357d7e2ccff9b44f026c075aeaf25a52d0e95ad2281074"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5cb4d72eea50c8868f5288b7f7f33ed276118325c1dfd3957089f6b519e1382a"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-win32.whl", hash = "sha256:837c2ce8c5a65a2035be9b3569c684358dfbf109fd3b6969630a87535495ceaa"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:44c2a8734b333e0578090c4cd6b16f275e07aa6614ca8715e6c038e865e70576"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a9768c477b9d7bd54bc0c86dbaebdec6f03306675526c9927c0e8a04e8f94af9"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bee1e43c28aa63cb16e5c14e582580546b08e535299b8b6158a7c9c768a1f3d"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fd44c878ea55ba351104cb93cc85e74916eb8fa440ca7903e57575e97394f608"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0f04b14ffe5fdc8c4933862d8306109a2c51e0704acfa35d51598eb45a1e89fc"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:cd09d08005f958f370f539f186d10aec3377d55b9eeb0d796025d4886119d76e"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4fe7859a4e3e8457458e2ff592f15ccb02f3da787fcd31e0183879c3ad4692a1"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:fa09f53c465e532f4d3db095e0c55b615f010ad81803d383195b6b5ca6cbf5f3"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7fa17817dc5625de8a027cb8b26d9fefa3ea28c8253929b8d6649e705d2835b6"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:5947809c8a2417be3267efc979c47d76a079758166f7d43ef5ae8e9f92751f88"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:4902828217069c3c5c71094537a8e623f5d097858ac6ca8252f7b4d10b7560f1"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:7c308f7e26e4363d79df40ca5b2be1c6ba9f02bdbccfed5abddb7859a6ce72cf"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2c9d3c380143a1fedbff95a312aa798578371eb29da42106a29019368a475318"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb01158d8b88ee68f15949894ccc6712278243d95f344770fa7593fa2d94410c"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win32.whl", hash = "sha256:2677acec1a2f8ef614c6888b5b4ae4060cc184174a938ed4e8ef690e15d3e505"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:f8e160feb2aed042cd657a72acc0b481212ed28b1b9a95c0cee1621b524e1966"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win_arm64.whl", hash = "sha256:b5d84d37db046c5ca74ee7bb47dd6cbc13f80665fdde3e8040bdd3fb015ecb50"}, + {file = "charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f"}, + {file = "charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a"}, +] + [[package]] name = "coverage" version = "7.13.4" @@ -131,7 +415,548 @@ files = [ [package.extras] toml = ["tomli ; python_full_version <= \"3.11.0a6\""] +[[package]] +name = "cryptography" +version = "46.0.5" +description = "cryptography is a package which provides cryptographic 
recipes and primitives to Python developers." +optional = false +python-versions = "!=3.9.0,!=3.9.1,>=3.8" +groups = ["main"] +files = [ + {file = "cryptography-46.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:351695ada9ea9618b3500b490ad54c739860883df6c1f555e088eaf25b1bbaad"}, + {file = "cryptography-46.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c18ff11e86df2e28854939acde2d003f7984f721eba450b56a200ad90eeb0e6b"}, + {file = "cryptography-46.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d7e3d356b8cd4ea5aff04f129d5f66ebdc7b6f8eae802b93739ed520c47c79b"}, + {file = "cryptography-46.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:50bfb6925eff619c9c023b967d5b77a54e04256c4281b0e21336a130cd7fc263"}, + {file = "cryptography-46.0.5-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:803812e111e75d1aa73690d2facc295eaefd4439be1023fefc4995eaea2af90d"}, + {file = "cryptography-46.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ee190460e2fbe447175cda91b88b84ae8322a104fc27766ad09428754a618ed"}, + {file = "cryptography-46.0.5-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:f145bba11b878005c496e93e257c1e88f154d278d2638e6450d17e0f31e558d2"}, + {file = "cryptography-46.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e9251e3be159d1020c4030bd2e5f84d6a43fe54b6c19c12f51cde9542a2817b2"}, + {file = "cryptography-46.0.5-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:47fb8a66058b80e509c47118ef8a75d14c455e81ac369050f20ba0d23e77fee0"}, + {file = "cryptography-46.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:4c3341037c136030cb46e4b1e17b7418ea4cbd9dd207e4a6f3b2b24e0d4ac731"}, + {file = "cryptography-46.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:890bcb4abd5a2d3f852196437129eb3667d62630333aacc13dfd470fad3aaa82"}, + {file = "cryptography-46.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = 
"sha256:80a8d7bfdf38f87ca30a5391c0c9ce4ed2926918e017c29ddf643d0ed2778ea1"}, + {file = "cryptography-46.0.5-cp311-abi3-win32.whl", hash = "sha256:60ee7e19e95104d4c03871d7d7dfb3d22ef8a9b9c6778c94e1c8fcc8365afd48"}, + {file = "cryptography-46.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:38946c54b16c885c72c4f59846be9743d699eee2b69b6988e0a00a01f46a61a4"}, + {file = "cryptography-46.0.5-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:94a76daa32eb78d61339aff7952ea819b1734b46f73646a07decb40e5b3448e2"}, + {file = "cryptography-46.0.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5be7bf2fb40769e05739dd0046e7b26f9d4670badc7b032d6ce4db64dddc0678"}, + {file = "cryptography-46.0.5-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe346b143ff9685e40192a4960938545c699054ba11d4f9029f94751e3f71d87"}, + {file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:c69fd885df7d089548a42d5ec05be26050ebcd2283d89b3d30676eb32ff87dee"}, + {file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:8293f3dea7fc929ef7240796ba231413afa7b68ce38fd21da2995549f5961981"}, + {file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:1abfdb89b41c3be0365328a410baa9df3ff8a9110fb75e7b52e66803ddabc9a9"}, + {file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:d66e421495fdb797610a08f43b05269e0a5ea7f5e652a89bfd5a7d3c1dee3648"}, + {file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:4e817a8920bfbcff8940ecfd60f23d01836408242b30f1a708d93198393a80b4"}, + {file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:68f68d13f2e1cb95163fa3b4db4bf9a159a418f5f6e7242564fc75fcae667fd0"}, + {file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a3d1fae9863299076f05cb8a778c467578262fae09f9dc0ee9b12eb4268ce663"}, + {file = 
"cryptography-46.0.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c4143987a42a2397f2fc3b4d7e3a7d313fbe684f67ff443999e803dd75a76826"}, + {file = "cryptography-46.0.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:7d731d4b107030987fd61a7f8ab512b25b53cef8f233a97379ede116f30eb67d"}, + {file = "cryptography-46.0.5-cp314-cp314t-win32.whl", hash = "sha256:c3bcce8521d785d510b2aad26ae2c966092b7daa8f45dd8f44734a104dc0bc1a"}, + {file = "cryptography-46.0.5-cp314-cp314t-win_amd64.whl", hash = "sha256:4d8ae8659ab18c65ced284993c2265910f6c9e650189d4e3f68445ef82a810e4"}, + {file = "cryptography-46.0.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4108d4c09fbbf2789d0c926eb4152ae1760d5a2d97612b92d508d96c861e4d31"}, + {file = "cryptography-46.0.5-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1f30a86d2757199cb2d56e48cce14deddf1f9c95f1ef1b64ee91ea43fe2e18"}, + {file = "cryptography-46.0.5-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:039917b0dc418bb9f6edce8a906572d69e74bd330b0b3fea4f79dab7f8ddd235"}, + {file = "cryptography-46.0.5-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ba2a27ff02f48193fc4daeadf8ad2590516fa3d0adeeb34336b96f7fa64c1e3a"}, + {file = "cryptography-46.0.5-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:61aa400dce22cb001a98014f647dc21cda08f7915ceb95df0c9eaf84b4b6af76"}, + {file = "cryptography-46.0.5-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ce58ba46e1bc2aac4f7d9290223cead56743fa6ab94a5d53292ffaac6a91614"}, + {file = "cryptography-46.0.5-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:420d0e909050490d04359e7fdb5ed7e667ca5c3c402b809ae2563d7e66a92229"}, + {file = "cryptography-46.0.5-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:582f5fcd2afa31622f317f80426a027f30dc792e9c80ffee87b993200ea115f1"}, + {file = "cryptography-46.0.5-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:bfd56bb4b37ed4f330b82402f6f435845a5f5648edf1ad497da51a8452d5d62d"}, + 
{file = "cryptography-46.0.5-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:a3d507bb6a513ca96ba84443226af944b0f7f47dcc9a399d110cd6146481d24c"}, + {file = "cryptography-46.0.5-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f16fbdf4da055efb21c22d81b89f155f02ba420558db21288b3d0035bafd5f4"}, + {file = "cryptography-46.0.5-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ced80795227d70549a411a4ab66e8ce307899fad2220ce5ab2f296e687eacde9"}, + {file = "cryptography-46.0.5-cp38-abi3-win32.whl", hash = "sha256:02f547fce831f5096c9a567fd41bc12ca8f11df260959ecc7c3202555cc47a72"}, + {file = "cryptography-46.0.5-cp38-abi3-win_amd64.whl", hash = "sha256:556e106ee01aa13484ce9b0239bca667be5004efb0aabbed28d353df86445595"}, + {file = "cryptography-46.0.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:3b4995dc971c9fb83c25aa44cf45f02ba86f71ee600d81091c2f0cbae116b06c"}, + {file = "cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bc84e875994c3b445871ea7181d424588171efec3e185dced958dad9e001950a"}, + {file = "cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2ae6971afd6246710480e3f15824ed3029a60fc16991db250034efd0b9fb4356"}, + {file = "cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d861ee9e76ace6cf36a6a89b959ec08e7bc2493ee39d07ffe5acb23ef46d27da"}, + {file = "cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:2b7a67c9cd56372f3249b39699f2ad479f6991e62ea15800973b956f4b73e257"}, + {file = "cryptography-46.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:8456928655f856c6e1533ff59d5be76578a7157224dbd9ce6872f25055ab9ab7"}, + {file = "cryptography-46.0.5.tar.gz", hash = "sha256:abace499247268e3757271b2f1e244b36b06f8515cf27c4d49468fc9eb16e93d"}, +] + +[package.dependencies] +cffi = {version = ">=2.0.0", markers = "python_full_version >= \"3.9.0\" and platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx 
(>=5.3.0)", "sphinx-inline-tabs", "sphinx-rtd-theme (>=3.0.0)"] +docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"] +nox = ["nox[uv] (>=2024.4.15)"] +pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.14)", "ruff (>=0.11.11)"] +sdist = ["build (>=1.0.0)"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi (>=2024)", "cryptography-vectors (==46.0.5)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "idna" +version = "3.11" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea"}, + {file = "idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "jinja2" +version = "3.1.6" +description = "A very fast and expressive template engine." 
+optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, + {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jmespath" +version = "1.1.0" +description = "JSON Matching Expressions" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "jmespath-1.1.0-py3-none-any.whl", hash = "sha256:a5663118de4908c91729bea0acadca56526eb2698e83de10cd116ae0f4e97c64"}, + {file = "jmespath-1.1.0.tar.gz", hash = "sha256:472c87d80f36026ae83c6ddd0f1d05d4e510134ed462851fd5f754c8c3cbb88d"}, +] + +[[package]] +name = "markupsafe" +version = "3.0.3" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "markupsafe-3.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f981d352f04553a7171b8e44369f2af4055f888dfb147d55e42d29e29e74559"}, + {file = "markupsafe-3.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c1493fb6e50ab01d20a22826e57520f1284df32f2d8601fdd90b6304601419"}, + {file = "markupsafe-3.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ba88449deb3de88bd40044603fafffb7bc2b055d626a330323a9ed736661695"}, + {file = "markupsafe-3.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f42d0984e947b8adf7dd6dde396e720934d12c506ce84eea8476409563607591"}, + {file = "markupsafe-3.0.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c0c0b3ade1c0b13b936d7970b1d37a57acde9199dc2aecc4c336773e1d86049c"}, + {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:0303439a41979d9e74d18ff5e2dd8c43ed6c6001fd40e5bf2e43f7bd9bbc523f"}, + {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:d2ee202e79d8ed691ceebae8e0486bd9a2cd4794cec4824e1c99b6f5009502f6"}, + {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:177b5253b2834fe3678cb4a5f0059808258584c559193998be2601324fdeafb1"}, + {file = "markupsafe-3.0.3-cp310-cp310-win32.whl", hash = "sha256:2a15a08b17dd94c53a1da0438822d70ebcd13f8c3a95abe3a9ef9f11a94830aa"}, + {file = "markupsafe-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:c4ffb7ebf07cfe8931028e3e4c85f0357459a3f9f9490886198848f4fa002ec8"}, + {file = "markupsafe-3.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:e2103a929dfa2fcaf9bb4e7c091983a49c9ac3b19c9061b6d5427dd7d14d81a1"}, + {file = "markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad"}, + {file = "markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a"}, + {file = "markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50"}, + {file = "markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf"}, + {file = "markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f"}, + {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a"}, + {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115"}, + {file = 
"markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a"}, + {file = "markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19"}, + {file = "markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01"}, + {file = "markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c"}, + {file = "markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e"}, + {file = "markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce"}, + {file = "markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d"}, + {file = "markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d"}, + {file = "markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a"}, + {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b"}, + {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f"}, + {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b"}, + {file = "markupsafe-3.0.3-cp312-cp312-win32.whl", hash = 
"sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d"}, + {file = "markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c"}, + {file = "markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f"}, + {file = "markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795"}, + {file = "markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219"}, + {file = "markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6"}, + {file = "markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676"}, + {file = "markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9"}, + {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1"}, + {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc"}, + {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12"}, + {file = "markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed"}, + {file = "markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5"}, + {file = "markupsafe-3.0.3-cp313-cp313-win_arm64.whl", 
hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485"}, + {file = "markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73"}, + {file = "markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37"}, + {file = "markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19"}, + {file = "markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025"}, + {file = "markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6"}, + {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f"}, + {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb"}, + {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009"}, + {file = "markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354"}, + {file = "markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218"}, + {file = "markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287"}, + {file = "markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe"}, + {file = 
"markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026"}, + {file = "markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737"}, + {file = "markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97"}, + {file = "markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d"}, + {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda"}, + {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf"}, + {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe"}, + {file = "markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9"}, + {file = "markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581"}, + {file = "markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4"}, + {file = "markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab"}, + {file = "markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175"}, + {file = "markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634"}, + {file = "markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50"}, + {file = "markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e"}, + {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5"}, + {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523"}, + {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc"}, + {file = "markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d"}, + {file = "markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9"}, + {file = "markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa"}, + {file = "markupsafe-3.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15d939a21d546304880945ca1ecb8a039db6b4dc49b2c5a400387cdae6a62e26"}, + {file = "markupsafe-3.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f71a396b3bf33ecaa1626c255855702aca4d3d9fea5e051b41ac59a9c1c41edc"}, + {file = "markupsafe-3.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f4b68347f8c5eab4a13419215bdfd7f8c9b19f2b25520968adfad23eb0ce60c"}, + {file = "markupsafe-3.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e8fc20152abba6b83724d7ff268c249fa196d8259ff481f3b1476383f8f24e42"}, + 
{file = "markupsafe-3.0.3-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:949b8d66bc381ee8b007cd945914c721d9aba8e27f71959d750a46f7c282b20b"}, + {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:3537e01efc9d4dccdf77221fb1cb3b8e1a38d5428920e0657ce299b20324d758"}, + {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:591ae9f2a647529ca990bc681daebdd52c8791ff06c2bfa05b65163e28102ef2"}, + {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a320721ab5a1aba0a233739394eb907f8c8da5c98c9181d1161e77a0c8e36f2d"}, + {file = "markupsafe-3.0.3-cp39-cp39-win32.whl", hash = "sha256:df2449253ef108a379b8b5d6b43f4b1a8e81a061d6537becd5582fba5f9196d7"}, + {file = "markupsafe-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:7c3fb7d25180895632e5d3148dbdc29ea38ccb7fd210aa27acbd1201a1902c6e"}, + {file = "markupsafe-3.0.3-cp39-cp39-win_arm64.whl", hash = "sha256:38664109c14ffc9e7437e86b4dceb442b0096dfe3541d7864d9cbe1da4cf36c8"}, + {file = "markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698"}, +] + +[[package]] +name = "moto" +version = "5.1.21" +description = "A library that allows you to easily mock out tests based on AWS infrastructure" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "moto-5.1.21-py3-none-any.whl", hash = "sha256:311a30095b08b39dd2707f161f1440d361684fe0090b9fd0751dfd1c9b022445"}, + {file = "moto-5.1.21.tar.gz", hash = "sha256:713dde46e71e2714fa9a29eec513ec618d35e1d84c256331b5aab3f30692feeb"}, +] + +[package.dependencies] +boto3 = ">=1.9.201" +botocore = ">=1.20.88,<1.35.45 || >1.35.45,<1.35.46 || >1.35.46" +cryptography = ">=35.0.0" +Jinja2 = ">=2.10.1" +python-dateutil = ">=2.1,<3.0.0" +requests = ">=2.5" +responses = ">=0.15.0,<0.25.5 || >0.25.5" +werkzeug = ">=0.5,<2.2.0 || >2.2.0,<2.2.1 || >2.2.1" +xmltodict = "*" + +[package.extras] +all = ["PyYAML (>=5.1)", 
"antlr4-python3-runtime", "aws-sam-translator (<=1.103.0)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0,<=1.41.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsonpath_ng", "jsonschema", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.6.3)", "pydantic (<=2.12.4)", "pyparsing (>=3.0.7)", "setuptools"] +apigateway = ["PyYAML (>=5.1)", "joserfc (>=0.9.0)", "openapi-spec-validator (>=0.5.0)"] +apigatewayv2 = ["PyYAML (>=5.1)", "openapi-spec-validator (>=0.5.0)"] +appsync = ["graphql-core"] +awslambda = ["docker (>=3.0.0)"] +batch = ["docker (>=3.0.0)"] +cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0,<=1.41.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.6.3)", "pyparsing (>=3.0.7)", "setuptools"] +cognitoidp = ["joserfc (>=0.9.0)"] +dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.6.3)"] +dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.6.3)"] +events = ["jsonpath_ng"] +glue = ["pyparsing (>=3.0.7)"] +proxy = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-sam-translator (<=1.103.0)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0,<=1.41.0)", "docker (>=2.5.1)", "graphql-core", "joserfc (>=0.9.0)", "jsonpath_ng", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.6.3)", "pydantic (<=2.12.4)", "pyparsing (>=3.0.7)", "setuptools"] +quicksight = ["jsonschema"] +resourcegroupstaggingapi = ["PyYAML (>=5.1)", "cfn-lint (>=0.40.0,<=1.41.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.6.3)", "pyparsing (>=3.0.7)"] +s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.6.3)"] +s3crc32c = ["PyYAML (>=5.1)", "crc32c", "py-partiql-parser (==0.6.3)"] +server = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-sam-translator (<=1.103.0)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0,<=1.41.0)", 
"docker (>=3.0.0)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "joserfc (>=0.9.0)", "jsonpath_ng", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.6.3)", "pydantic (<=2.12.4)", "pyparsing (>=3.0.7)", "setuptools"] +ssm = ["PyYAML (>=5.1)"] +stepfunctions = ["antlr4-python3-runtime", "jsonpath_ng"] +xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] + +[[package]] +name = "mypy-boto3-dynamodb" +version = "1.42.41" +description = "Type annotations for boto3 DynamoDB 1.42.41 service generated with mypy-boto3-builder 8.12.0" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "mypy_boto3_dynamodb-1.42.41-py3-none-any.whl", hash = "sha256:0e0f33d9babb17e7b1308e0dba3dcf1145115a0ceb354b5426e44cc68c44a5a1"}, + {file = "mypy_boto3_dynamodb-1.42.41.tar.gz", hash = "sha256:6102c5ecf25b1ef485274ca9c6af79eb76f66200cd075515edd2b96565f9892d"}, +] + +[package.dependencies] +typing-extensions = {version = "*", markers = "python_version < \"3.12\""} + +[[package]] +name = "pycparser" +version = "3.0" +description = "C parser in Python" +optional = false +python-versions = ">=3.10" +groups = ["main"] +markers = "platform_python_implementation != \"PyPy\" and implementation_name != \"PyPy\"" +files = [ + {file = "pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992"}, + {file = "pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29"}, +] + +[[package]] +name = "pyjwt" +version = "2.11.0" +description = "JSON Web Token implementation in Python" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pyjwt-2.11.0-py3-none-any.whl", hash = "sha256:94a6bde30eb5c8e04fee991062b534071fd1439ef58d2adc9ccb823e7bcd0469"}, + {file = "pyjwt-2.11.0.tar.gz", hash = "sha256:35f95c1f0fbe5d5ba6e43f00271c275f7a1a4db1dab27bf708073b75318ea623"}, +] + +[package.extras] +crypto = ["cryptography (>=3.4.0)"] 
+dev = ["coverage[toml] (==7.10.7)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=8.4.2,<9.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==7.10.7)", "pytest (>=8.4.2,<9.0.0)"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pyyaml" +version = "6.0.3" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "PyYAML-6.0.3-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:c2514fceb77bc5e7a2f7adfaa1feb2fb311607c9cb518dbc378688ec73d8292f"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c57bb8c96f6d1808c030b1687b9b5fb476abaa47f0db9c0101f5e9f394e97f4"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efd7b85f94a6f21e4932043973a7ba2613b059c4a000551892ac9f1d11f5baf3"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22ba7cfcad58ef3ecddc7ed1db3409af68d023b7f940da23c6c2a1890976eda6"}, + {file = "PyYAML-6.0.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6344df0d5755a2c9a276d4473ae6b90647e216ab4757f8426893b5dd2ac3f369"}, + {file = "PyYAML-6.0.3-cp38-cp38-win32.whl", hash = "sha256:3ff07ec89bae51176c0549bc4c63aa6202991da2d9a6129d7aef7f1407d3f295"}, + {file 
= "PyYAML-6.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:5cf4e27da7e3fbed4d6c3d8e797387aaad68102272f8f9752883bc32d61cb87b"}, + {file = "pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b"}, + {file = "pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b"}, + {file = "pyyaml-6.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0"}, + {file = "pyyaml-6.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69"}, + {file = "pyyaml-6.0.3-cp310-cp310-win32.whl", hash = "sha256:28c8d926f98f432f88adc23edf2e6d4921ac26fb084b028c733d01868d19007e"}, + {file = "pyyaml-6.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:bdb2c67c6c1390b63c6ff89f210c8fd09d9a1217a465701eac7316313c915e4c"}, + {file = "pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e"}, + {file = "pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d"}, + {file = "pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a"}, + {file = "pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4"}, + {file = "pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b"}, + {file = "pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf"}, + {file = "pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196"}, + {file = "pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc"}, + {file = "pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e"}, + {file = "pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea"}, + {file = "pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5"}, + {file = "pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b"}, + {file = "pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd"}, + {file = "pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8"}, + {file = "pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6"}, + {file = "pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6"}, + {file = "pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be"}, + {file = "pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26"}, + {file = "pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = 
"sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c"}, + {file = "pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb"}, + {file = "pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac"}, + {file = "pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5"}, + {file = "pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764"}, + {file = "pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35"}, + {file = "pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac"}, + {file = "pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3"}, + {file = "pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3"}, + {file = "pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba"}, + {file = 
"pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c"}, + {file = "pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065"}, + {file = "pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65"}, + {file = "pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9"}, + {file = "pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b"}, + {file = "pyyaml-6.0.3-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:b865addae83924361678b652338317d1bd7e79b1f4596f96b96c77a5a34b34da"}, + {file = "pyyaml-6.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c3355370a2c156cffb25e876646f149d5d68f5e0a3ce86a5084dd0b64a994917"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3c5677e12444c15717b902a5798264fa7909e41153cdf9ef7ad571b704a63dd9"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5ed875a24292240029e4483f9d4a4b8a1ae08843b9c54f43fcc11e404532a8a5"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0150219816b6a1fa26fb4699fb7daa9caf09eb1999f3b70fb6e786805e80375a"}, + {file = 
"pyyaml-6.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fa160448684b4e94d80416c0fa4aac48967a969efe22931448d853ada8baf926"}, + {file = "pyyaml-6.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:27c0abcb4a5dac13684a37f76e701e054692a9b2d3064b70f5e4eb54810553d7"}, + {file = "pyyaml-6.0.3-cp39-cp39-win32.whl", hash = "sha256:1ebe39cb5fc479422b83de611d14e2c0d3bb2a18bbcb01f229ab3cfbd8fee7a0"}, + {file = "pyyaml-6.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:2e71d11abed7344e42a8849600193d15b6def118602c4c176f748e4583246007"}, + {file = "pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f"}, +] + +[[package]] +name = "requests" +version = "2.32.5" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6"}, + {file = "requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset_normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "responses" +version = "0.26.0" +description = "A utility library for mocking out the `requests` Python library." 
+optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "responses-0.26.0-py3-none-any.whl", hash = "sha256:03ec4409088cd5c66b71ecbbbd27fe2c58ddfad801c66203457b3e6a04868c37"}, + {file = "responses-0.26.0.tar.gz", hash = "sha256:c7f6923e6343ef3682816ba421c006626777893cb0d5e1434f674b649bac9eb4"}, +] + +[package.dependencies] +pyyaml = "*" +requests = ">=2.30.0,<3.0" +urllib3 = ">=1.25.10,<3.0" + +[package.extras] +tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli ; python_version < \"3.11\"", "tomli-w", "types-PyYAML", "types-requests"] + +[[package]] +name = "s3transfer" +version = "0.16.0" +description = "An Amazon S3 Transfer Manager" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "s3transfer-0.16.0-py3-none-any.whl", hash = "sha256:18e25d66fed509e3868dc1572b3f427ff947dd2c56f844a5bf09481ad3f3b2fe"}, + {file = "s3transfer-0.16.0.tar.gz", hash = "sha256:8e990f13268025792229cd52fa10cb7163744bf56e719e0b9cb925ab79abf920"}, +] + +[package.dependencies] +botocore = ">=1.37.4,<2.0a.0" + +[package.extras] +crt = ["botocore[crt] (>=1.37.4,<2.0a.0)"] + +[[package]] +name = "six" +version = "1.17.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +files = [ + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +description = "Backported and Experimental Type Hints for Python 3.9+" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = 
"sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, + {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, +] + +[[package]] +name = "urllib3" +version = "2.6.3" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4"}, + {file = "urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed"}, +] + +[package.extras] +brotli = ["brotli (>=1.2.0) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=1.2.0.0) ; platform_python_implementation != \"CPython\""] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["backports-zstd (>=1.0.0) ; python_version < \"3.14\""] + +[[package]] +name = "werkzeug" +version = "3.1.5" +description = "The comprehensive WSGI web application library." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "werkzeug-3.1.5-py3-none-any.whl", hash = "sha256:5111e36e91086ece91f93268bb39b4a35c1e6f1feac762c9c822ded0a4e322dc"}, + {file = "werkzeug-3.1.5.tar.gz", hash = "sha256:6a548b0e88955dd07ccb25539d7d0cc97417ee9e179677d22c7041c8f078ce67"}, +] + +[package.dependencies] +markupsafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog (>=2.3)"] + +[[package]] +name = "xmltodict" +version = "1.0.3" +description = "Makes working with XML feel like you are working with JSON" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "xmltodict-1.0.3-py3-none-any.whl", hash = "sha256:35d65d5c08f2a1121df338a0c4e49ca638480fa7c1b899ded45e0759bf32e40e"}, + {file = "xmltodict-1.0.3.tar.gz", hash = "sha256:3bf1f49c7836df34cf6d9cc7e690c4351f7dfff2ab0b8a1988bba4a9b9474909"}, +] + +[package.extras] +test = ["pytest", "pytest-cov"] + [metadata] lock-version = "2.1" python-versions = "~3.11" -content-hash = "0a2b2f2ca62bb0da43789e3a2c1c8c943545f1461b988de1c012d9de64cae545" +content-hash = "06d376648a4c5e1c740cd23a0bb066222c08baebca729d2eb422fe6b53e686bc" diff --git a/lambdas/mns_publisher/pyproject.toml b/lambdas/mns_publisher/pyproject.toml index 009b8bc9a0..2bafd9e372 100644 --- a/lambdas/mns_publisher/pyproject.toml +++ b/lambdas/mns_publisher/pyproject.toml @@ -13,7 +13,18 @@ packages = [ python = "~3.11" aws-lambda-typing = "~2.20.0" coverage = "^7.13.2" +pyjwt = "^2.10.1" +requests = "^2.31.0" +boto3 = "~1.42.37" +mypy-boto3-dynamodb = "^1.42.33" +moto = "~5.1.20" +cache = "^1.0.3" [build-system] requires = ["poetry-core >= 1.5.0"] build-backend = "poetry.core.masonry.api" + +[dependency-groups] +dev = [ + "responses (>=0.26.0,<0.27.0)" +] diff --git a/lambdas/mns_publisher/src/constants.py b/lambdas/mns_publisher/src/constants.py new file mode 100644 index 0000000000..28b968a190 --- /dev/null +++ b/lambdas/mns_publisher/src/constants.py @@ -0,0 +1,5 @@ +# Static 
constants for the MNS notification creation process +SPEC_VERSION = "1.0" +IMMUNISATION_TYPE = "imms-vaccinations-1" + +DYNAMO_DB_TYPE_DESCRIPTORS = ("S", "N", "BOOL", "M", "L") diff --git a/lambdas/mns_publisher/src/create_notification.py b/lambdas/mns_publisher/src/create_notification.py new file mode 100644 index 0000000000..8aab0b4026 --- /dev/null +++ b/lambdas/mns_publisher/src/create_notification.py @@ -0,0 +1,130 @@ +import json +import os +import uuid +from datetime import datetime +from typing import Any + +from aws_lambda_typing.events.sqs import SQSMessage + +from common.api_clients.constants import MnsNotificationPayload +from common.api_clients.get_pds_details import pds_get_patient_details +from common.clients import logger +from common.get_service_url import get_service_url +from constants import DYNAMO_DB_TYPE_DESCRIPTORS, IMMUNISATION_TYPE, SPEC_VERSION + +IMMUNIZATION_ENV = os.getenv("IMMUNIZATION_ENV") +IMMUNIZATION_BASE_PATH = os.getenv("IMMUNIZATION_BASE_PATH") + + +def create_mns_notification(sqs_event: SQSMessage) -> MnsNotificationPayload: + """Create a notification payload for MNS.""" + immunisation_url = get_service_url(IMMUNIZATION_ENV, IMMUNIZATION_BASE_PATH) + + body = json.loads(sqs_event.get("body", "{}")) + new_image = body.get("dynamodb", {}).get("NewImage", {}) + imms_id = _unwrap_dynamodb_value(new_image.get("ImmsID", {})) + supplier_system = _unwrap_dynamodb_value(new_image.get("SupplierSystem", {})) + vaccine_type = _unwrap_dynamodb_value(new_image.get("VaccineType", {})) + operation = _unwrap_dynamodb_value(new_image.get("Operation", {})) + + imms_map = new_image.get("Imms", {}).get("M", {}) + nhs_number = _unwrap_dynamodb_value(imms_map.get("NHS_NUMBER", {})) + if not nhs_number: + logger.error("Missing required field: Nhs Number") + raise ValueError("NHS number is required to create MNS notification") + + person_dob = _unwrap_dynamodb_value(imms_map.get("PERSON_DOB", {})) + date_and_time = 
_unwrap_dynamodb_value(imms_map.get("DATE_AND_TIME", {})) + site_code = _unwrap_dynamodb_value(imms_map.get("SITE_CODE", {})) + + patient_age = calculate_age_at_vaccination(person_dob, date_and_time) + gp_ods_code = get_practitioner_details_from_pds(nhs_number) + + return { + "specversion": SPEC_VERSION, + "id": str(uuid.uuid4()), + "source": immunisation_url, + "type": IMMUNISATION_TYPE, + "time": date_and_time, + "subject": nhs_number, + "dataref": f"{immunisation_url}/Immunization/{imms_id}", + "filtering": { + "generalpractitioner": gp_ods_code, + "sourceorganisation": site_code, + "sourceapplication": supplier_system, + "subjectage": patient_age, + "immunisationtype": vaccine_type.upper(), + "action": operation, + }, + } + + +def calculate_age_at_vaccination(birth_date: str, vaccination_date: str) -> int: + """ + Calculate patient age in years at time of vaccination. + Expects dates in format: YYYYMMDD or YYYYMMDDThhmmsszz + """ + birth_date_str = birth_date[:8] if len(birth_date) >= 8 else birth_date + vacc_date_str = vaccination_date[:8] if len(vaccination_date) >= 8 else vaccination_date + + date_of_birth = datetime.strptime(birth_date_str, "%Y%m%d") + date_of_vaccination = datetime.strptime(vacc_date_str, "%Y%m%d") + + age_in_year = date_of_vaccination.year - date_of_birth.year + if (date_of_vaccination.month, date_of_vaccination.day) < (date_of_birth.month, date_of_birth.day): + age_in_year -= 1 + + return age_in_year + + +def get_practitioner_details_from_pds(nhs_number: str) -> str | None: + patient_details = pds_get_patient_details(nhs_number) + if not patient_details: + logger.info("Unable to retrieve patient details") + return None + + general_practitioners = patient_details.get("generalPractitioner", []) + if not general_practitioners or len(general_practitioners) == 0: + logger.warning("No GP details found for patient") + return None + + patient_gp = general_practitioners[0] + patient_gp_identifier = patient_gp.get("identifier", {}) + + gp_ods_code 
= patient_gp_identifier.get("value") + if not gp_ods_code: + logger.warning("GP ODS code not found in practitioner details") + return None + + # Check if registration is current + period = patient_gp_identifier.get("period", {}) + gp_period_end_date = period.get("end", None) + + if gp_period_end_date: + # Parse end date (format: YYYY-MM-DD) + end_date = datetime.strptime(gp_period_end_date, "%Y-%m-%d").date() + today = datetime.now().date() + + if end_date < today: + logger.warning("No current GP registration found for patient") + return None + + return gp_ods_code + + +def _unwrap_dynamodb_value(value: dict) -> Any: + """ + Unwrap DynamoDB type descriptor to get the actual value. + DynamoDB types: S (String), N (Number), BOOL, M (Map), L (List), NULL + """ + if not isinstance(value, dict): + return value + + if "NULL" in value: + return None + + for key in DYNAMO_DB_TYPE_DESCRIPTORS: + if key in value: + return value[key] + + return value diff --git a/lambdas/mns_publisher/src/lambda_handler.py b/lambdas/mns_publisher/src/lambda_handler.py index 0dbec66812..81e1bff27c 100644 --- a/lambdas/mns_publisher/src/lambda_handler.py +++ b/lambdas/mns_publisher/src/lambda_handler.py @@ -1,10 +1,9 @@ from aws_lambda_typing import context, events +from process_records import process_records -def lambda_handler(event: events.SQSEvent, _: context.Context) -> bool: - event_records = event.get("Records", []) - for record in event_records: - print(record) +def lambda_handler(event: events.SQSEvent, _: context.Context) -> dict[str, list]: + event_records = event.get("Records", []) - return True + return process_records(event_records) diff --git a/lambdas/mns_publisher/src/process_records.py b/lambdas/mns_publisher/src/process_records.py new file mode 100644 index 0000000000..e55924d704 --- /dev/null +++ b/lambdas/mns_publisher/src/process_records.py @@ -0,0 +1,88 @@ +import json +import os +from typing import Tuple + +from aws_lambda_typing.events.sqs import SQSMessage + +from 
common.api_clients.mns_service import MnsService +from common.api_clients.mns_setup import get_mns_service +from common.api_clients.mock_mns_service import MockMnsService +from common.clients import logger +from create_notification import create_mns_notification + +mns_env = os.getenv("MNS_ENV", "int") +MNS_TEST_QUEUE_URL = os.getenv("MNS_TEST_QUEUE_URL") + + +def process_records(records: list[SQSMessage]) -> dict[str, list]: + """ + Process multiple SQS records. + Args: records: List of SQS records to process + Returns: List of failed item identifiers for partial batch failure + """ + batch_item_failures = [] + mns_service = get_mns_service(mns_env=mns_env) + + for record in records: + try: + process_record(record, mns_service) + except Exception: + message_id = record.get("messageId", "unknown") + batch_item_failures.append({"itemIdentifier": message_id}) + logger.exception("Failed to process record", extra={"message_id": message_id}) + + if batch_item_failures: + logger.warning(f"Batch completed with {len(batch_item_failures)} failures") + else: + logger.info(f"Successfully processed all {len(records)} messages") + + return {"batchItemFailures": batch_item_failures} + + +def process_record(record: SQSMessage, mns_service: MnsService | MockMnsService) -> None: + """ + Process a single SQS record. 
+ Args: + record: SQS record containing DynamoDB stream data + mns_service: MNS service instance for publishing + Returns: Failure dict with itemIdentifier if processing failed, None if successful + """ + message_id, immunisation_id = extract_trace_ids(record) + notification_id = None + + mns_notification_payload = create_mns_notification(record) + notification_id = mns_notification_payload.get("id") + + action_flag = mns_notification_payload.get("filtering", {}).get("action") + logger.info( + "Processing message", + extra={ + "notification_id": notification_id, + "message_id": message_id, + "immunisation_id": immunisation_id, + "action_flag": action_flag, + }, + ) + + mns_service.publish_notification(mns_notification_payload) + logger.info("Successfully created MNS notification", extra={"mns_notification_id": notification_id}) + + +def extract_trace_ids(record: SQSMessage) -> Tuple[str, str | None]: + """ + Extract identifiers for tracing from SQS record. + Returns: Tuple of (message_id, immunisation_id) + """ + sqs_message_id = record.get("messageId", "unknown") + immunisation_id = None + + try: + sqs_event_body = record.get("body", {}) + if isinstance(sqs_event_body, str): + sqs_event_body = json.loads(sqs_event_body) + + immunisation_id = sqs_event_body.get("dynamodb", {}).get("NewImage", {}).get("ImmsID", {}).get("S") + except Exception as e: + logger.warning(f"Could not extract immunisation_id: {immunisation_id}: {e}") + + return sqs_message_id, immunisation_id diff --git a/lambdas/mns_publisher/tests/sample_data/sqs_event.json b/lambdas/mns_publisher/tests/sample_data/sqs_event.json new file mode 100644 index 0000000000..0363906c7f --- /dev/null +++ b/lambdas/mns_publisher/tests/sample_data/sqs_event.json @@ -0,0 +1,118 @@ +{ + "messageId": "98ed30eb-829f-41df-8a73-57fef70cf161", + "receiptHandle": 
"AQEBpFIQq7dcCyEquMsKkFgM3iROiAVOLvq9CBwaFy7EkVHpqu5+leD7FEc/7KexUF91w8cZEn1XSSqUjapPq45SE7aAxzCOVjFHusYrYwcSBYg10mP60vXwVu3Qzp+F2T/52ONt75pStSJhm1fMXq6/ZkmYbpcTX2SLdL/5Yfx/rRo4uXFnPjo5VFMpH2yFDJnRnHJ4coHwCogvwuzp68cxU/zJOOaMKPQOCpYJMULkD8ITF/SAMWtzr6XSpgIWFUl+K9HFqDtljg5mv4oE34v9k+GRj0WNQVgjuSLCTYcGpYg75Kh6Rn9o7G9aH4fqczFQbzM0uYWmSSH2SNA4r6raupxTl8gXbG1Uzgq1rfhNxtMtvB4dSqR82je6IVf5lZ0Z+YTCy/Rqyr9SF9mDyFB5VjmqfN0MKENXKjJ/G7tqWoo=", + + "body": { + "eventID": "b1ba2a48eae68bf43a8cb49b400788c6", + "eventName": "INSERT", + "eventVersion": "1.1", + "eventSource": "aws:dynamodb", + "awsRegion": "eu-west-2", + + "dynamodb": { + "ApproximateCreationDateTime": 1770918337, + + "Keys": { + "PK": { "S": "a841e2c1dd0ecd2f60113890cc02b130" } + }, + + "NewImage": { + "ImmsID": { "S": "d058014c-b0fd-4471-8db9-3316175eb825" }, + "VaccineType": { "S": "hib" }, + "SupplierSystem": { "S": "TPP" }, + "DateTimeStamp": { "S": "2026-02-12T17:45:37+00:00" }, + + "Imms": { + "M": { + "UNIQUE_ID": { "S": "ae4f6b62-a419-41a8-b5e2-a5228b5f9e41" }, + "UNIQUE_ID_URI": { "S": "https://supplierABC/identifiers/vacc" }, + + "PERSON_FORENAME": { "S": "PEILL" }, + "PERSON_SURNAME": { "S": "LIZZY" }, + "PERSON_DOB": { "S": "20040609" }, + "PERSON_GENDER_CODE": { "S": "2" }, + "PERSON_POSTCODE": { "S": "M7 4ES" }, + "NHS_NUMBER": { "S": "9481152782" }, + + "PERFORMING_PROFESSIONAL_FORENAME": { "S": "Darren" }, + "PERFORMING_PROFESSIONAL_SURNAME": { "S": "Furlong" }, + + "VACCINE_TYPE": { "S": "hib" }, + "VACCINE_PRODUCT_CODE": { "S": "9903611000001100" }, + "VACCINE_PRODUCT_TERM": { + "S": "Menitorix powder and solvent for solution for injection 0.5ml vials (GlaxoSmithKline)" + }, + "VACCINE_MANUFACTURER": { "S": "Sanofi" }, + + "VACCINATION_PROCEDURE_CODE": { "S": "712833000" }, + "VACCINATION_PROCEDURE_TERM": { + "S": "Haemophilus influenzae type B Meningitis C (HibMenC) vaccination codes" + }, + + "INDICATION_CODE": { "S": "443684005" }, + + "SITE_OF_VACCINATION_CODE": { "S": "368208006" }, + 
"SITE_OF_VACCINATION_TERM": { + "S": "Left upper arm structure (body structure)" + }, + + "ROUTE_OF_VACCINATION_CODE": { "S": "78421000" }, + "ROUTE_OF_VACCINATION_TERM": { + "S": "Intramuscular route (qualifier value)" + }, + + "DOSE_SEQUENCE": { "S": "1" }, + "DOSE_AMOUNT": { "N": "0.3" }, + "DOSE_UNIT_CODE": { "S": "2622896019" }, + "DOSE_UNIT_TERM": { "S": "Inhalation - unit of product usage" }, + + "BATCH_NUMBER": { "S": "688346" }, + "EXPIRY_DATE": { "S": "20280212" }, + + "DATE_AND_TIME": { "S": "20260212T17443700" }, + "RECORDED_DATE": { "S": "20260212" }, + + "SITE_CODE": { "S": "B0C4P" }, + "SITE_CODE_TYPE_URI": { + "S": "https://fhir.nhs.uk/Id/ods-organization-code" + }, + + "LOCATION_CODE": { "S": "X99999" }, + "LOCATION_CODE_TYPE_URI": { + "S": "https://fhir.nhs.uk/Id/ods-organization-code" + }, + + "PRIMARY_SOURCE": { "S": "TRUE" }, + "ACTION_FLAG": { "S": "NEW" }, + + "CONVERSION_ERRORS": { "L": [] } + } + }, + + "Operation": { "S": "CREATE" }, + "PK": { "S": "a841e2c1dd0ecd2f60113890cc02b130" }, + "ExpiresAt": { "N": "1773510337" }, + "Source": { "S": "IEDS" } + }, + + "SequenceNumber": "42400003126610103283304", + "SizeBytes": 1463, + "StreamViewType": "NEW_IMAGE" + }, + + "eventSourceARN": "arn:aws:dynamodb:eu-west-2:345594581768:table/imms-pr-1203-delta/stream/2026-02-12T17:32:56.589" + }, + + "attributes": { + "ApproximateReceiveCount": "1", + "SentTimestamp": "1770994394616", + "SenderId": "AROAVA5YK2MEDW5XLAPXH:634b0edba98233009bdd0a31c220a880", + "ApproximateFirstReceiveTimestamp": "1770994394620" + }, + + "messageAttributes": {}, + "md5OfBody": "f89442a426edfc37ca55f86e9cbb61bb", + "eventSource": "aws:sqs", + "eventSourceARN": "arn:aws:sqs:eu-west-2:345594581768:pr-1203-mns-outbound-events-queue", + "awsRegion": "eu-west-2" +} diff --git a/lambdas/mns_publisher/tests/test_create_notification.py b/lambdas/mns_publisher/tests/test_create_notification.py new file mode 100644 index 0000000000..179b1ee28b --- /dev/null +++ 
b/lambdas/mns_publisher/tests/test_create_notification.py @@ -0,0 +1,362 @@ +import copy +import json +import unittest +from unittest.mock import MagicMock, patch + +from constants import IMMUNISATION_TYPE, SPEC_VERSION +from create_notification import ( + _unwrap_dynamodb_value, + calculate_age_at_vaccination, + create_mns_notification, + get_practitioner_details_from_pds, +) +from test_utils import load_sample_sqs_event + + +class TestCalculateAgeAtVaccination(unittest.TestCase): + """Tests for age calculation at vaccination time.""" + + def test_age_calculation_yyyymmdd_format(self): + birth_date = "20040609" + vaccination_date = "20260212" + age = calculate_age_at_vaccination(birth_date, vaccination_date) + self.assertEqual(age, 21) + + def test_age_calculation_with_time(self): + birth_date = "20040609T120000" + vaccination_date = "20260212T174437" + age = calculate_age_at_vaccination(birth_date, vaccination_date) + self.assertEqual(age, 21) + + def test_age_calculation_after_birthday(self): + birth_date = "20040609" + vaccination_date = "20260815" + age = calculate_age_at_vaccination(birth_date, vaccination_date) + self.assertEqual(age, 22) + + def test_age_calculation_on_birthday(self): + birth_date = "20040609" + vaccination_date = "20260609" + age = calculate_age_at_vaccination(birth_date, vaccination_date) + self.assertEqual(age, 22) + + def test_age_calculation_infant(self): + birth_date = "20260609" + vaccination_date = "20260915" + age = calculate_age_at_vaccination(birth_date, vaccination_date) + self.assertEqual(age, 0) + + def test_age_calculation_leap_year_birthday(self): + birth_date = "20000229" + vaccination_date = "20240228" + age = calculate_age_at_vaccination(birth_date, vaccination_date) + self.assertEqual(age, 23) + + def test_age_calculation_same_day_different_year(self): + birth_date = "20000101" + vaccination_date = "20250101" + age = calculate_age_at_vaccination(birth_date, vaccination_date) + self.assertEqual(age, 25) + + +class 
TestCreateMnsNotification(unittest.TestCase): + """Tests for MNS notification creation.""" + + @classmethod + def setUpClass(cls): + cls.sample_sqs_event = load_sample_sqs_event() + + def setUp(self): + self.expected_gp_ods_code = "Y12345" + self.expected_immunisation_url = "https://int.api.service.nhs.uk/immunisation-fhir-api" + + @patch("create_notification.get_practitioner_details_from_pds") + @patch("create_notification.get_service_url") + @patch("create_notification.uuid.uuid4") + def test_success_create_mns_notification_complete_payload(self, mock_uuid, mock_get_service_url, mock_get_gp): + mock_uuid.return_value = MagicMock(hex="236a1d4a-5d69-4fa9-9c7f-e72bf505aa5b") + mock_get_service_url.return_value = self.expected_immunisation_url + mock_get_gp.return_value = self.expected_gp_ods_code + + result = create_mns_notification(self.sample_sqs_event) + + self.assertEqual(result["specversion"], SPEC_VERSION) + self.assertEqual(result["type"], IMMUNISATION_TYPE) + self.assertEqual(result["source"], self.expected_immunisation_url) + self.assertEqual(result["subject"], "9481152782") + + expected_dataref = f"{self.expected_immunisation_url}/Immunization/d058014c-b0fd-4471-8db9-3316175eb825" + self.assertEqual(result["dataref"], expected_dataref) + + filtering = result["filtering"] + self.assertEqual(filtering["generalpractitioner"], self.expected_gp_ods_code) + self.assertEqual(filtering["sourceorganisation"], "B0C4P") + self.assertEqual(filtering["sourceapplication"], "TPP") + self.assertEqual(filtering["immunisationtype"], "HIB") + self.assertEqual(filtering["action"], "CREATE") + self.assertEqual(filtering["subjectage"], 21) + + self.assertIn("id", result) + self.assertIsInstance(result["id"], str) + + @patch("create_notification.get_practitioner_details_from_pds") + @patch("create_notification.get_service_url") + def test_create_mns_notification_missing_nhs_number(self, mock_get_service_url, mock_get_gp): + sqs_event_data = copy.deepcopy(self.sample_sqs_event) + 
+ body = json.loads(sqs_event_data["body"]) + body["dynamodb"]["NewImage"]["Imms"]["M"]["NHS_NUMBER"]["S"] = "" + sqs_event_data["body"] = json.dumps(body) + + with self.assertRaises(ValueError) as context: + create_mns_notification(sqs_event_data) + self.assertIn("NHS number is required", str(context.exception)) + + @patch("create_notification.get_practitioner_details_from_pds") + @patch("create_notification.get_service_url") + def test_create_mns_notification_calls_get_practitioner_real_payload(self, mock_get_service_url, mock_get_gp): + mock_get_service_url.return_value = self.expected_immunisation_url + mock_get_gp.return_value = self.expected_gp_ods_code + + create_mns_notification(self.sample_sqs_event) + + mock_get_gp.assert_called_once_with("9481152782") + + @patch("create_notification.get_practitioner_details_from_pds") + @patch("create_notification.get_service_url") + def test_create_mns_notification_invalid_json_body(self, mock_get_service_url, mock_get_gp): + mock_get_service_url.return_value = self.expected_immunisation_url + mock_get_gp.return_value = self.expected_gp_ods_code + + invalid_event = {"messageId": "test-id", "body": "not valid json {"} + + with self.assertRaises(json.JSONDecodeError): + create_mns_notification(invalid_event) + + @patch("create_notification.get_practitioner_details_from_pds") + @patch("create_notification.get_service_url") + def test_create_mns_notification_pds_failure(self, mock_get_service_url, mock_get_gp): + mock_get_service_url.return_value = self.expected_immunisation_url + mock_get_gp.side_effect = Exception("PDS API unavailable") + + with self.assertRaises(Exception): + create_mns_notification(self.sample_sqs_event) + + @patch("create_notification.get_practitioner_details_from_pds") + @patch("create_notification.get_service_url") + def test_create_mns_notification_gp_not_found(self, mock_get_service_url, mock_get_gp): + mock_get_service_url.return_value = self.expected_immunisation_url + mock_get_gp.return_value = 
None + + result = create_mns_notification(self.sample_sqs_event) + + self.assertIsNone(result["filtering"]["generalpractitioner"]) + + @patch("create_notification.get_practitioner_details_from_pds") + @patch("create_notification.get_service_url") + def test_create_mns_notification_required_fields_present(self, mock_get_service_url, mock_get_gp): + mock_get_service_url.return_value = self.expected_immunisation_url + mock_get_gp.return_value = self.expected_gp_ods_code + + result = create_mns_notification(self.sample_sqs_event) + + required_fields = ["id", "source", "specversion", "type", "time", "dataref", "subject"] + for field in required_fields: + self.assertIn(field, result, f"Required field '{field}' missing") + + @patch("create_notification.get_practitioner_details_from_pds") + @patch("create_notification.get_service_url") + def test_create_mns_notification_missing_imms_data_field(self, mock_get_service_url, mock_get_gp): + mock_get_service_url.return_value = self.expected_immunisation_url + mock_get_gp.return_value = self.expected_gp_ods_code + + incomplete_event = { + "messageId": "test-id", + "body": json.dumps({"dynamodb": {"NewImage": {"ImmsID": {"S": "test-id"}}}}), + } + + with self.assertRaises((KeyError, TypeError, ValueError)): + create_mns_notification(incomplete_event) + + @patch("create_notification.get_practitioner_details_from_pds") + @patch("create_notification.get_service_url") + def test_create_mns_notification_with_update_action(self, mock_get_service_url, mock_get_gp): + mock_get_service_url.return_value = self.expected_immunisation_url + mock_get_gp.return_value = self.expected_gp_ods_code + + update_event = copy.deepcopy(self.sample_sqs_event) + + body = json.loads(update_event["body"]) + body["dynamodb"]["NewImage"]["Operation"]["S"] = "UPDATE" + update_event["body"] = json.dumps(body) + + result = create_mns_notification(update_event) + + self.assertEqual(result["filtering"]["action"], "UPDATE") + mock_get_service_url.assert_called() + 
mock_get_gp.assert_called() + + +class TestGetPractitionerDetailsFromPds(unittest.TestCase): + """Tests for get_practitioner_details_from_pds function.""" + + @patch("create_notification.pds_get_patient_details") + @patch("create_notification.logger") + def test_get_practitioner_success(self, mock_logger, mock_pds_get): + """Test successful retrieval of GP ODS code.""" + mock_pds_get.return_value = {"generalPractitioner": [{"identifier": {"value": "Y12345"}}]} + + result = get_practitioner_details_from_pds("9481152782") + + self.assertEqual(result, "Y12345") + mock_pds_get.assert_called_once_with("9481152782") + mock_logger.warning.assert_not_called() + + @patch("create_notification.pds_get_patient_details") + @patch("create_notification.logger") + def test_get_practitioner_no_gp_details(self, mock_logger, mock_pds_get): + """Test when generalPractitioner is missing.""" + mock_pds_get.return_value = {"name": "John Doe"} + + result = get_practitioner_details_from_pds("9481152782") + + self.assertIsNone(result) + mock_logger.warning.assert_called_once_with("No GP details found for patient") + + @patch("create_notification.pds_get_patient_details") + @patch("create_notification.logger") + def test_get_practitioner_gp_is_none(self, mock_logger, mock_pds_get): + """Test when generalPractitioner is None.""" + mock_pds_get.return_value = {"generalPractitioner": None} + + result = get_practitioner_details_from_pds("9481152782") + + self.assertIsNone(result) + mock_logger.warning.assert_called_once() + + @patch("create_notification.pds_get_patient_details") + @patch("create_notification.logger") + def test_get_practitioner_no_value_field(self, mock_logger, mock_pds_get): + """Test when value field is missing from identifier.""" + mock_pds_get.return_value = {"generalPractitioner": [{"identifier": {}}]} + + result = get_practitioner_details_from_pds("9481152782") + + self.assertIsNone(result) + mock_logger.warning.assert_called_with("GP ODS code not found in practitioner 
details") + + @patch("create_notification.pds_get_patient_details") + @patch("create_notification.logger") + def test_get_practitioner_empty_value(self, mock_logger, mock_pds_get): + """Test when value is empty string.""" + mock_pds_get.return_value = {"generalPractitioner": [{"identifier": {"value": ""}}]} + + result = get_practitioner_details_from_pds("9481152782") + + self.assertIsNone(result) + mock_logger.warning.assert_called_with("GP ODS code not found in practitioner details") + + @patch("create_notification.pds_get_patient_details") + @patch("create_notification.logger") + def test_get_practitioner_no_end_date(self, mock_logger, mock_pds_get): + """Test successful retrieval when no end date (current registration).""" + mock_pds_get.return_value = { + "generalPractitioner": [{"identifier": {"value": "Y12345", "period": {"start": "2024-01-01"}}}] + } + + result = get_practitioner_details_from_pds("9481152782") + + self.assertEqual(result, "Y12345") + mock_logger.warning.assert_not_called() + + @patch("create_notification.pds_get_patient_details") + @patch("create_notification.logger") + def test_get_practitioner_future_end_date(self, mock_logger, mock_pds_get): + """Test successful retrieval when end date is in the future.""" + mock_pds_get.return_value = { + "generalPractitioner": [ + {"identifier": {"value": "Y12345", "period": {"start": "2024-01-01", "end": "2030-12-31"}}} + ] + } + + result = get_practitioner_details_from_pds("9481152782") + + self.assertEqual(result, "Y12345") + mock_logger.warning.assert_not_called() + + @patch("create_notification.pds_get_patient_details") + @patch("create_notification.logger") + def test_get_practitioner_expired_registration(self, mock_logger, mock_pds_get): + """Test when GP registration has ended (expired).""" + mock_pds_get.return_value = { + "generalPractitioner": [ + {"identifier": {"value": "Y12345", "period": {"start": "2020-01-01", "end": "2023-12-31"}}} + ] + } + + result = 
get_practitioner_details_from_pds("9481152782") + + self.assertIsNone(result) + mock_logger.warning.assert_called_with("No current GP registration found for patient") + + @patch("create_notification.pds_get_patient_details") + @patch("create_notification.logger") + def test_get_practitioner_no_period_field(self, mock_logger, mock_pds_get): + """Test when period field is missing entirely.""" + mock_pds_get.return_value = {"generalPractitioner": [{"identifier": {"value": "Y12345"}}]} + + result = get_practitioner_details_from_pds("9481152782") + + self.assertEqual(result, "Y12345") + mock_logger.warning.assert_not_called() + + @patch("create_notification.pds_get_patient_details") + @patch("create_notification.logger") + def test_get_practitioner_pds_exception(self, mock_logger, mock_pds_get): + """Test when PDS API raises exception.""" + mock_pds_get.side_effect = Exception("PDS API error") + + with self.assertRaises(Exception) as context: + get_practitioner_details_from_pds("9481152782") + + self.assertEqual(str(context.exception), "PDS API error") + + +class TestUnwrapDynamodbValue(unittest.TestCase): + """Tests for _unwrap_dynamodb_value helper function.""" + + def test_unwrap_string_type(self): + """Test unwrapping DynamoDB String type.""" + value = {"S": "test-value"} + result = _unwrap_dynamodb_value(value) + self.assertEqual(result, "test-value") + + def test_unwrap_number_type(self): + """Test unwrapping DynamoDB Number type.""" + value = {"N": "123"} + result = _unwrap_dynamodb_value(value) + self.assertEqual(result, "123") + + def test_unwrap_boolean_type(self): + """Test unwrapping DynamoDB Boolean type.""" + value = {"BOOL": True} + result = _unwrap_dynamodb_value(value) + self.assertTrue(result) + + def test_unwrap_null_type(self): + """Test unwrapping DynamoDB NULL type.""" + value = {"NULL": True} + result = _unwrap_dynamodb_value(value) + self.assertIsNone(result) + + def test_unwrap_map_type(self): + """Test unwrapping DynamoDB Map type.""" + value = 
{"M": {"key": {"S": "value"}}} + result = _unwrap_dynamodb_value(value) + self.assertEqual(result, {"key": {"S": "value"}}) + + def test_unwrap_list_type(self): + """Test unwrapping DynamoDB List type.""" + value = {"L": [{"S": "item1"}, {"S": "item2"}]} + result = _unwrap_dynamodb_value(value) + self.assertEqual(result, [{"S": "item1"}, {"S": "item2"}]) diff --git a/lambdas/mns_publisher/tests/test_lambda_handler.py b/lambdas/mns_publisher/tests/test_lambda_handler.py index d6409ff2a4..1602e01214 100644 --- a/lambdas/mns_publisher/tests/test_lambda_handler.py +++ b/lambdas/mns_publisher/tests/test_lambda_handler.py @@ -1,9 +1,381 @@ -from unittest import TestCase -from unittest.mock import Mock +import json +import unittest +from unittest.mock import Mock, patch + +import boto3 +import responses +from moto import mock_aws from lambda_handler import lambda_handler +from process_records import extract_trace_ids, process_record, process_records +from test_utils import generate_private_key_b64, load_sample_sqs_event + + +class TestExtractTraceIds(unittest.TestCase): + """Tests for extract_trace_ids helper function.""" + + @classmethod + def setUpClass(cls): + """Load the sample SQS event once for all tests.""" + cls.sample_sqs_event = load_sample_sqs_event() + + def test_extract_trace_ids_success_from_real_payload(self): + """Test successful extraction using real SQS event structure.""" + message_id, immunisation_id = extract_trace_ids(self.sample_sqs_event) + + self.assertEqual(message_id, "98ed30eb-829f-41df-8a73-57fef70cf161") + self.assertEqual(immunisation_id, "d058014c-b0fd-4471-8db9-3316175eb825") + + def test_extract_trace_ids_missing_message_id(self): + """Test extraction when messageId is missing.""" + record = {"body": json.dumps({"dynamodb": {"NewImage": {"ImmsID": {"S": "imms-456"}}}})} + + message_id, immunisation_id = extract_trace_ids(record) + + self.assertEqual(message_id, "unknown") + self.assertEqual(immunisation_id, "imms-456") + + def 
test_extract_trace_ids_missing_body(self): + """Test extraction when body is missing.""" + record = {"messageId": "msg-123"} + + message_id, immunisation_id = extract_trace_ids(record) + + self.assertEqual(message_id, "msg-123") + self.assertIsNone(immunisation_id) + + def test_extract_trace_ids_invalid_json_body(self): + """Test extraction when body contains invalid JSON.""" + record = {"messageId": "msg-123", "body": "not valid json"} + + message_id, immunisation_id = extract_trace_ids(record) + + self.assertEqual(message_id, "msg-123") + self.assertIsNone(immunisation_id) + + def test_extract_trace_ids_missing_dynamodb_structure(self): + """Test extraction when DynamoDB structure is incomplete.""" + record = {"messageId": "msg-123", "body": json.dumps({"other": "data"})} + + message_id, immunisation_id = extract_trace_ids(record) + + self.assertEqual(message_id, "msg-123") + self.assertIsNone(immunisation_id) + + +class TestProcessRecord(unittest.TestCase): + """Tests for process_record function.""" + + @classmethod + def setUpClass(cls): + """Load the sample SQS event once for all tests.""" + cls.sample_sqs_record = load_sample_sqs_event() + + def setUp(self): + """Set up test fixtures.""" + self.sample_notification = { + "id": "notif-789", + "specversion": "1.0", + "type": "imms-vaccinations-1", + "filtering": {"action": "CREATE"}, + } + self.mock_mns_service = Mock() + + @patch("process_records.create_mns_notification") + @patch("process_records.logger") + def test_process_record_success(self, mock_logger, mock_create_notification): + """Test successful processing of a single record.""" + mock_create_notification.return_value = self.sample_notification + self.mock_mns_service.publish_notification.return_value = None + + process_record(self.sample_sqs_record, self.mock_mns_service) + + mock_create_notification.assert_called_once_with(self.sample_sqs_record) + self.mock_mns_service.publish_notification.assert_called_once_with(self.sample_notification) + 
mock_logger.exception.assert_not_called() + + @patch("process_records.create_mns_notification") + @patch("process_records.logger") + def test_process_record_create_notification_failure(self, mock_logger, mock_create_notification): + """Test handling when notification creation fails.""" + mock_create_notification.side_effect = Exception("Creation error") + + with self.assertRaises(Exception): + process_record(self.sample_sqs_record, self.mock_mns_service) + + self.mock_mns_service.publish_notification.assert_not_called() + + @patch("process_records.create_mns_notification") + @patch("process_records.logger") + def test_process_record_publish_failure(self, mock_logger, mock_create_notification): + """Test handling when MNS publish fails.""" + mock_create_notification.return_value = self.sample_notification + self.mock_mns_service.publish_notification.side_effect = Exception("Publish error") + + with self.assertRaises(Exception): + process_record(self.sample_sqs_record, self.mock_mns_service) + + +class TestProcessRecords(unittest.TestCase): + """Tests for process_records function.""" + + @classmethod + def setUpClass(cls): + """Load the sample SQS event once for all tests.""" + cls.sample_sqs_record = load_sample_sqs_event() + + @patch("process_records.logger") + @patch("process_records.get_mns_service") + @patch("process_records.process_record") + def test_process_records_all_success(self, mock_process_record, mock_get_mns, mock_logger): + """Test processing multiple records with all successes.""" + mock_mns_service = Mock() + mock_get_mns.return_value = mock_mns_service + mock_process_record.return_value = None + + record_2 = self.sample_sqs_record.copy() + record_2["messageId"] = "different-id" + records = [self.sample_sqs_record, record_2] + + result = process_records(records) + + self.assertEqual(result, {"batchItemFailures": []}) + self.assertEqual(mock_process_record.call_count, 2) + mock_get_mns.assert_called_once() + 
mock_logger.info.assert_called_with("Successfully processed all 2 messages") + + @patch("process_records.logger") + @patch("process_records.get_mns_service") + @patch("process_records.process_record") + def test_process_records_partial_failure(self, mock_process_record, mock_get_mns, mock_logger): + """Test processing with some failures.""" + mock_mns_service = Mock() + mock_get_mns.return_value = mock_mns_service + mock_process_record.side_effect = [ + None, + Exception("Processing error"), + ] + + record_2 = self.sample_sqs_record.copy() + record_2["messageId"] = "msg-456" + records = [self.sample_sqs_record, record_2] + + result = process_records(records) + + self.assertEqual(len(result["batchItemFailures"]), 1) + self.assertEqual(result["batchItemFailures"][0]["itemIdentifier"], "msg-456") + mock_logger.warning.assert_called_with("Batch completed with 1 failures") + + @patch("process_records.logger") + @patch("process_records.get_mns_service") + @patch("process_records.process_record") + def test_process_records_empty_list(self, mock_process_record, mock_get_mns, mock_logger): + """Test processing empty record list.""" + mock_mns_service = Mock() + mock_get_mns.return_value = mock_mns_service + + result = process_records([]) + + self.assertEqual(result, {"batchItemFailures": []}) + mock_process_record.assert_not_called() + mock_logger.info.assert_called_with("Successfully processed all 0 messages") + + @patch("process_records.logger") + @patch("process_records.get_mns_service") + @patch("process_records.process_record") + def test_process_records_mns_service_created_once(self, mock_process_record, mock_get_mns, mock_logger): + """Test that MNS service is created only once for batch.""" + mock_mns_service = Mock() + mock_get_mns.return_value = mock_mns_service + mock_process_record.return_value = None + + records = [self.sample_sqs_record, self.sample_sqs_record, self.sample_sqs_record] + + process_records(records) + + mock_get_mns.assert_called_once() + + 
+class TestLambdaHandler(unittest.TestCase): + """Tests for lambda_handler function.""" + + @classmethod + def setUpClass(cls): + """Load the sample SQS event once for all tests.""" + cls.sample_sqs_record = load_sample_sqs_event() + + @patch("lambda_handler.process_records") + def test_lambda_handler_all_success(self, mock_process_records): + """Test lambda handler with all records succeeding.""" + mock_process_records.return_value = {"batchItemFailures": []} + + event = {"Records": [self.sample_sqs_record]} + result = lambda_handler(event, Mock()) + + self.assertEqual(result, {"batchItemFailures": []}) + mock_process_records.assert_called_once_with([self.sample_sqs_record]) + + @patch("lambda_handler.process_records") + def test_lambda_handler_with_failures(self, mock_process_records): + """Test lambda handler with some failures.""" + mock_process_records.return_value = {"batchItemFailures": [{"itemIdentifier": "msg-123"}]} + + event = {"Records": [self.sample_sqs_record]} + result = lambda_handler(event, Mock()) + + self.assertEqual(result, {"batchItemFailures": [{"itemIdentifier": "msg-123"}]}) + + @patch("lambda_handler.process_records") + def test_lambda_handler_empty_records(self, mock_process_records): + """Test lambda handler with no records.""" + mock_process_records.return_value = {"batchItemFailures": []} + + event = {"Records": []} + result = lambda_handler(event, Mock()) + + self.assertEqual(result, {"batchItemFailures": []}) + mock_process_records.assert_called_once_with([]) + + +@mock_aws +class TestLambdaHandlerIntegration(unittest.TestCase): + """ + Integration tests + """ + + def setUp(self): + """Set up mocked AWS services and test data.""" + self.sample_sqs_record = load_sample_sqs_event() + self.secrets_client = boto3.client("secretsmanager", region_name="eu-west-2") + self.secrets_client.create_secret( + Name="imms/pds/int/jwt-secrets", + SecretString=json.dumps( + {"api_key": "fake-pds-api-key", "kid": "fake-kid-123", "private_key_b64": 
generate_private_key_b64()} + ), + ) + + @responses.activate + @patch("common.api_clients.authentication.AppRestrictedAuth.get_access_token") + @patch("process_records.logger") + def test_successful_notification_creation_with_gp(self, mock_logger, mock_get_token): + """ + Test a Successful MNS Publish notification with calls to PDS for GP details, no batch failure + """ + + # Mock OAuth token response issued from Apigee + mock_get_token.return_value = {"access_token": "fake-token"} + + # Intercepts actual request call to PDS and returns mocked responses + responses.add( + responses.GET, + "https://int.api.service.nhs.uk/personal-demographics/FHIR/R4/Patient/9481152782", + json={"generalPractitioner": [{"identifier": {"value": "Y12345", "period": {"start": "2024-01-01"}}}]}, + status=200, + ) + + mns_response = responses.add( + responses.POST, + "https://int.api.service.nhs.uk/multicast-notification-service/events", + json={"id": "236a1d4a-5d69-4fa9-9c7f-e72bf505aa5b"}, + status=200, + ) + + sqs_event = {"Records": [self.sample_sqs_record]} + result = lambda_handler(sqs_event, Mock()) + + self.assertEqual(result, {"batchItemFailures": []}) + + self.assertEqual(mns_response.call_count, 1) + self.assertEqual(mns_response.calls[0].response.status_code, 200) + mns_payload = json.loads(mns_response.calls[0].request.body) + self.assertEqual(mns_payload["subject"], "9481152782") + self.assertEqual(mns_payload["filtering"]["generalpractitioner"], "Y12345") + self.assertEqual(mns_payload["filtering"]["sourceorganisation"], "B0C4P") + self.assertEqual(mns_payload["filtering"]["sourceapplication"], "TPP") + self.assertEqual(mns_payload["filtering"]["immunisationtype"], "HIB") + self.assertEqual(mns_payload["filtering"]["action"], "CREATE") + self.assertEqual(mns_payload["filtering"]["subjectage"], 21) + + mock_logger.info.assert_any_call("Successfully processed all 1 messages") + + @responses.activate + 
@patch("common.api_clients.authentication.AppRestrictedAuth.get_access_token") + @patch("process_records.get_mns_service") + @patch("process_records.logger") + def test_pds_failure(self, mock_logger, mock_get_mns, mock_get_token): + """ + Test that a PDS client error results in a batch item failure and MNS is not called. + """ + + # Mock OAuth token response issued from Apigee + mock_get_token.return_value = "fake-token" + + # Intercepts actual request call to PDS and returns mocked responses + responses.add( + responses.GET, + "https://int.api.service.nhs.uk/personal-demographics/FHIR/R4/Patient/9481152782", + json={ + "resourceType": "OperationOutcome", + "issue": [{"severity": "error", "code": "processing", "diagnostics": "Patient not found"}], + }, + status=400, + ) + + mock_mns_service = Mock() + mock_mns_service.publish_notification.return_value = None + mock_get_mns.return_value = mock_mns_service + + sqs_event = {"Records": [self.sample_sqs_record]} + result = lambda_handler(sqs_event, Mock()) + + self.assertEqual(len(result["batchItemFailures"]), 1) + mock_mns_service.publish_notification.assert_not_called() + mock_logger.warning.assert_called_with("Batch completed with 1 failures") + + @responses.activate + @patch("common.api_clients.authentication.AppRestrictedAuth.get_access_token") + @patch("process_records.logger") + def test_successful_notification_creation_with_expired_gp(self, mock_logger, mock_get_token): + """ + Test a Successful MNS Publish notification with calls to PDS for GP details, no batch failure + """ + + # Mock OAuth token response issued from Apigee + mock_get_token.return_value = {"access_token": "fake-token"} + + # Intercepts actual request call to PDS and returns mocked responses + responses.add( + responses.GET, + "https://int.api.service.nhs.uk/personal-demographics/FHIR/R4/Patient/9481152782", + json={ + "generalPractitioner": [ + {"identifier": {"value": "Y12345", "period": {"start": "2024-01-01", "end": "2025-12-31"}}} + ] + }, 
+ status=200, + ) + + mns_response = responses.add( + responses.POST, + "https://int.api.service.nhs.uk/multicast-notification-service/events", + json={"id": "236a1d4a-5d69-4fa9-9c7f-e72bf505aa5b"}, + status=200, + ) + + sqs_event = {"Records": [self.sample_sqs_record]} + result = lambda_handler(sqs_event, Mock()) + + self.assertEqual(result, {"batchItemFailures": []}) + self.assertEqual(mns_response.call_count, 1) + self.assertEqual(mns_response.calls[0].response.status_code, 200) + mns_payload = json.loads(mns_response.calls[0].request.body) + self.assertEqual(mns_payload["subject"], "9481152782") + self.assertEqual(mns_payload["filtering"]["generalpractitioner"], None) + self.assertEqual(mns_payload["filtering"]["sourceorganisation"], "B0C4P") + self.assertEqual(mns_payload["filtering"]["sourceapplication"], "TPP") + self.assertEqual(mns_payload["filtering"]["immunisationtype"], "HIB") + self.assertEqual(mns_payload["filtering"]["action"], "CREATE") + self.assertEqual(mns_payload["filtering"]["subjectage"], 21) -class TestLambdaHandler(TestCase): - def test_lambda_handler_returns_true(self): - lambda_handler({"Records": [{"messageId": "1234"}]}, Mock()) + mock_logger.info.assert_any_call("Successfully processed all 1 messages") diff --git a/lambdas/mns_publisher/tests/test_sqs_dynamo_utils.py b/lambdas/mns_publisher/tests/test_sqs_dynamo_utils.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/lambdas/mns_publisher/tests/test_utils.py b/lambdas/mns_publisher/tests/test_utils.py new file mode 100644 index 0000000000..4c6a71c15f --- /dev/null +++ b/lambdas/mns_publisher/tests/test_utils.py @@ -0,0 +1,32 @@ +import base64 +import json +from pathlib import Path + +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric import rsa + + +def generate_private_key_b64() -> str: + # Generate a real RSA private key (PKCS8) and base64 encode the PEM + private_key = 
rsa.generate_private_key(public_exponent=65537, key_size=2048) + pem_bytes = private_key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.PKCS8, + encryption_algorithm=serialization.NoEncryption(), + ) + return base64.b64encode(pem_bytes).decode("utf-8") + + +def load_sample_sqs_event() -> dict: + """ + Loads the sample SQS event and normalises body to a JSON string (as SQS delivers it). + Expects: lambdas/mns_publisher/tests/sqs_event.json + """ + sample_event_path = Path(__file__).parent / "sample_data" / "sqs_event.json" + with open(sample_event_path, "r") as f: + raw_event = json.load(f) + + if isinstance(raw_event.get("body"), dict): + raw_event["body"] = json.dumps(raw_event["body"]) + + return raw_event diff --git a/lambdas/mns_subscription/src/mns_setup.py b/lambdas/mns_subscription/src/mns_setup.py deleted file mode 100644 index f26b87ae4a..0000000000 --- a/lambdas/mns_subscription/src/mns_setup.py +++ /dev/null @@ -1,26 +0,0 @@ -import logging - -import boto3 -from botocore.config import Config - -from common.api_clients.authentication import AppRestrictedAuth, Service -from common.api_clients.mns_service import MnsService -from common.cache import Cache - -logging.basicConfig(level=logging.INFO) - - -def get_mns_service(mns_env: str = "int"): - boto_config = Config(region_name="eu-west-2") - cache = Cache(directory="/tmp") - logging.info("Creating authenticator...") - # VED-1087 TODO: MNS and PDS need separate secrets - authenticator = AppRestrictedAuth( - service=Service.PDS, - secret_manager_client=boto3.client("secretsmanager", config=boto_config), - environment=mns_env, - cache=cache, - ) - - logging.info("Authentication Initiated...") - return MnsService(authenticator) diff --git a/lambdas/mns_subscription/src/subscribe_mns.py b/lambdas/mns_subscription/src/subscribe_mns.py index 111871df0e..7fe0cd7c3c 100644 --- a/lambdas/mns_subscription/src/subscribe_mns.py +++ 
b/lambdas/mns_subscription/src/subscribe_mns.py @@ -1,7 +1,7 @@ import logging import os -from mns_setup import get_mns_service +from common.api_clients.mns_setup import get_mns_service apigee_env = os.getenv("APIGEE_ENVIRONMENT", "int") diff --git a/lambdas/mns_subscription/src/unsubscribe_mns.py b/lambdas/mns_subscription/src/unsubscribe_mns.py index 1022cea2ee..10780503ae 100644 --- a/lambdas/mns_subscription/src/unsubscribe_mns.py +++ b/lambdas/mns_subscription/src/unsubscribe_mns.py @@ -1,7 +1,7 @@ import logging import os -from mns_setup import get_mns_service +from common.api_clients.mns_setup import get_mns_service apigee_env = os.getenv("APIGEE_ENVIRONMENT", "int") diff --git a/lambdas/shared/src/common/api_clients/constants.py b/lambdas/shared/src/common/api_clients/constants.py index 3daf003190..aa305f146f 100644 --- a/lambdas/shared/src/common/api_clients/constants.py +++ b/lambdas/shared/src/common/api_clients/constants.py @@ -1,5 +1,9 @@ +from typing import TypedDict + """Constants used by API clients""" +DEV_ENVIRONMENT = "dev" + class Constants: """Constants used for the API clients""" @@ -8,3 +12,28 @@ class Constants: DEFAULT_API_CLIENTS_TIMEOUT = 5 API_CLIENTS_MAX_RETRIES = 2 API_CLIENTS_BACKOFF_SECONDS = 0.5 + + +# Fields from the incoming SQS message that forms part of the base schema and filtering attributes for MNS notifications +class FilteringData(TypedDict): + """MNS notification filtering attributes.""" + + generalpractitioner: str | None + sourceorganisation: str + sourceapplication: str + subjectage: int + immunisationtype: str + action: str + + +class MnsNotificationPayload(TypedDict): + """CloudEvents-compliant MNS notification payload.""" + + specversion: str + id: str + source: str + type: str + time: str + subject: str + dataref: str + filtering: FilteringData diff --git a/lambdas/shared/src/common/api_clients/errors.py b/lambdas/shared/src/common/api_clients/errors.py index 3fc3502642..4329e118d8 100644 --- 
a/lambdas/shared/src/common/api_clients/errors.py +++ b/lambdas/shared/src/common/api_clients/errors.py @@ -157,6 +157,14 @@ def to_operation_outcome(self) -> dict: ) +class PdsSyncException(Exception): + """Custom exception for Pds Sync errors.""" + + def __init__(self, message: str): + self.message = message + super().__init__(message) + + def raise_error_response(response): error_mapping = { 401: (TokenValidationError, "Token validation failed for the request"), diff --git a/lambdas/shared/src/common/api_clients/get_pds_details.py b/lambdas/shared/src/common/api_clients/get_pds_details.py new file mode 100644 index 0000000000..63844b3cda --- /dev/null +++ b/lambdas/shared/src/common/api_clients/get_pds_details.py @@ -0,0 +1,34 @@ +""" +Operations related to PDS (Patient Demographic Service) +""" + +import os +import tempfile + +from common.api_clients.authentication import AppRestrictedAuth, Service +from common.api_clients.errors import PdsSyncException +from common.api_clients.pds_service import PdsService +from common.cache import Cache +from common.clients import get_secrets_manager_client, logger + +PDS_ENV = os.getenv("PDS_ENV", "int") +safe_tmp_dir = tempfile.mkdtemp(dir="/tmp") # NOSONAR(S5443) + + +# Get Patient details from external service PDS using NHS number from MNS notification +def pds_get_patient_details(nhs_number: str) -> dict: + try: + cache = Cache(directory=safe_tmp_dir) + authenticator = AppRestrictedAuth( + service=Service.PDS, + secret_manager_client=get_secrets_manager_client(), + environment=PDS_ENV, + cache=cache, + ) + pds_service = PdsService(authenticator, PDS_ENV) + patient = pds_service.get_patient_details(nhs_number) + return patient + except Exception as e: + msg = "Error retrieving patient details from PDS" + logger.exception(msg) + raise PdsSyncException(message=msg) from e diff --git a/lambdas/shared/src/common/api_clients/mns_service.py b/lambdas/shared/src/common/api_clients/mns_service.py index 32fc12344a..417fef6ad5 
100644 --- a/lambdas/shared/src/common/api_clients/mns_service.py +++ b/lambdas/shared/src/common/api_clients/mns_service.py @@ -6,16 +6,19 @@ import requests from common.api_clients.authentication import AppRestrictedAuth +from common.api_clients.constants import MnsNotificationPayload from common.api_clients.errors import raise_error_response from common.api_clients.retry import request_with_retry_backoff SQS_ARN = os.getenv("SQS_ARN") apigee_env = os.getenv("APIGEE_ENVIRONMENT", "int") -MNS_URL = ( - "https://api.service.nhs.uk/multicast-notification-service/subscriptions" - if apigee_env == "prod" - else "https://int.api.service.nhs.uk/multicast-notification-service/subscriptions" +mns_env = os.getenv("MNS_ENV", "int") +env = apigee_env or mns_env +MNS_BASE_URL = ( + "https://api.service.nhs.uk/multicast-notification-service" + if env == "prod" + else "https://int.api.service.nhs.uk/multicast-notification-service" ) @@ -23,16 +26,25 @@ class MnsService: def __init__(self, authenticator: AppRestrictedAuth): self.authenticator = authenticator self.access_token = self.authenticator.get_access_token() - self.request_headers = { - "Content-Type": "application/fhir+json", - "Authorization": f"Bearer {self.access_token}", - "X-Correlation-ID": str(uuid.uuid4()), - } - self.subscription_payload = { + logging.info(f"Using SQS ARN for subscription: {SQS_ARN}") + + def _build_subscription_payload(self, event_type: str, reason: str | None = None, status: str = "requested") -> dict: + """ + Builds subscription payload. 
+ Args: + event_type: Event type to subscribe to (e.g., 'imms-vaccinations-2', 'nhs-number-change-2') + reason: Optional description of the subscription + status: Subscription status (default: 'requested') + Returns: Subscription payload dict + """ + if not reason: + reason = f"Subscribe SQS to {event_type} events" + + return { "resourceType": "Subscription", - "status": "requested", - "reason": "Subscribe SQS to NHS Number Change Events", - "criteria": "eventType=nhs-number-change-2", + "status": status, + "reason": reason, + "criteria": f"eventType={event_type}", "channel": { "type": "message", "endpoint": SQS_ARN, @@ -40,21 +52,34 @@ def __init__(self, authenticator: AppRestrictedAuth): }, } - logging.info(f"Using SQS ARN for subscription: {SQS_ARN}") + def _build_headers(self, content_type: str = "application/fhir+json") -> dict: + """Build request headers with authentication and correlation ID.""" + return { + "Content-Type": content_type, + "Authorization": f"Bearer {self.access_token}", + "X-Correlation-ID": str(uuid.uuid4()), + } - def subscribe_notification(self) -> dict | None: + def subscribe_notification(self, event_type: str = "nhs-number-change-2", reason: str | None = None) -> dict | None: + subscription_payload = self._build_subscription_payload(event_type, reason) response = requests.request( - "POST", MNS_URL, headers=self.request_headers, timeout=15, data=json.dumps(self.subscription_payload) + "POST", + f"{MNS_BASE_URL}/subscriptions", + headers=self._build_headers(), + timeout=15, + data=json.dumps(subscription_payload), ) + if response.status_code in (200, 201): return response.json() else: raise_error_response(response) def get_subscription(self) -> dict | None: - response = request_with_retry_backoff("GET", MNS_URL, headers=self.request_headers, timeout=10) - logging.info(f"GET {MNS_URL}") - logging.debug(f"Headers: {self.request_headers}") + """Retrieve existing subscription for this SQS ARN.""" + headers = self._build_headers() + response 
= request_with_retry_backoff("GET", f"{MNS_BASE_URL}/subscriptions", headers, timeout=10) + logging.info(f"GET {MNS_BASE_URL}/subscriptions") if response.status_code == 200: bundle = response.json() @@ -89,8 +114,8 @@ def check_subscription(self) -> dict: def delete_subscription(self, subscription_id: str) -> str: """Delete the subscription by ID.""" - url = f"{MNS_URL}/{subscription_id}" - response = request_with_retry_backoff("DELETE", url, headers=self.request_headers, timeout=10) + url = f"{MNS_BASE_URL}/subscriptions/{subscription_id}" + response = request_with_retry_backoff("DELETE", url, headers=self._build_headers(), timeout=10) if response.status_code == 204: logging.info(f"Deleted subscription {subscription_id}") return "Subscription Successfully Deleted..." @@ -111,3 +136,16 @@ def check_delete_subscription(self): return "Subscription successfully deleted" except Exception as e: return f"Error deleting subscription: {str(e)}" + + def publish_notification(self, notification_payload: MnsNotificationPayload) -> dict | None: + response = requests.request( + "POST", + f"{MNS_BASE_URL}/events", + headers=self._build_headers(content_type="application/cloudevents+json"), + timeout=15, + data=json.dumps(notification_payload), + ) + if response.status_code == 200: + return response.json() + else: + raise_error_response(response) diff --git a/lambdas/shared/src/common/api_clients/mns_setup.py b/lambdas/shared/src/common/api_clients/mns_setup.py new file mode 100644 index 0000000000..5cecd4440f --- /dev/null +++ b/lambdas/shared/src/common/api_clients/mns_setup.py @@ -0,0 +1,32 @@ +import logging +import os + +import boto3 +from botocore.config import Config + +from common.api_clients.authentication import AppRestrictedAuth, Service +from common.api_clients.constants import DEV_ENVIRONMENT +from common.api_clients.mns_service import MnsService +from common.api_clients.mock_mns_service import MockMnsService +from common.cache import Cache + 
+logging.basicConfig(level=logging.INFO) +MNS_TEST_QUEUE_URL = os.getenv("MNS_TEST_QUEUE_URL") + + +def get_mns_service(mns_env: str = "int"): + if mns_env == DEV_ENVIRONMENT: + logging.info("Dev environment: Using MockMnsService") + return MockMnsService(MNS_TEST_QUEUE_URL) + else: + boto_config = Config(region_name="eu-west-2") + cache = Cache(directory="/tmp") + logging.info("Creating authenticator...") + authenticator = AppRestrictedAuth( + service=Service.PDS, + secret_manager_client=boto3.client("secretsmanager", config=boto_config), + environment=mns_env, + cache=cache, + ) + logging.info("Authentication Initiated...") + return MnsService(authenticator) diff --git a/lambdas/shared/src/common/api_clients/mock_mns_service.py b/lambdas/shared/src/common/api_clients/mock_mns_service.py new file mode 100644 index 0000000000..5c922b1230 --- /dev/null +++ b/lambdas/shared/src/common/api_clients/mock_mns_service.py @@ -0,0 +1,34 @@ +import json +import os + +import boto3 + +from common.api_clients.constants import MnsNotificationPayload +from common.clients import logger + +REGION_NAME = os.getenv("AWS_REGION", "eu-west-2") + + +class MockMnsService: + def __init__(self, queue_url): + self.queue_url = queue_url + self.sqs_client = boto3.client("sqs", region_name=REGION_NAME) + logger.info(f"MockMnsService initialized with queue: {queue_url}") + + def publish_notification(self, mns_payload: MnsNotificationPayload) -> None: + """ + Send MNS notification payload to test SQS queue as fallback. 
+ Args: payload: MNS notification payload + """ + try: + response = self.sqs_client.send_message( + QueueUrl=self.queue_url, + MessageBody=json.dumps(mns_payload), + MessageAttributes={"source": {"StringValue": "mns-publisher-lambda", "DataType": "String"}}, + ) + logger.info( + "Mock MNS: Successfully sent notification to test queue", extra={"message_id": response["MessageId"]} + ) + except Exception: + logger.exception("Mock MNS: Failed to send to test SQS queue") + raise diff --git a/lambdas/shared/src/common/constants.py b/lambdas/shared/src/common/constants.py new file mode 100644 index 0000000000..060a4ca472 --- /dev/null +++ b/lambdas/shared/src/common/constants.py @@ -0,0 +1,2 @@ +DEFAULT_BASE_PATH = "immunisation-fhir-api/FHIR/R4" +PR_ENV_PREFIX = "pr-" diff --git a/lambdas/shared/src/common/get_service_url.py b/lambdas/shared/src/common/get_service_url.py new file mode 100644 index 0000000000..9188c07509 --- /dev/null +++ b/lambdas/shared/src/common/get_service_url.py @@ -0,0 +1,26 @@ +from typing import Optional + +from common.constants import DEFAULT_BASE_PATH, PR_ENV_PREFIX + + +def get_service_url(service_env: Optional[str], service_base_path: Optional[str]) -> str: + """Sets the service URL based on service parameters derived from env vars. PR environments use internal-dev while + we also default to this environment. The only other exceptions are preprod which maps to the Apigee int environment + and prod which does not have a subdomain.""" + if not service_base_path: + service_base_path = DEFAULT_BASE_PATH + + if service_env is None or is_pr_env(service_env): + subdomain = "internal-dev." + elif service_env == "preprod": + subdomain = "int." + elif service_env == "prod": + subdomain = "" + else: + subdomain = f"{service_env}." 
+ + return f"https://{subdomain}api.service.nhs.uk/{service_base_path}" + + +def is_pr_env(service_env: Optional[str]) -> bool: + return service_env is not None and service_env.startswith(PR_ENV_PREFIX) diff --git a/lambdas/shared/tests/test_common/api_clients/test_mns_service.py b/lambdas/shared/tests/test_common/api_clients/test_mns_service.py index 82c7c6cae7..3cc9daab9e 100644 --- a/lambdas/shared/tests/test_common/api_clients/test_mns_service.py +++ b/lambdas/shared/tests/test_common/api_clients/test_mns_service.py @@ -12,7 +12,7 @@ UnhandledResponseError, raise_error_response, ) -from common.api_clients.mns_service import MNS_URL, MnsService +from common.api_clients.mns_service import MnsService SQS_ARN = "arn:aws:sqs:eu-west-2:123456789012:my-queue" @@ -138,18 +138,34 @@ def test_check_subscription_creates_if_not_found(self, mock_request): self.assertEqual(result, {"subscriptionId": "abc123"}) self.assertEqual(mock_request.call_count, 2) - @patch("common.api_clients.mns_service.requests.request") - def test_delete_subscription_success(self, mock_delete): + @patch("common.api_clients.mns_service.request_with_retry_backoff") + def test_delete_subscription_success(self, mock_retry_request): + """Test successful subscription deletion.""" mock_response = MagicMock() mock_response.status_code = 204 - mock_delete.return_value = mock_response + mock_retry_request.return_value = mock_response service = MnsService(self.authenticator) result = service.delete_subscription("sub-id-123") - self.assertTrue(result) - mock_delete.assert_called_with( - method="DELETE", url=f"{MNS_URL}/sub-id-123", headers=service.request_headers, timeout=10 - ) + + self.assertEqual(result, "Subscription Successfully Deleted...") + + # Verify the request was made correctly + mock_retry_request.assert_called_once() + + # Get call arguments + args, kwargs = mock_retry_request.call_args + + # Verify method and URL + self.assertEqual(args[0], "DELETE") + self.assertIn("/subscriptions/sub-id-123", 
args[1]) + + # Verify headers exist + self.assertIn("headers", kwargs) + self.assertIn("Authorization", kwargs["headers"]) + + # Verify timeout + self.assertEqual(kwargs["timeout"], 10) @patch("common.api_clients.mns_service.requests.request") def test_delete_subscription_401(self, mock_delete): @@ -277,6 +293,49 @@ def test_unhandled_status_code(self): self.assertIn("Unhandled error: 418", str(context.exception)) self.assertEqual(context.exception.response, {"resource": 1234}) + @patch("common.api_clients.mns_service.requests.request") + def test_publish_notification_success(self, mock_request): + """Test successful notification publishing.""" + mock_response = Mock() + mock_response.status_code = 200 + mock_response.json.return_value = {"status": "published"} + mock_request.return_value = mock_response + + notification_payload = { + "specversion": "1.0", + "id": "test-id", + "type": "imms-vaccinations-2", + "source": "test-source", + } + + service = MnsService(self.authenticator) + result = service.publish_notification(notification_payload) + + self.assertEqual(result["status"], "published") + + # Verify the request was made correctly + mock_request.assert_called_once() + call_args = mock_request.call_args + + headers = call_args[1]["headers"] + self.assertEqual(headers["Content-Type"], "application/cloudevents+json") + mock_request.assert_called_once() + + @patch("common.api_clients.mns_service.requests.request") + @patch("common.api_clients.mns_service.raise_error_response") + def test_publish_notification_failure(self, mock_raise_error, mock_request): + """Test notification publishing failure.""" + mock_response = Mock() + mock_response.status_code = 400 + mock_request.return_value = mock_response + + notification_payload = {"id": "test-id"} + + service = MnsService(self.authenticator) + service.publish_notification(notification_payload) + + mock_raise_error.assert_called_once_with(mock_response) + if __name__ == "__main__": unittest.main() diff --git 
a/lambdas/mns_subscription/tests/test_mns_setup.py b/lambdas/shared/tests/test_common/api_clients/test_mns_setup.py similarity index 79% rename from lambdas/mns_subscription/tests/test_mns_setup.py rename to lambdas/shared/tests/test_common/api_clients/test_mns_setup.py index 53aa67941a..06fe1959cf 100644 --- a/lambdas/mns_subscription/tests/test_mns_setup.py +++ b/lambdas/shared/tests/test_common/api_clients/test_mns_setup.py @@ -1,13 +1,13 @@ import unittest from unittest.mock import MagicMock, patch -from mns_setup import get_mns_service +from common.api_clients.mns_setup import get_mns_service class TestGetMnsService(unittest.TestCase): - @patch("mns_setup.boto3.client") - @patch("mns_setup.AppRestrictedAuth") - @patch("mns_setup.MnsService") + @patch("common.api_clients.mns_setup.boto3.client") + @patch("common.api_clients.mns_setup.AppRestrictedAuth") + @patch("common.api_clients.mns_setup.MnsService") def test_get_mns_service(self, mock_mns_service, mock_app_auth, mock_boto_client): # Arrange mock_auth_instance = MagicMock() diff --git a/lambdas/shared/tests/test_common/api_clients/test_mock_mns_service.py b/lambdas/shared/tests/test_common/api_clients/test_mock_mns_service.py new file mode 100644 index 0000000000..13e22deeda --- /dev/null +++ b/lambdas/shared/tests/test_common/api_clients/test_mock_mns_service.py @@ -0,0 +1,96 @@ +import json +import unittest + +import boto3 +from moto import mock_aws + +from common.api_clients.mock_mns_service import MockMnsService + + +@mock_aws +class TestMockMnsService(unittest.TestCase): + """Tests for MockMnsService (dev environment).""" + + def setUp(self): + """Set up mocked SQS queue and test payload.""" + # Create mock SQS queue + self.sqs = boto3.client("sqs", region_name="eu-west-2") + response = self.sqs.create_queue(QueueName="mns-test-notifications-dev") + self.queue_url = response["QueueUrl"] + + self.mns_payload = { + "specversion": "1.0", + "id": "236a1d4a-5d69-4fa9-9c7f-e72bf505aa5b", + "source": 
"https://int.api.service.nhs.uk/immunisation-fhir-api", + "type": "imms-vaccinations-2", + "time": "20260212T174437+00:00", + "subject": "9481152782", + "dataref": "https://int.api.service.nhs.uk/immunisation-fhir-api/Immunization/d058014c-b0fd-4471-8db9-3316175eb825", + "filtering": { + "generalpractitioner": "Y12345", + "sourceorganisation": "B0C4P", + "sourceapplication": "TPP", + "subjectage": 21, + "immunisationtype": "HIB", + "action": "CREATE", + }, + } + + def test_publish_notification_success(self): + """Test MockMnsService successfully publishes to SQS queue.""" + # Create mock service with queue URL + mock_service = MockMnsService(queue_url=self.queue_url) + + # Publish notification + mock_service.publish_notification(self.mns_payload) + + # Verify message was sent to queue + messages = self.sqs.receive_message( + QueueUrl=self.queue_url, MaxNumberOfMessages=1, MessageAttributeNames=["All"] + ) + + # Assert message exists + self.assertIn("Messages", messages) + self.assertEqual(len(messages["Messages"]), 1) + + # Verify message body + message_body = json.loads(messages["Messages"][0]["Body"]) + self.assertEqual(message_body["id"], "236a1d4a-5d69-4fa9-9c7f-e72bf505aa5b") + self.assertEqual(message_body["subject"], "9481152782") + self.assertEqual(message_body["filtering"]["generalpractitioner"], "Y12345") + self.assertEqual(message_body["filtering"]["sourceorganisation"], "B0C4P") + self.assertEqual(message_body["filtering"]["sourceapplication"], "TPP") + self.assertEqual(message_body["filtering"]["immunisationtype"], "HIB") + self.assertEqual(message_body["filtering"]["action"], "CREATE") + self.assertEqual(message_body["filtering"]["subjectage"], 21) + + # Verify message attributes + attributes = messages["Messages"][0]["MessageAttributes"] + self.assertEqual(attributes["source"]["StringValue"], "mns-publisher-lambda") + + def test_publish_notification_multiple_messages(self): + """Test MockMnsService handles multiple publications.""" + mock_service = 
MockMnsService(queue_url=self.queue_url) + + # Publish multiple notifications + payload1 = {**self.mns_payload, "id": "notification-1"} + payload2 = {**self.mns_payload, "id": "notification-2"} + + mock_service.publish_notification(payload1) + mock_service.publish_notification(payload2) + + # Verify both messages in queue + messages = self.sqs.receive_message(QueueUrl=self.queue_url, MaxNumberOfMessages=10) + + self.assertEqual(len(messages["Messages"]), 2) + + message_ids = [json.loads(msg["Body"])["id"] for msg in messages["Messages"]] + self.assertIn("notification-1", message_ids) + self.assertIn("notification-2", message_ids) + + def test_publish_notification_sqs_failure(self): + """Test MockMnsService raises exception on SQS failure.""" + # Use invalid queue URL + mock_service = MockMnsService(queue_url="queue_url=invalid_queue_url") + with self.assertRaises(Exception): + mock_service.publish_notification(self.mns_payload) diff --git a/lambdas/id_sync/tests/test_pds_details.py b/lambdas/shared/tests/test_common/api_clients/test_pds_details.py similarity index 86% rename from lambdas/id_sync/tests/test_pds_details.py rename to lambdas/shared/tests/test_common/api_clients/test_pds_details.py index 442cda106a..f833c10d07 100644 --- a/lambdas/id_sync/tests/test_pds_details.py +++ b/lambdas/shared/tests/test_common/api_clients/test_pds_details.py @@ -1,8 +1,8 @@ import unittest from unittest.mock import MagicMock, patch -from exceptions.id_sync_exception import IdSyncException -from pds_details import get_nhs_number_from_pds_resource, pds_get_patient_details +from common.api_clients.errors import PdsSyncException +from common.api_clients.get_pds_details import pds_get_patient_details class TestGetPdsPatientDetails(unittest.TestCase): @@ -11,27 +11,27 @@ def setUp(self): self.test_patient_id = "9912003888" # Patch all external dependencies - self.logger_patcher = patch("pds_details.logger") + self.logger_patcher = patch("common.api_clients.get_pds_details.logger") 
self.mock_logger = self.logger_patcher.start() self.secrets_manager_patcher = patch("common.clients.global_secrets_manager_client") self.mock_secrets_manager = self.secrets_manager_patcher.start() - self.pds_env_patcher = patch("pds_details.get_pds_env") + self.pds_env_patcher = patch("os.getenv") self.mock_pds_env = self.pds_env_patcher.start() self.mock_pds_env.return_value = "test-env" - self.cache_patcher = patch("pds_details.Cache") + self.cache_patcher = patch("common.api_clients.get_pds_details.Cache") self.mock_cache_class = self.cache_patcher.start() self.mock_cache_instance = MagicMock() self.mock_cache_class.return_value = self.mock_cache_instance - self.auth_patcher = patch("pds_details.AppRestrictedAuth") + self.auth_patcher = patch("common.api_clients.get_pds_details.AppRestrictedAuth") self.mock_auth_class = self.auth_patcher.start() self.mock_auth_instance = MagicMock() self.mock_auth_class.return_value = self.mock_auth_instance - self.pds_service_patcher = patch("pds_details.PdsService") + self.pds_service_patcher = patch("common.api_clients.get_pds_details.PdsService") self.mock_pds_service_class = self.pds_service_patcher.start() self.mock_pds_service_instance = MagicMock() self.mock_pds_service_class.return_value = self.mock_pds_service_instance @@ -94,7 +94,7 @@ def test_pds_get_patient_details_pds_service_exception(self): self.mock_pds_service_instance.get_patient_details.side_effect = mock_exception # Act - with self.assertRaises(IdSyncException) as context: + with self.assertRaises(PdsSyncException) as context: pds_get_patient_details(self.test_patient_id) exception = context.exception @@ -116,7 +116,7 @@ def test_pds_get_patient_details_cache_initialization_error(self): self.mock_cache_class.side_effect = OSError("Cannot write to /tmp") # Act - with self.assertRaises(IdSyncException) as context: + with self.assertRaises(PdsSyncException) as context: pds_get_patient_details(self.test_patient_id) # Assert @@ -137,7 +137,7 @@ def 
test_pds_get_patient_details_auth_initialization_error(self): self.mock_auth_class.side_effect = ValueError("Invalid authentication parameters") # Act - with self.assertRaises(IdSyncException) as context: + with self.assertRaises(PdsSyncException) as context: pds_get_patient_details(self.test_patient_id) # Assert @@ -207,18 +207,3 @@ def test_pds_get_patient_details(self): # Assert - function should extract the value from first identifier self.assertEqual(result, mock_pds_response) self.mock_pds_service_instance.get_patient_details.assert_called_once_with(test_nhs_number) - - def test_get_nhs_number_from_pds_resource(self): - """Test that the NHS Number is retrieved from a full PDS patient resource.""" - mock_pds_resource = { - "identifier": [ - { - "system": "https://fhir.nhs.uk/Id/nhs-number", - "value": "123456789012", - } - ] - } - - result = get_nhs_number_from_pds_resource(mock_pds_resource) - - self.assertEqual(result, "123456789012") diff --git a/lambdas/shared/tests/test_common/test_get_service_url.py b/lambdas/shared/tests/test_common/test_get_service_url.py new file mode 100644 index 0000000000..e7d7fc03c9 --- /dev/null +++ b/lambdas/shared/tests/test_common/test_get_service_url.py @@ -0,0 +1,29 @@ +"""Tests for the search_url_helper file""" + +import unittest + +from common.get_service_url import get_service_url + + +class TestServiceUrl(unittest.TestCase): + def test_get_service_url(self): + """it should create service url""" + test_cases = [ + ("pr-123", "https://internal-dev.api.service.nhs.uk/immunisation-fhir-api/FHIR/R4"), + (None, "https://internal-dev.api.service.nhs.uk/immunisation-fhir-api/FHIR/R4"), + ("preprod", "https://int.api.service.nhs.uk/immunisation-fhir-api/FHIR/R4"), + ("prod", "https://api.service.nhs.uk/immunisation-fhir-api/FHIR/R4"), + ("ref", "https://ref.api.service.nhs.uk/immunisation-fhir-api/FHIR/R4"), + ("internal-dev", "https://internal-dev.api.service.nhs.uk/immunisation-fhir-api/FHIR/R4"), + ("internal-qa", 
"https://internal-qa.api.service.nhs.uk/immunisation-fhir-api/FHIR/R4"), + ] + mock_base_path = "immunisation-fhir-api/FHIR/R4" + + for mock_env, expected in test_cases: + with self.subTest(mock_env=mock_env, expected=expected): + self.assertEqual(get_service_url(mock_env, mock_base_path), expected) + + def test_get_service_url_uses_default_path_when_not_provided(self): + self.assertEqual( + get_service_url(None, None), "https://internal-dev.api.service.nhs.uk/immunisation-fhir-api/FHIR/R4" + ) From 0be825405492c8022d9429457705a4dc4670db4b Mon Sep 17 00:00:00 2001 From: amarauzoma Date: Fri, 13 Mar 2026 12:54:19 +0000 Subject: [PATCH 04/14] VED-000: Refactor Api_clients Authentication (#1278) --- lambdas/id_sync/src/pds_details.py | 31 ---- lambdas/mns_publisher/poetry.lock | 47 ++++-- lambdas/mns_publisher/pyproject.toml | 1 + lambdas/mns_publisher/src/constants.py | 3 +- .../mns_publisher/src/create_notification.py | 6 +- lambdas/mns_publisher/src/observability.py | 21 +++ lambdas/mns_publisher/src/process_records.py | 68 ++++++--- .../tests/test_create_notification.py | 6 +- .../tests/test_lambda_handler.py | 10 +- lambdas/mns_publisher/tests/test_utils.py | 2 +- .../src/common/api_clients/authentication.py | 120 ++++++++------- .../src/common/api_clients/constants.py | 11 ++ .../src/common/api_clients/get_pds_details.py | 27 ++-- .../src/common/api_clients/mns_service.py | 6 +- .../src/common/api_clients/mns_setup.py | 13 +- lambdas/shared/src/common/cache.py | 33 ---- lambdas/shared/src/common/get_service_url.py | 6 +- .../api_clients/test_authentication.py | 86 ++++------- .../api_clients/test_mns_service.py | 22 +-- .../test_common/api_clients/test_mns_setup.py | 8 +- .../api_clients/test_pds_details.py | 141 ++---------------- .../shared/tests/test_common/test_cache.py | 88 ----------- 22 files changed, 277 insertions(+), 479 deletions(-) create mode 100644 lambdas/mns_publisher/src/observability.py delete mode 100644 lambdas/shared/src/common/cache.py 
delete mode 100644 lambdas/shared/tests/test_common/test_cache.py diff --git a/lambdas/id_sync/src/pds_details.py b/lambdas/id_sync/src/pds_details.py index 62ef6c247d..27492ceb7d 100644 --- a/lambdas/id_sync/src/pds_details.py +++ b/lambdas/id_sync/src/pds_details.py @@ -2,37 +2,6 @@ Operations related to PDS (Patient Demographic Service) """ -import tempfile - -from common.api_clients.authentication import AppRestrictedAuth, Service -from common.api_clients.pds_service import PdsService -from common.cache import Cache -from common.clients import get_secrets_manager_client, logger -from exceptions.id_sync_exception import IdSyncException -from os_vars import get_pds_env - -pds_env = get_pds_env() -safe_tmp_dir = tempfile.mkdtemp(dir="/tmp") - - -# Get Patient details from external service PDS using NHS number from MNS notification -def pds_get_patient_details(nhs_number: str) -> dict: - try: - cache = Cache(directory=safe_tmp_dir) - authenticator = AppRestrictedAuth( - service=Service.PDS, - secret_manager_client=get_secrets_manager_client(), - environment=pds_env, - cache=cache, - ) - pds_service = PdsService(authenticator, pds_env) - patient = pds_service.get_patient_details(nhs_number) - return patient - except Exception as e: - msg = "Error retrieving patient details from PDS" - logger.exception(msg) - raise IdSyncException(message=msg) from e - def get_nhs_number_from_pds_resource(pds_resource: dict) -> str: """Simple helper to get the NHS Number from a PDS Resource. No handling as this is a mandatory field in the PDS diff --git a/lambdas/mns_publisher/poetry.lock b/lambdas/mns_publisher/poetry.lock index 69634eb57c..c555a3c7f9 100644 --- a/lambdas/mns_publisher/poetry.lock +++ b/lambdas/mns_publisher/poetry.lock @@ -1,4 +1,33 @@ -# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand. 
+ +[[package]] +name = "aws-lambda-powertools" +version = "3.24.0" +description = "Powertools for AWS Lambda (Python) is a developer toolkit to implement Serverless best practices and increase developer velocity." +optional = false +python-versions = "<4.0.0,>=3.10" +groups = ["main"] +files = [ + {file = "aws_lambda_powertools-3.24.0-py3-none-any.whl", hash = "sha256:9c9002856f61b86f49271a9d7efa0dad322ecd22719ddc1c6bb373e57ee0421a"}, + {file = "aws_lambda_powertools-3.24.0.tar.gz", hash = "sha256:9f86959c4aeac9669da799999aae5feac7a3a86e642b52473892eaa4273d3cc3"}, +] + +[package.dependencies] +jmespath = ">=1.0.1,<2.0.0" +typing-extensions = ">=4.11.0,<5.0.0" + +[package.extras] +all = ["aws-encryption-sdk (>=3.1.1,<5.0.0)", "aws-xray-sdk (>=2.8.0,<3.0.0)", "fastjsonschema (>=2.14.5,<3.0.0)", "jsonpath-ng (>=1.6.0,<2.0.0)", "pydantic (>=2.4.0,<3.0.0)", "pydantic-settings (>=2.6.1,<3.0.0)"] +aws-sdk = ["boto3 (>=1.34.32,<2.0.0)"] +datadog = ["datadog-lambda (>=8.114.0,<9.0.0)"] +datamasking = ["aws-encryption-sdk (>=3.1.1,<5.0.0)", "jsonpath-ng (>=1.6.0,<2.0.0)"] +kafka-consumer-avro = ["avro (>=1.12.0,<2.0.0)"] +kafka-consumer-protobuf = ["protobuf (>=6.30.2,<7.0.0)"] +parser = ["pydantic (>=2.4.0,<3.0.0)"] +redis = ["redis (>=4.4,<8.0)"] +tracer = ["aws-xray-sdk (>=2.8.0,<3.0.0)"] +validation = ["fastjsonschema (>=2.14.5,<3.0.0)"] +valkey = ["valkey-glide (>=1.3.5,<3.0)"] [[package]] name = "aws-lambda-typing" @@ -69,7 +98,7 @@ version = "2026.1.4" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.7" -groups = ["main", "dev"] +groups = ["main"] files = [ {file = "certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c"}, {file = "certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120"}, @@ -179,7 +208,7 @@ version = "3.4.4" description = "The Real First Universal Charset Detector. 
Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7" -groups = ["main", "dev"] +groups = ["main"] files = [ {file = "charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e824f1492727fa856dd6eda4f7cee25f8518a12f3c4a56a74e8095695089cf6d"}, {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4bd5d4137d500351a30687c2d3971758aac9a19208fc110ccb9d7188fbe709e8"}, @@ -493,7 +522,7 @@ version = "3.11" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.8" -groups = ["main", "dev"] +groups = ["main"] files = [ {file = "idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea"}, {file = "idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902"}, @@ -744,7 +773,7 @@ version = "6.0.3" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" -groups = ["main", "dev"] +groups = ["main"] files = [ {file = "PyYAML-6.0.3-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:c2514fceb77bc5e7a2f7adfaa1feb2fb311607c9cb518dbc378688ec73d8292f"}, {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c57bb8c96f6d1808c030b1687b9b5fb476abaa47f0db9c0101f5e9f394e97f4"}, @@ -827,7 +856,7 @@ version = "2.32.5" description = "Python HTTP for Humans." 
optional = false python-versions = ">=3.9" -groups = ["main", "dev"] +groups = ["main"] files = [ {file = "requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6"}, {file = "requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf"}, @@ -849,7 +878,7 @@ version = "0.26.0" description = "A utility library for mocking out the `requests` Python library." optional = false python-versions = ">=3.8" -groups = ["main", "dev"] +groups = ["main"] files = [ {file = "responses-0.26.0-py3-none-any.whl", hash = "sha256:03ec4409088cd5c66b71ecbbbd27fe2c58ddfad801c66203457b3e6a04868c37"}, {file = "responses-0.26.0.tar.gz", hash = "sha256:c7f6923e6343ef3682816ba421c006626777893cb0d5e1434f674b649bac9eb4"}, @@ -911,7 +940,7 @@ version = "2.6.3" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.9" -groups = ["main", "dev"] +groups = ["main"] files = [ {file = "urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4"}, {file = "urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed"}, @@ -959,4 +988,4 @@ test = ["pytest", "pytest-cov"] [metadata] lock-version = "2.1" python-versions = "~3.11" -content-hash = "06d376648a4c5e1c740cd23a0bb066222c08baebca729d2eb422fe6b53e686bc" +content-hash = "add4cde377952585d01c12803a6013f7a1eb4c6720c7653ac78cab2a2af60da6" diff --git a/lambdas/mns_publisher/pyproject.toml b/lambdas/mns_publisher/pyproject.toml index 2bafd9e372..5f7458bde7 100644 --- a/lambdas/mns_publisher/pyproject.toml +++ b/lambdas/mns_publisher/pyproject.toml @@ -19,6 +19,7 @@ boto3 = "~1.42.37" mypy-boto3-dynamodb = "^1.42.33" moto = "~5.1.20" cache = "^1.0.3" +aws-lambda-powertools = {version = "3.24.0"} [build-system] requires = ["poetry-core >= 1.5.0"] diff --git 
a/lambdas/mns_publisher/src/constants.py b/lambdas/mns_publisher/src/constants.py index 28b968a190..1896313f58 100644 --- a/lambdas/mns_publisher/src/constants.py +++ b/lambdas/mns_publisher/src/constants.py @@ -1,5 +1,6 @@ # Static constants for the MNS notification creation process SPEC_VERSION = "1.0" -IMMUNISATION_TYPE = "imms-vaccinations-1" +IMMUNISATION_EVENT_SOURCE = "uk.nhs.vaccinations-data-flow-management" +IMMUNISATION_EVENT_TYPE = "imms-vaccination-record-change-1" DYNAMO_DB_TYPE_DESCRIPTORS = ("S", "N", "BOOL", "M", "L") diff --git a/lambdas/mns_publisher/src/create_notification.py b/lambdas/mns_publisher/src/create_notification.py index 8aab0b4026..fc359cef43 100644 --- a/lambdas/mns_publisher/src/create_notification.py +++ b/lambdas/mns_publisher/src/create_notification.py @@ -10,7 +10,7 @@ from common.api_clients.get_pds_details import pds_get_patient_details from common.clients import logger from common.get_service_url import get_service_url -from constants import DYNAMO_DB_TYPE_DESCRIPTORS, IMMUNISATION_TYPE, SPEC_VERSION +from constants import DYNAMO_DB_TYPE_DESCRIPTORS, IMMUNISATION_EVENT_SOURCE, IMMUNISATION_EVENT_TYPE, SPEC_VERSION IMMUNIZATION_ENV = os.getenv("IMMUNIZATION_ENV") IMMUNIZATION_BASE_PATH = os.getenv("IMMUNIZATION_BASE_PATH") @@ -43,8 +43,8 @@ def create_mns_notification(sqs_event: SQSMessage) -> MnsNotificationPayload: return { "specversion": SPEC_VERSION, "id": str(uuid.uuid4()), - "source": immunisation_url, - "type": IMMUNISATION_TYPE, + "source": IMMUNISATION_EVENT_SOURCE, + "type": IMMUNISATION_EVENT_TYPE, "time": date_and_time, "subject": nhs_number, "dataref": f"{immunisation_url}/Immunization/{imms_id}", diff --git a/lambdas/mns_publisher/src/observability.py b/lambdas/mns_publisher/src/observability.py new file mode 100644 index 0000000000..3fbef40ee2 --- /dev/null +++ b/lambdas/mns_publisher/src/observability.py @@ -0,0 +1,21 @@ +""" +Centralised observability for MNS publisher Lambda. 
+ +log_uncaught_exceptions=True ensures unexpected exceptions are captured as +structured JSON logs at the Lambda boundary. +""" + +from __future__ import annotations + +import os + +from aws_lambda_powertools import Logger + +_SERVICE_NAME = "mns-immunisation-publisher." + +logger: Logger = Logger( + service=_SERVICE_NAME, + level=os.environ.get("LOG_LEVEL", "INFO"), + log_uncaught_exceptions=True, + location=os.environ.get("POWERTOOLS_LOGGER_LOG_CALLABLE_LOCATION", "false").lower() == "true", +) diff --git a/lambdas/mns_publisher/src/process_records.py b/lambdas/mns_publisher/src/process_records.py index e55924d704..2b1408daef 100644 --- a/lambdas/mns_publisher/src/process_records.py +++ b/lambdas/mns_publisher/src/process_records.py @@ -1,33 +1,63 @@ import json import os -from typing import Tuple +from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord from aws_lambda_typing.events.sqs import SQSMessage from common.api_clients.mns_service import MnsService from common.api_clients.mns_setup import get_mns_service from common.api_clients.mock_mns_service import MockMnsService -from common.clients import logger from create_notification import create_mns_notification +from observability import logger mns_env = os.getenv("MNS_ENV", "int") -MNS_TEST_QUEUE_URL = os.getenv("MNS_TEST_QUEUE_URL") +_mns_service: MnsService | MockMnsService | None = None +SqsRecord = SQSRecord | SQSMessage -def process_records(records: list[SQSMessage]) -> dict[str, list]: +def _get_message_id(record: SqsRecord) -> str: + if isinstance(record, SQSRecord): + return record.message_id + + return record.get("messageId", "unknown") + + +def _get_body(record: SqsRecord) -> dict | str: + if isinstance(record, SQSRecord): + return record.body + + return record.get("body", {}) + + +def _as_sqs_message(record: SqsRecord) -> SQSMessage: + if isinstance(record, SQSRecord): + return record.raw_event + + return record + + +def _get_runtime_mns_service() -> MnsService | 
MockMnsService: + global _mns_service + if _mns_service is None: + _mns_service = get_mns_service(mns_env=mns_env) + + return _mns_service + + +def process_records(records: list[SqsRecord]) -> dict[str, list]: """ Process multiple SQS records. Args: records: List of SQS records to process Returns: List of failed item identifiers for partial batch failure """ batch_item_failures = [] - mns_service = get_mns_service(mns_env=mns_env) + mns_service = _get_runtime_mns_service() for record in records: try: process_record(record, mns_service) except Exception: - message_id = record.get("messageId", "unknown") + message_id = _get_message_id(record) batch_item_failures.append({"itemIdentifier": message_id}) logger.exception("Failed to process record", extra={"message_id": message_id}) @@ -39,7 +69,7 @@ def process_records(records: list[SQSMessage]) -> dict[str, list]: return {"batchItemFailures": batch_item_failures} -def process_record(record: SQSMessage, mns_service: MnsService | MockMnsService) -> None: +def process_record(record: SqsRecord, mns_service: MnsService | MockMnsService) -> None: """ Process a single SQS record. 
Args: @@ -50,34 +80,36 @@ def process_record(record: SQSMessage, mns_service: MnsService | MockMnsService) message_id, immunisation_id = extract_trace_ids(record) notification_id = None - mns_notification_payload = create_mns_notification(record) + mns_notification_payload = create_mns_notification(_as_sqs_message(record)) notification_id = mns_notification_payload.get("id") action_flag = mns_notification_payload.get("filtering", {}).get("action") logger.info( "Processing message", - extra={ - "notification_id": notification_id, - "message_id": message_id, - "immunisation_id": immunisation_id, - "action_flag": action_flag, - }, + notification_id=notification_id, + message_id=message_id, + immunisation_id=immunisation_id, + action_flag=action_flag, ) mns_service.publish_notification(mns_notification_payload) - logger.info("Successfully created MNS notification", extra={"mns_notification_id": notification_id}) + + logger.info( + "Successfully created MNS notification", + mns_notification_id=notification_id, + ) -def extract_trace_ids(record: SQSMessage) -> Tuple[str, str | None]: +def extract_trace_ids(record: SqsRecord) -> tuple[str, str | None]: """ Extract identifiers for tracing from SQS record. 
Returns: Tuple of (message_id, immunisation_id) """ - sqs_message_id = record.get("messageId", "unknown") + sqs_message_id = _get_message_id(record) immunisation_id = None try: - sqs_event_body = record.get("body", {}) + sqs_event_body = _get_body(record) if isinstance(sqs_event_body, str): sqs_event_body = json.loads(sqs_event_body) diff --git a/lambdas/mns_publisher/tests/test_create_notification.py b/lambdas/mns_publisher/tests/test_create_notification.py index 179b1ee28b..44841923ca 100644 --- a/lambdas/mns_publisher/tests/test_create_notification.py +++ b/lambdas/mns_publisher/tests/test_create_notification.py @@ -3,7 +3,7 @@ import unittest from unittest.mock import MagicMock, patch -from constants import IMMUNISATION_TYPE, SPEC_VERSION +from constants import IMMUNISATION_EVENT_SOURCE, IMMUNISATION_EVENT_TYPE, SPEC_VERSION from create_notification import ( _unwrap_dynamodb_value, calculate_age_at_vaccination, @@ -81,8 +81,8 @@ def test_success_create_mns_notification_complete_payload(self, mock_uuid, mock_ result = create_mns_notification(self.sample_sqs_event) self.assertEqual(result["specversion"], SPEC_VERSION) - self.assertEqual(result["type"], IMMUNISATION_TYPE) - self.assertEqual(result["source"], self.expected_immunisation_url) + self.assertEqual(result["type"], IMMUNISATION_EVENT_TYPE) + self.assertEqual(result["source"], IMMUNISATION_EVENT_SOURCE) self.assertEqual(result["subject"], "9481152782") expected_dataref = f"{self.expected_immunisation_url}/Immunization/d058014c-b0fd-4471-8db9-3316175eb825" diff --git a/lambdas/mns_publisher/tests/test_lambda_handler.py b/lambdas/mns_publisher/tests/test_lambda_handler.py index 1602e01214..2c45682f14 100644 --- a/lambdas/mns_publisher/tests/test_lambda_handler.py +++ b/lambdas/mns_publisher/tests/test_lambda_handler.py @@ -125,7 +125,7 @@ def setUpClass(cls): cls.sample_sqs_record = load_sample_sqs_event() @patch("process_records.logger") - @patch("process_records.get_mns_service") + 
@patch("process_records._get_runtime_mns_service") @patch("process_records.process_record") def test_process_records_all_success(self, mock_process_record, mock_get_mns, mock_logger): """Test processing multiple records with all successes.""" @@ -145,7 +145,7 @@ def test_process_records_all_success(self, mock_process_record, mock_get_mns, mo mock_logger.info.assert_called_with("Successfully processed all 2 messages") @patch("process_records.logger") - @patch("process_records.get_mns_service") + @patch("process_records._get_runtime_mns_service") @patch("process_records.process_record") def test_process_records_partial_failure(self, mock_process_record, mock_get_mns, mock_logger): """Test processing with some failures.""" @@ -167,7 +167,7 @@ def test_process_records_partial_failure(self, mock_process_record, mock_get_mns mock_logger.warning.assert_called_with("Batch completed with 1 failures") @patch("process_records.logger") - @patch("process_records.get_mns_service") + @patch("process_records._get_runtime_mns_service") @patch("process_records.process_record") def test_process_records_empty_list(self, mock_process_record, mock_get_mns, mock_logger): """Test processing empty record list.""" @@ -181,7 +181,7 @@ def test_process_records_empty_list(self, mock_process_record, mock_get_mns, moc mock_logger.info.assert_called_with("Successfully processed all 0 messages") @patch("process_records.logger") - @patch("process_records.get_mns_service") + @patch("process_records._get_runtime_mns_service") @patch("process_records.process_record") def test_process_records_mns_service_created_once(self, mock_process_record, mock_get_mns, mock_logger): """Test that MNS service is created only once for batch.""" @@ -300,7 +300,7 @@ def test_successful_notification_creation_with_gp(self, mock_logger, mock_get_to @responses.activate @patch("common.api_clients.authentication.AppRestrictedAuth.get_access_token") - @patch("process_records.get_mns_service") + 
@patch("process_records._get_runtime_mns_service") @patch("process_records.logger") def test_pds_failure(self, mock_logger, mock_get_mns, mock_get_token): """ diff --git a/lambdas/mns_publisher/tests/test_utils.py b/lambdas/mns_publisher/tests/test_utils.py index 4c6a71c15f..1783681aec 100644 --- a/lambdas/mns_publisher/tests/test_utils.py +++ b/lambdas/mns_publisher/tests/test_utils.py @@ -23,7 +23,7 @@ def load_sample_sqs_event() -> dict: Expects: lambdas/mns_publisher/tests/sqs_event.json """ sample_event_path = Path(__file__).parent / "sample_data" / "sqs_event.json" - with open(sample_event_path, "r") as f: + with open(sample_event_path) as f: raw_event = json.load(f) if isinstance(raw_event.get("body"), dict): diff --git a/lambdas/shared/src/common/api_clients/authentication.py b/lambdas/shared/src/common/api_clients/authentication.py index 396d41c195..6e589d3a6a 100644 --- a/lambdas/shared/src/common/api_clients/authentication.py +++ b/lambdas/shared/src/common/api_clients/authentication.py @@ -2,96 +2,100 @@ import json import time import uuid -from enum import Enum +from typing import Any import jwt import requests +from common.api_clients.constants import ( + ACCESS_TOKEN_EXPIRY_SECONDS, + ACCESS_TOKEN_MIN_ACCEPTABLE_LIFETIME_SECONDS, + CLIENT_ASSERTION_TYPE_JWT_BEARER, + CONTENT_TYPE_X_WWW_FORM_URLENCODED, + GRANT_TYPE_CLIENT_CREDENTIALS, + JWT_EXPIRY_SECONDS, +) from common.clients import logger from common.models.errors import UnhandledResponseError -from ..cache import Cache - - -class Service(Enum): - PDS = "pds" - IMMUNIZATION = "imms" - class AppRestrictedAuth: - def __init__(self, service: Service, secret_manager_client, environment, cache: Cache): + def __init__(self, secret_manager_client: Any, environment: str, secret_name: str | None = None): self.secret_manager_client = secret_manager_client - self.cache = cache - self.cache_key = f"{service.value}_access_token" - - self.expiry = 30 - self.secret_name = ( - 
f"imms/pds/{environment}/jwt-secrets" - if service == Service.PDS - else f"imms/immunization/{environment}/jwt-secrets" - ) + self.cached_access_token: str | None = None + self.cached_access_token_expiry_time: int | None = None + self.cached_service_secrets: dict[str, Any] | None = None + + self.secret_name = f"imms/outbound/{environment}/jwt-secrets" if secret_name is None else secret_name self.token_url = ( f"https://{environment}.api.service.nhs.uk/oauth2/token" if environment != "prod" else "https://api.service.nhs.uk/oauth2/token" ) - def get_service_secrets(self): - kwargs = {"SecretId": self.secret_name} - response = self.secret_manager_client.get_secret_value(**kwargs) + def get_service_secrets(self) -> dict[str, Any]: + if self.cached_service_secrets is not None: + return self.cached_service_secrets + + response = self.secret_manager_client.get_secret_value(SecretId=self.secret_name) secret_object = json.loads(response["SecretString"]) secret_object["private_key"] = base64.b64decode(secret_object["private_key_b64"]).decode() + self.cached_service_secrets = secret_object return secret_object - def create_jwt(self, now: int): - logger.info("create_jwt") + def create_jwt(self, now: int) -> str: secret_object = self.get_service_secrets() - claims = { - "iss": secret_object["api_key"], - "sub": secret_object["api_key"], - "aud": self.token_url, - "iat": now, - "exp": now + self.expiry, - "jti": str(uuid.uuid4()), - } - return jwt.encode( - claims, + { + "iss": secret_object["api_key"], + "sub": secret_object["api_key"], + "aud": self.token_url, + "iat": now, + "exp": now + JWT_EXPIRY_SECONDS, + "jti": str(uuid.uuid4()), + }, secret_object["private_key"], algorithm="RS512", headers={"kid": secret_object["kid"]}, ) - def get_access_token(self): - logger.info("get_access_token") + def _request_access_token(self, jwt_assertion: str) -> requests.Response: + return requests.post( + self.token_url, + data={ + "grant_type": GRANT_TYPE_CLIENT_CREDENTIALS, + 
"client_assertion_type": CLIENT_ASSERTION_TYPE_JWT_BEARER, + "client_assertion": jwt_assertion, + }, + headers={"Content-Type": CONTENT_TYPE_X_WWW_FORM_URLENCODED}, + timeout=10, + ) + + def get_access_token(self) -> str: now = int(time.time()) - logger.info(f"Current time: {now}, Expiry time: {now + self.expiry}") - # Check if token is cached and not expired - logger.info(f"Cache key: {self.cache_key}") - logger.info("Checking cache for access token") - cached = self.cache.get(self.cache_key) - - if cached and cached["expires_at"] > now: - logger.info("Returning cached access token") - return cached["token"] - - logger.info("No valid cached token found, creating new token") - _jwt = self.create_jwt(now) - - headers = {"Content-Type": "application/x-www-form-urlencoded"} - data = { - "grant_type": "client_credentials", - "client_assertion_type": "urn:ietf:params:oauth:client-assertion-type:jwt-bearer", - "client_assertion": _jwt, - } - token_response = requests.post(self.token_url, data=data, headers=headers) + + if ( + self.cached_access_token + and self.cached_access_token_expiry_time is not None + and self.cached_access_token_expiry_time > now + ACCESS_TOKEN_MIN_ACCEPTABLE_LIFETIME_SECONDS + ): + return self.cached_access_token + + logger.info("Requesting new access token") + jwt_assertion = self.create_jwt(now) + + try: + token_response = self._request_access_token(jwt_assertion) + except requests.RequestException as error: + logger.exception("Failed to fetch access token from %s", self.token_url) + raise UnhandledResponseError(response=str(error), message="Failed to get access token") from error + if token_response.status_code != 200: raise UnhandledResponseError(response=token_response.text, message="Failed to get access token") token = token_response.json().get("access_token") - - self.cache.put(self.cache_key, {"token": token, "expires_at": now + self.expiry}) - + self.cached_access_token = token + self.cached_access_token_expiry_time = now + 
ACCESS_TOKEN_EXPIRY_SECONDS return token diff --git a/lambdas/shared/src/common/api_clients/constants.py b/lambdas/shared/src/common/api_clients/constants.py index aa305f146f..9730afeed6 100644 --- a/lambdas/shared/src/common/api_clients/constants.py +++ b/lambdas/shared/src/common/api_clients/constants.py @@ -37,3 +37,14 @@ class MnsNotificationPayload(TypedDict): subject: str dataref: str filtering: FilteringData + + +GRANT_TYPE_CLIENT_CREDENTIALS = "client_credentials" +CLIENT_ASSERTION_TYPE_JWT_BEARER = "urn:ietf:params:oauth:client-assertion-type:jwt-bearer" +CONTENT_TYPE_X_WWW_FORM_URLENCODED = "application/x-www-form-urlencoded" + +JWT_EXPIRY_SECONDS = 5 * 60 +ACCESS_TOKEN_EXPIRY_SECONDS = 10 * 60 +# Throw away the cached token earlier than the exact expiry time so we have enough +# time left to use it (and to account for network latency, clock skew etc.) +ACCESS_TOKEN_MIN_ACCEPTABLE_LIFETIME_SECONDS = 30 diff --git a/lambdas/shared/src/common/api_clients/get_pds_details.py b/lambdas/shared/src/common/api_clients/get_pds_details.py index 63844b3cda..7f728c81e8 100644 --- a/lambdas/shared/src/common/api_clients/get_pds_details.py +++ b/lambdas/shared/src/common/api_clients/get_pds_details.py @@ -3,30 +3,33 @@ """ import os -import tempfile -from common.api_clients.authentication import AppRestrictedAuth, Service +from common.api_clients.authentication import AppRestrictedAuth from common.api_clients.errors import PdsSyncException from common.api_clients.pds_service import PdsService -from common.cache import Cache from common.clients import get_secrets_manager_client, logger PDS_ENV = os.getenv("PDS_ENV", "int") -safe_tmp_dir = tempfile.mkdtemp(dir="/tmp") # NOSONAR(S5443) +_pds_service: PdsService | None = None -# Get Patient details from external service PDS using NHS number from MNS notification -def pds_get_patient_details(nhs_number: str) -> dict: - try: - cache = Cache(directory=safe_tmp_dir) + +def get_pds_service() -> PdsService: + global _pds_service 
+ if _pds_service is None: authenticator = AppRestrictedAuth( - service=Service.PDS, secret_manager_client=get_secrets_manager_client(), environment=PDS_ENV, - cache=cache, ) - pds_service = PdsService(authenticator, PDS_ENV) - patient = pds_service.get_patient_details(nhs_number) + _pds_service = PdsService(authenticator, PDS_ENV) + + return _pds_service + + +# Get Patient details from external service PDS using NHS number from MNS notification +def pds_get_patient_details(nhs_number: str) -> dict: + try: + patient = get_pds_service().get_patient_details(nhs_number) return patient except Exception as e: msg = "Error retrieving patient details from PDS" diff --git a/lambdas/shared/src/common/api_clients/mns_service.py b/lambdas/shared/src/common/api_clients/mns_service.py index 417fef6ad5..263c58e014 100644 --- a/lambdas/shared/src/common/api_clients/mns_service.py +++ b/lambdas/shared/src/common/api_clients/mns_service.py @@ -25,7 +25,6 @@ class MnsService: def __init__(self, authenticator: AppRestrictedAuth): self.authenticator = authenticator - self.access_token = self.authenticator.get_access_token() logging.info(f"Using SQS ARN for subscription: {SQS_ARN}") def _build_subscription_payload(self, event_type: str, reason: str | None = None, status: str = "requested") -> dict: @@ -54,9 +53,10 @@ def _build_subscription_payload(self, event_type: str, reason: str | None = None def _build_headers(self, content_type: str = "application/fhir+json") -> dict: """Build request headers with authentication and correlation ID.""" + access_token = self.authenticator.get_access_token() return { "Content-Type": content_type, - "Authorization": f"Bearer {self.access_token}", + "Authorization": f"Bearer {access_token}", "X-Correlation-ID": str(uuid.uuid4()), } @@ -138,7 +138,7 @@ def check_delete_subscription(self): return f"Error deleting subscription: {str(e)}" def publish_notification(self, notification_payload: MnsNotificationPayload) -> dict | None: - response = 
requests.request( + response = request_with_retry_backoff( "POST", f"{MNS_BASE_URL}/events", headers=self._build_headers(content_type="application/cloudevents+json"), diff --git a/lambdas/shared/src/common/api_clients/mns_setup.py b/lambdas/shared/src/common/api_clients/mns_setup.py index 5cecd4440f..f7398d61f0 100644 --- a/lambdas/shared/src/common/api_clients/mns_setup.py +++ b/lambdas/shared/src/common/api_clients/mns_setup.py @@ -1,14 +1,11 @@ import logging import os -import boto3 -from botocore.config import Config - -from common.api_clients.authentication import AppRestrictedAuth, Service +from common.api_clients.authentication import AppRestrictedAuth from common.api_clients.constants import DEV_ENVIRONMENT from common.api_clients.mns_service import MnsService from common.api_clients.mock_mns_service import MockMnsService -from common.cache import Cache +from common.clients import get_secrets_manager_client logging.basicConfig(level=logging.INFO) MNS_TEST_QUEUE_URL = os.getenv("MNS_TEST_QUEUE_URL") @@ -19,14 +16,10 @@ def get_mns_service(mns_env: str = "int"): logging.info("Dev environment: Using MockMnsService") return MockMnsService(MNS_TEST_QUEUE_URL) else: - boto_config = Config(region_name="eu-west-2") - cache = Cache(directory="/tmp") logging.info("Creating authenticator...") authenticator = AppRestrictedAuth( - service=Service.PDS, - secret_manager_client=boto3.client("secretsmanager", config=boto_config), + secret_manager_client=get_secrets_manager_client(), environment=mns_env, - cache=cache, ) logging.info("Authentication Initiated...") return MnsService(authenticator) diff --git a/lambdas/shared/src/common/cache.py b/lambdas/shared/src/common/cache.py deleted file mode 100644 index 94fd9abbd3..0000000000 --- a/lambdas/shared/src/common/cache.py +++ /dev/null @@ -1,33 +0,0 @@ -import json - - -class Cache: - """Key-value file cache""" - - def __init__(self, directory): - filename = f"{directory}/cache.json" - with open(filename, "a+") as 
self.cache_file: - self.cache_file.seek(0) - content = self.cache_file.read() - if len(content) == 0: - self.cache_dict = {} - else: - self.cache_dict = json.loads(content) - - def put(self, key: str, value: dict): - self.cache_dict[key] = value - self._overwrite() - - def get(self, key: str) -> dict | None: - return self.cache_dict.get(key, None) - - def delete(self, key: str): - if key not in self.cache_dict: - return - del self.cache_dict[key] - - def _overwrite(self): - with open(self.cache_file.name, "w") as self.cache_file: - self.cache_file.seek(0) - self.cache_file.write(json.dumps(self.cache_dict)) - self.cache_file.truncate() diff --git a/lambdas/shared/src/common/get_service_url.py b/lambdas/shared/src/common/get_service_url.py index 9188c07509..212340e0bf 100644 --- a/lambdas/shared/src/common/get_service_url.py +++ b/lambdas/shared/src/common/get_service_url.py @@ -1,9 +1,7 @@ -from typing import Optional - from common.constants import DEFAULT_BASE_PATH, PR_ENV_PREFIX -def get_service_url(service_env: Optional[str], service_base_path: Optional[str]) -> str: +def get_service_url(service_env: str | None, service_base_path: str | None) -> str: """Sets the service URL based on service parameters derived from env vars. PR environments use internal-dev while we also default to this environment. 
The only other exceptions are preprod which maps to the Apigee int environment and prod which does not have a subdomain.""" @@ -22,5 +20,5 @@ def get_service_url(service_env: Optional[str], service_base_path: Optional[str] return f"https://{subdomain}api.service.nhs.uk/{service_base_path}" -def is_pr_env(service_env: Optional[str]) -> bool: +def is_pr_env(service_env: str | None) -> bool: return service_env is not None and service_env.startswith(PR_ENV_PREFIX) diff --git a/lambdas/shared/tests/test_common/api_clients/test_authentication.py b/lambdas/shared/tests/test_common/api_clients/test_authentication.py index 11fc2e1d83..fd91591ca2 100644 --- a/lambdas/shared/tests/test_common/api_clients/test_authentication.py +++ b/lambdas/shared/tests/test_common/api_clients/test_authentication.py @@ -7,7 +7,7 @@ import responses from responses import matchers -from common.api_clients.authentication import AppRestrictedAuth, Service +from common.api_clients.authentication import ACCESS_TOKEN_EXPIRY_SECONDS, AppRestrictedAuth from common.models.errors import UnhandledResponseError @@ -29,11 +29,8 @@ def setUp(self): self.secret_manager_client = MagicMock() self.secret_manager_client.get_secret_value.return_value = secret_response - self.cache = MagicMock() - self.cache.get.return_value = None - env = "an-env" - self.authenticator = AppRestrictedAuth(Service.PDS, self.secret_manager_client, env, self.cache) + self.authenticator = AppRestrictedAuth(self.secret_manager_client, env) self.url = f"https://{env}.api.service.nhs.uk/oauth2/token" @responses.activate @@ -89,35 +86,47 @@ def test_env_mapping(self): """it should target int environment for none-prod environment, otherwise int""" # For env=none-prod env = "some-env" - auth = AppRestrictedAuth(Service.PDS, None, env, None) + auth = AppRestrictedAuth(None, env) self.assertTrue(auth.token_url.startswith(f"https://{env}.")) # For env=prod env = "prod" - auth = AppRestrictedAuth(Service.PDS, None, env, None) - 
self.assertTrue(env not in auth.token_url) + auth = AppRestrictedAuth(None, env) + self.assertNotIn(env, auth.token_url) def test_returned_cached_token(self): """it should return cached token""" - cached_token = { - "token": "a-cached-access-token", - "expires_at": int(time.time()) + 99999, # make sure it's not expired - } - self.cache.get.return_value = cached_token + self.authenticator.cached_access_token = "a-cached-access-token" + self.authenticator.cached_access_token_expiry_time = int(time.time()) + 99999 # make sure it's not expired # When token = self.authenticator.get_access_token() # Then - self.assertEqual(token, cached_token["token"]) + self.assertEqual(token, "a-cached-access-token") + self.secret_manager_client.assert_not_called() + + def test_returned_cached_service_secrets(self): + """it should return cached service secrets""" + cached_secrets = { + "api_key": self.api_key, + "kid": self.kid, + "private_key_b64": "unused", + "private_key": self.private_key, + } + self.authenticator.cached_service_secrets = cached_secrets + + # When + act_secrets = self.authenticator.get_service_secrets() + + # Then + self.assertEqual(act_secrets, cached_secrets) self.secret_manager_client.assert_not_called() @responses.activate def test_update_cache(self): """it should update cached token""" - self.cache.get.return_value = None token = "a-new-access-token" - cached_token = {"token": token, "expires_at": ANY} responses.add(responses.POST, self.url, status=200, json={"access_token": token}) with patch("jwt.encode") as mock_jwt: @@ -126,18 +135,15 @@ def test_update_cache(self): self.authenticator.get_access_token() # Then - self.cache.put.assert_called_once_with(f"{Service.PDS.value}_access_token", cached_token) + self.assertEqual(self.authenticator.cached_access_token, "a-new-access-token") @responses.activate def test_expired_token_in_cache(self): """it should not return cached access token if it's expired""" now_epoch = 12345 - expires_at = now_epoch + 
self.authenticator.expiry - cached_token = { - "token": "an-expired-cached-access-token", - "expires_at": expires_at, - } - self.cache.get.return_value = cached_token + expires_at = now_epoch + ACCESS_TOKEN_EXPIRY_SECONDS + self.authenticator.cached_access_token = ("an-expired-cached-access-token",) + self.authenticator.cached_access_token_expiry_time = expires_at new_token = "a-new-token" responses.add(responses.POST, self.url, status=200, json={"access_token": new_token}) @@ -151,42 +157,12 @@ def test_expired_token_in_cache(self): self.authenticator.get_access_token() # Then - exp_cached_token = { - "token": new_token, - "expires_at": new_now + self.authenticator.expiry, - } - self.cache.put.assert_called_once_with(ANY, exp_cached_token) - - @responses.activate - def test_uses_cache_for_token(self): - """it should use the cache for the `Service` auth call""" - - token = "a-new-access-token" - token_call = responses.add(responses.POST, self.url, status=200, json={"access_token": token}) - values = {} - - def get_side_effect(key): - return values.get(key, None) - - def put_side_effect(key, value): - values[key] = value - - self.cache.get.side_effect = get_side_effect - self.cache.put.side_effect = put_side_effect - - with patch("common.api_clients.authentication.jwt.encode") as mock_jwt: - mock_jwt.return_value = "a-jwt" - # When - self.assertEqual(0, token_call.call_count) - self.authenticator.get_access_token() - self.assertEqual(1, token_call.call_count) - self.authenticator.get_access_token() - self.assertEqual(1, token_call.call_count) + self.assertEqual(self.authenticator.cached_access_token, new_token) + self.assertEqual(self.authenticator.cached_access_token_expiry_time, new_now + ACCESS_TOKEN_EXPIRY_SECONDS) @responses.activate def test_raise_exception(self): """it should raise exception if auth response is not 200""" - self.cache.get.return_value = None responses.add(responses.POST, self.url, status=400) with 
patch("common.api_clients.authentication.jwt.encode") as mock_jwt: diff --git a/lambdas/shared/tests/test_common/api_clients/test_mns_service.py b/lambdas/shared/tests/test_common/api_clients/test_mns_service.py index 3cc9daab9e..b1f6191adb 100644 --- a/lambdas/shared/tests/test_common/api_clients/test_mns_service.py +++ b/lambdas/shared/tests/test_common/api_clients/test_mns_service.py @@ -50,7 +50,7 @@ def test_successful_subscription(self, mock_request): # Assert self.assertEqual(result, {"subscriptionId": "abc123"}) self.assertEqual(mock_request.call_count, 2) - self.authenticator.get_access_token.assert_called_once() + self.assertGreaterEqual(self.authenticator.get_access_token.call_count, 1) @patch("common.api_clients.mns_service.requests.request") def test_not_found_subscription(self, mock_request): @@ -293,13 +293,13 @@ def test_unhandled_status_code(self): self.assertIn("Unhandled error: 418", str(context.exception)) self.assertEqual(context.exception.response, {"resource": 1234}) - @patch("common.api_clients.mns_service.requests.request") - def test_publish_notification_success(self, mock_request): + @patch("common.api_clients.mns_service.request_with_retry_backoff") + def test_publish_notification_success(self, mock_request_with_retry_backoff): """Test successful notification publishing.""" mock_response = Mock() mock_response.status_code = 200 mock_response.json.return_value = {"status": "published"} - mock_request.return_value = mock_response + mock_request_with_retry_backoff.return_value = mock_response notification_payload = { "specversion": "1.0", @@ -313,27 +313,27 @@ def test_publish_notification_success(self, mock_request): self.assertEqual(result["status"], "published") - # Verify the request was made correctly - mock_request.assert_called_once() - call_args = mock_request.call_args + # Verify the request was made correctly through retry helper + mock_request_with_retry_backoff.assert_called_once() + call_args = 
mock_request_with_retry_backoff.call_args headers = call_args[1]["headers"] self.assertEqual(headers["Content-Type"], "application/cloudevents+json") - mock_request.assert_called_once() - @patch("common.api_clients.mns_service.requests.request") + @patch("common.api_clients.mns_service.request_with_retry_backoff") @patch("common.api_clients.mns_service.raise_error_response") - def test_publish_notification_failure(self, mock_raise_error, mock_request): + def test_publish_notification_failure(self, mock_raise_error, mock_request_with_retry_backoff): """Test notification publishing failure.""" mock_response = Mock() mock_response.status_code = 400 - mock_request.return_value = mock_response + mock_request_with_retry_backoff.return_value = mock_response notification_payload = {"id": "test-id"} service = MnsService(self.authenticator) service.publish_notification(notification_payload) + mock_request_with_retry_backoff.assert_called_once() mock_raise_error.assert_called_once_with(mock_response) diff --git a/lambdas/shared/tests/test_common/api_clients/test_mns_setup.py b/lambdas/shared/tests/test_common/api_clients/test_mns_setup.py index 06fe1959cf..ed1e4ebcf0 100644 --- a/lambdas/shared/tests/test_common/api_clients/test_mns_setup.py +++ b/lambdas/shared/tests/test_common/api_clients/test_mns_setup.py @@ -5,10 +5,10 @@ class TestGetMnsService(unittest.TestCase): - @patch("common.api_clients.mns_setup.boto3.client") + @patch("common.api_clients.mns_setup.get_secrets_manager_client") @patch("common.api_clients.mns_setup.AppRestrictedAuth") @patch("common.api_clients.mns_setup.MnsService") - def test_get_mns_service(self, mock_mns_service, mock_app_auth, mock_boto_client): + def test_get_mns_service(self, mock_mns_service, mock_app_auth, mock_get_secrets_manager_client): # Arrange mock_auth_instance = MagicMock() mock_app_auth.return_value = mock_auth_instance @@ -17,14 +17,14 @@ def test_get_mns_service(self, mock_mns_service, mock_app_auth, mock_boto_client 
mock_mns_service.return_value = mock_mns_instance mock_secrets_client = MagicMock() - mock_boto_client.return_value = mock_secrets_client + mock_get_secrets_manager_client.return_value = mock_secrets_client # Act result = get_mns_service("int") # Assert self.assertEqual(result, mock_mns_instance) - mock_boto_client.assert_called_once_with("secretsmanager", config=mock_boto_client.call_args[1]["config"]) + mock_get_secrets_manager_client.assert_called_once_with() mock_app_auth.assert_called_once() mock_mns_service.assert_called_once_with(mock_auth_instance) diff --git a/lambdas/shared/tests/test_common/api_clients/test_pds_details.py b/lambdas/shared/tests/test_common/api_clients/test_pds_details.py index f833c10d07..e58b430ee8 100644 --- a/lambdas/shared/tests/test_common/api_clients/test_pds_details.py +++ b/lambdas/shared/tests/test_common/api_clients/test_pds_details.py @@ -2,30 +2,17 @@ from unittest.mock import MagicMock, patch from common.api_clients.errors import PdsSyncException -from common.api_clients.get_pds_details import pds_get_patient_details +from common.api_clients.get_pds_details import get_pds_service, pds_get_patient_details class TestGetPdsPatientDetails(unittest.TestCase): def setUp(self): - """Set up test fixtures and mocks""" self.test_patient_id = "9912003888" + get_pds_service.__globals__["_pds_service"] = None - # Patch all external dependencies self.logger_patcher = patch("common.api_clients.get_pds_details.logger") self.mock_logger = self.logger_patcher.start() - self.secrets_manager_patcher = patch("common.clients.global_secrets_manager_client") - self.mock_secrets_manager = self.secrets_manager_patcher.start() - - self.pds_env_patcher = patch("os.getenv") - self.mock_pds_env = self.pds_env_patcher.start() - self.mock_pds_env.return_value = "test-env" - - self.cache_patcher = patch("common.api_clients.get_pds_details.Cache") - self.mock_cache_class = self.cache_patcher.start() - self.mock_cache_instance = MagicMock() - 
self.mock_cache_class.return_value = self.mock_cache_instance - self.auth_patcher = patch("common.api_clients.get_pds_details.AppRestrictedAuth") self.mock_auth_class = self.auth_patcher.start() self.mock_auth_instance = MagicMock() @@ -37,12 +24,10 @@ def setUp(self): self.mock_pds_service_class.return_value = self.mock_pds_service_instance def tearDown(self): - """Clean up patches""" + get_pds_service.__globals__["_pds_service"] = None patch.stopall() def test_pds_get_patient_details_success(self): - """Test successful retrieval of patient details""" - # Arrange expected_patient_data = { "identifier": [{"value": "9912003888"}], "name": "John Doe", @@ -51,159 +36,55 @@ def test_pds_get_patient_details_success(self): } self.mock_pds_service_instance.get_patient_details.return_value = expected_patient_data - # Act result = pds_get_patient_details(self.test_patient_id) - # Assert self.assertEqual(result["identifier"][0]["value"], "9912003888") - - # Verify Cache was initialized correctly - self.mock_cache_class.assert_called_once() - - # Verify get_patient_details was called - self.mock_pds_service_instance.get_patient_details.assert_called_once() + self.mock_auth_class.assert_called_once() + self.mock_pds_service_class.assert_called_once() def test_pds_get_patient_details_no_patient_found(self): - """Test when PDS returns None (no patient found)""" - # Arrange self.mock_pds_service_instance.get_patient_details.return_value = None - # Act result = pds_get_patient_details(self.test_patient_id) - # Assert self.assertIsNone(result) - self.mock_pds_service_instance.get_patient_details.assert_called_once_with(self.test_patient_id) - def test_pds_get_patient_details_empty_response(self): - """Test when PDS returns empty dict (falsy)""" - # Arrange - self.mock_pds_service_instance.get_patient_details.return_value = None - - # Act - result = pds_get_patient_details(self.test_patient_id) - - # Assert - self.assertIsNone(result) - def 
test_pds_get_patient_details_pds_service_exception(self): - """Test when PdsService.get_patient_details raises an exception""" - # Arrange mock_exception = Exception("My custom error") self.mock_pds_service_instance.get_patient_details.side_effect = mock_exception - # Act with self.assertRaises(PdsSyncException) as context: pds_get_patient_details(self.test_patient_id) exception = context.exception - # Assert self.assertEqual( exception.message, "Error retrieving patient details from PDS", ) - # Verify exception was logged self.mock_logger.exception.assert_called_once_with("Error retrieving patient details from PDS") - self.mock_pds_service_instance.get_patient_details.assert_called_once_with(self.test_patient_id) - def test_pds_get_patient_details_cache_initialization_error(self): - """Test when Cache initialization fails""" - # Arrange - self.mock_cache_class.side_effect = OSError("Cannot write to /tmp") - - # Act - with self.assertRaises(PdsSyncException) as context: - pds_get_patient_details(self.test_patient_id) - - # Assert - exception = context.exception - self.assertEqual( - exception.message, - "Error retrieving patient details from PDS", - ) - - # Verify exception was logged - self.mock_logger.exception.assert_called_once_with("Error retrieving patient details from PDS") - - self.mock_cache_class.assert_called_once() - def test_pds_get_patient_details_auth_initialization_error(self): - """Test when AppRestrictedAuth initialization fails""" - # Arrange self.mock_auth_class.side_effect = ValueError("Invalid authentication parameters") - # Act with self.assertRaises(PdsSyncException) as context: pds_get_patient_details(self.test_patient_id) - # Assert exception = context.exception self.assertEqual( exception.message, "Error retrieving patient details from PDS", ) - # Verify exception was logged self.mock_logger.exception.assert_called_once_with("Error retrieving patient details from PDS") - def test_pds_get_patient_details_exception(self): - """Test when 
logger.info throws an exception""" - # Arrange - test_exception = Exception("some-random-error") - self.mock_pds_service_class.side_effect = test_exception - test_nhs_number = "another-nhs-number" - - # Act - with self.assertRaises(Exception) as context: - pds_get_patient_details(test_nhs_number) - - exception = context.exception - # Assert - self.assertEqual( - exception.message, - "Error retrieving patient details from PDS", - ) - # Verify logger.exception was called due to the caught exception - self.mock_logger.exception.assert_called_once_with("Error retrieving patient details from PDS") + def test_reuses_same_pds_service_instance(self): + pds_get_patient_details("1111111111") + pds_get_patient_details("2222222222") - def test_pds_get_patient_details_different_patient_ids(self): - """Test with different patient ID formats""" - test_cases = [ - ("9912003888", {"identifier": [{"value": "9912003888"}]}), - ("1234567890", {"identifier": [{"value": "1234567890"}]}), - ("0000000000", {"identifier": [{"value": "0000000000"}]}), - ] - - for patient_id, expected_response in test_cases: - with self.subTest(patient_id=patient_id): - # Reset mocks - self.mock_pds_service_instance.reset_mock() - self.mock_logger.reset_mock() - - # Arrange - self.mock_pds_service_instance.get_patient_details.return_value = expected_response - - # Act - result = pds_get_patient_details(patient_id) - - # Assert - self.assertEqual(result, expected_response) - self.mock_pds_service_instance.get_patient_details.assert_called_once_with(patient_id) - - def test_pds_get_patient_details(self): - """Test with complex identifier structure""" - # Arrange - test_nhs_number = "9912003888" - pds_id = "abcefghijkl" - mock_pds_response = {"identifier": [{"value": pds_id}]} - self.mock_pds_service_instance.get_patient_details.return_value = mock_pds_response - # Act - result = pds_get_patient_details(test_nhs_number) - - # Assert - function should extract the value from first identifier - 
self.assertEqual(result, mock_pds_response) - self.mock_pds_service_instance.get_patient_details.assert_called_once_with(test_nhs_number) + self.mock_auth_class.assert_called_once() + self.mock_pds_service_class.assert_called_once() + self.assertEqual(self.mock_pds_service_instance.get_patient_details.call_count, 2) diff --git a/lambdas/shared/tests/test_common/test_cache.py b/lambdas/shared/tests/test_common/test_cache.py deleted file mode 100644 index 8125099ac8..0000000000 --- a/lambdas/shared/tests/test_common/test_cache.py +++ /dev/null @@ -1,88 +0,0 @@ -import json -import os -import tempfile -import unittest - -from src.common.cache import Cache - - -class TestCache(unittest.TestCase): - def setUp(self): - self.cache = Cache(tempfile.gettempdir()) - - def test_cache_put(self): - """it should store cache in specified key""" - value = {"foo": "a-foo", "bar": 42} - key = "a_key" - - # When - self.cache.put(key, value) - act_value = self.cache.get(key) - - # Then - self.assertDictEqual(value, act_value) - - def test_cache_put_overwrite(self): - """it should store updated cache value""" - value = {"foo": "a-foo", "bar": 42} - key = "a_key" - self.cache.put(key, value) - - new_value = {"foo": "new-foo"} - self.cache.put(key, new_value) - - # When - updated_value = self.cache.get(key) - - # Then - self.assertDictEqual(new_value, updated_value) - - def test_key_not_found(self): - """it should return None if key doesn't exist""" - value = self.cache.get("it-does-not-exist") - self.assertIsNone(value) - - def test_delete(self): - """it should delete key""" - key = "a_key" - self.cache.put(key, {"a": "b"}) - self.cache.delete(key) - - value = self.cache.get(key) - self.assertIsNone(value) - - def test_delete_key_not_found(self): - """it should return None gracefully if key doesn't exist""" - value = self.cache.delete("it-does-not-exist") - self.assertIsNone(value) - - def test_write_to_file(self): - """it should update the cache file""" - value = {"foo": 
"a-long-foo-so-to-make-sure-truncate-is-working", "bar": 42} - key = "a_key" - self.cache.put(key, value) - # Add one and delete to make sure file gets updated - self.cache.put("to-delete-key", {"x": "y"}) - self.cache.delete("to-delete-key") - - # When - new_value = {"a": "b"} - self.cache.put(key, new_value) - - # Then - with open(self.cache.cache_file.name) as stored: - content = json.loads(stored.read()) - self.assertDictEqual(content[key], new_value) - - def test_cache_create_empty(self): - """it should gracefully create an empty cache""" - filename = f"{tempfile.gettempdir()}/cache.json" - os.remove(filename) - - # When - self.cache = Cache(tempfile.gettempdir()) - - # Then - with open(self.cache.cache_file.name) as stored: - content = stored.read() - self.assertEqual(len(content), 0) From 295a1d5c3eb2671367656807b1f752d42b65da67 Mon Sep 17 00:00:00 2001 From: Daniel Yip Date: Fri, 13 Mar 2026 22:06:50 +0000 Subject: [PATCH 05/14] Modify mns publisher infra to account properly for test queue (#1288) --- infrastructure/instance/dynamodb.tf | 4 ++-- .../environments/dev/internal-qa/variables.tfvars | 1 - .../instance/environments/dev/pr/variables.tfvars | 1 - .../environments/preprod/int-blue/variables.tfvars | 1 - .../environments/preprod/int-green/variables.tfvars | 1 - .../instance/environments/prod/blue/variables.tfvars | 1 - .../instance/environments/prod/green/variables.tfvars | 1 - infrastructure/instance/mns_publisher.tf | 3 +-- .../instance/modules/mns_publisher/outputs.tf | 9 +++++++++ .../modules/mns_publisher/sqs_test_publish_mns.tf | 10 ---------- infrastructure/instance/outputs.tf | 10 +++++----- infrastructure/instance/variables.tf | 5 ----- 12 files changed, 17 insertions(+), 30 deletions(-) create mode 100644 infrastructure/instance/modules/mns_publisher/outputs.tf diff --git a/infrastructure/instance/dynamodb.tf b/infrastructure/instance/dynamodb.tf index 0cfcbbbf06..41d1827fe1 100644 --- a/infrastructure/instance/dynamodb.tf +++ 
b/infrastructure/instance/dynamodb.tf @@ -68,8 +68,8 @@ resource "aws_dynamodb_table" "delta-dynamodb-table" { name = "imms-${local.resource_scope}-delta" billing_mode = "PAY_PER_REQUEST" hash_key = "PK" - stream_enabled = var.mns_publisher_feature_enabled - stream_view_type = var.mns_publisher_feature_enabled ? "NEW_IMAGE" : null + stream_enabled = true + stream_view_type = "NEW_IMAGE" deletion_protection_enabled = !local.is_temp attribute { diff --git a/infrastructure/instance/environments/dev/internal-qa/variables.tfvars b/infrastructure/instance/environments/dev/internal-qa/variables.tfvars index 409096620f..130fea83cc 100644 --- a/infrastructure/instance/environments/dev/internal-qa/variables.tfvars +++ b/infrastructure/instance/environments/dev/internal-qa/variables.tfvars @@ -4,6 +4,5 @@ dspp_core_account_id = "603871901111" pds_environment = "int" mns_environment = "dev" error_alarm_notifications_enabled = false -mns_publisher_feature_enabled = true create_mesh_processor = false has_sub_environment_scope = true diff --git a/infrastructure/instance/environments/dev/pr/variables.tfvars b/infrastructure/instance/environments/dev/pr/variables.tfvars index 26f288fef0..130fea83cc 100644 --- a/infrastructure/instance/environments/dev/pr/variables.tfvars +++ b/infrastructure/instance/environments/dev/pr/variables.tfvars @@ -4,6 +4,5 @@ dspp_core_account_id = "603871901111" pds_environment = "int" mns_environment = "dev" error_alarm_notifications_enabled = false -mns_publisher_feature_enabled = true # Switch this off once tested fully e2e in Lambda branch create_mesh_processor = false has_sub_environment_scope = true diff --git a/infrastructure/instance/environments/preprod/int-blue/variables.tfvars b/infrastructure/instance/environments/preprod/int-blue/variables.tfvars index 3d5c79af36..0b3107be75 100644 --- a/infrastructure/instance/environments/preprod/int-blue/variables.tfvars +++ b/infrastructure/instance/environments/preprod/int-blue/variables.tfvars @@ -4,7 
+4,6 @@ dspp_core_account_id = "603871901111" pds_environment = "int" mns_environment = "int" error_alarm_notifications_enabled = true -mns_publisher_feature_enabled = true # mesh no invocation period metric set to 3 days (in seconds) for preprod environment i.e 3 * 24 * 60 * 60 mesh_no_invocation_period_seconds = 259200 diff --git a/infrastructure/instance/environments/preprod/int-green/variables.tfvars b/infrastructure/instance/environments/preprod/int-green/variables.tfvars index 3d5c79af36..0b3107be75 100644 --- a/infrastructure/instance/environments/preprod/int-green/variables.tfvars +++ b/infrastructure/instance/environments/preprod/int-green/variables.tfvars @@ -4,7 +4,6 @@ dspp_core_account_id = "603871901111" pds_environment = "int" mns_environment = "int" error_alarm_notifications_enabled = true -mns_publisher_feature_enabled = true # mesh no invocation period metric set to 3 days (in seconds) for preprod environment i.e 3 * 24 * 60 * 60 mesh_no_invocation_period_seconds = 259200 diff --git a/infrastructure/instance/environments/prod/blue/variables.tfvars b/infrastructure/instance/environments/prod/blue/variables.tfvars index 447e9972d4..86e278bdc5 100644 --- a/infrastructure/instance/environments/prod/blue/variables.tfvars +++ b/infrastructure/instance/environments/prod/blue/variables.tfvars @@ -5,7 +5,6 @@ mns_account_id = "758334270304" pds_environment = "prod" mns_environment = "prod" error_alarm_notifications_enabled = true -mns_publisher_feature_enabled = true # mesh no invocation period metric set to 1 day (in seconds) for prod environment i.e 1 * 24 * 60 * 60 mesh_no_invocation_period_seconds = 86400 diff --git a/infrastructure/instance/environments/prod/green/variables.tfvars b/infrastructure/instance/environments/prod/green/variables.tfvars index 447e9972d4..86e278bdc5 100644 --- a/infrastructure/instance/environments/prod/green/variables.tfvars +++ b/infrastructure/instance/environments/prod/green/variables.tfvars @@ -5,7 +5,6 @@ mns_account_id 
= "758334270304" pds_environment = "prod" mns_environment = "prod" error_alarm_notifications_enabled = true -mns_publisher_feature_enabled = true # mesh no invocation period metric set to 1 day (in seconds) for prod environment i.e 1 * 24 * 60 * 60 mesh_no_invocation_period_seconds = 86400 diff --git a/infrastructure/instance/mns_publisher.tf b/infrastructure/instance/mns_publisher.tf index 7b6cc9f94d..d094919885 100644 --- a/infrastructure/instance/mns_publisher.tf +++ b/infrastructure/instance/mns_publisher.tf @@ -1,10 +1,9 @@ module "mns_publisher" { source = "./modules/mns_publisher" - count = var.mns_publisher_feature_enabled ? 1 : 0 ddb_delta_stream_arn = aws_dynamodb_table.delta-dynamodb-table.stream_arn dynamo_kms_encryption_key_arn = data.aws_kms_key.existing_dynamo_encryption_key.arn - enable_lambda_alarm = var.error_alarm_notifications_enabled # consider just INT and PROD + enable_lambda_alarm = var.error_alarm_notifications_enabled immunisation_account_id = var.immunisation_account_id is_temp = local.is_temp enable_mns_test_queue = var.mns_environment == "dev" diff --git a/infrastructure/instance/modules/mns_publisher/outputs.tf b/infrastructure/instance/modules/mns_publisher/outputs.tf new file mode 100644 index 0000000000..ef96d5ef2e --- /dev/null +++ b/infrastructure/instance/modules/mns_publisher/outputs.tf @@ -0,0 +1,9 @@ +output "mns_test_queue_url" { + value = var.enable_mns_test_queue ? aws_sqs_queue.mns_test_notification[0].url : null + description = "URL of the MNS test notifications queue (DEV only)" +} + +output "mns_test_queue_arn" { + value = var.enable_mns_test_queue ? 
aws_sqs_queue.mns_test_notification[0].arn : null + description = "ARN of the MNS test notifications queue (DEV only)" +} diff --git a/infrastructure/instance/modules/mns_publisher/sqs_test_publish_mns.tf b/infrastructure/instance/modules/mns_publisher/sqs_test_publish_mns.tf index a7cffcb32b..bf084173a6 100644 --- a/infrastructure/instance/modules/mns_publisher/sqs_test_publish_mns.tf +++ b/infrastructure/instance/modules/mns_publisher/sqs_test_publish_mns.tf @@ -33,13 +33,3 @@ resource "aws_sqs_queue_policy" "mns_test_notification_sqs" { queue_url = aws_sqs_queue.mns_test_notification[0].id policy = data.aws_iam_policy_document.mns_test_notification_sqs_policy[0].json } - -output "mns_test_queue_url" { - value = var.enable_mns_test_queue ? aws_sqs_queue.mns_test_notification[0].url : null - description = "URL of the MNS test notifications queue" -} - -output "mns_test_queue_arn" { - value = var.enable_mns_test_queue ? aws_sqs_queue.mns_test_notification[0].arn : null - description = "ARN of the MNS test notifications queue" -} \ No newline at end of file diff --git a/infrastructure/instance/outputs.tf b/infrastructure/instance/outputs.tf index 0cd2b8b9d7..21fd67a5e3 100644 --- a/infrastructure/instance/outputs.tf +++ b/infrastructure/instance/outputs.tf @@ -20,11 +20,11 @@ output "id_sync_queue_arn" { } output "mns_test_queue_url" { - value = var.mns_publisher_feature_enabled ? module.mns_publisher[0].mns_test_queue_url : null - description = "URL of the MNS test notifications queue (from mns_publisher module)" + value = module.mns_publisher.mns_test_queue_url + description = "URL of the MNS test notifications queue (from mns_publisher module) - DEV only" } output "mns_test_queue_arn" { - value = var.mns_publisher_feature_enabled ? 
module.mns_publisher[0].mns_test_queue_arn : null - description = "ARN of the MNS test notifications queue (from mns_publisher module)" -} \ No newline at end of file + value = module.mns_publisher.mns_test_queue_arn + description = "ARN of the MNS test notifications queue (from mns_publisher module) - DEV only" +} diff --git a/infrastructure/instance/variables.tf b/infrastructure/instance/variables.tf index 279d1f4e34..39e7b7e07a 100644 --- a/infrastructure/instance/variables.tf +++ b/infrastructure/instance/variables.tf @@ -91,11 +91,6 @@ variable "error_alarm_notifications_enabled" { type = bool } -variable "mns_publisher_feature_enabled" { - default = false - description = "Switch to the MNS Publisher feature which allows us to publish Immunisation events." - type = bool -} variable "has_sub_environment_scope" { description = "True if the sub-environment is a standalone environment, e.g. internal-dev. False if it is part of a blue-green split, e.g. int-green." type = bool From b49dfe4a89c75e9c50562d7dd669e8d71e0eb791 Mon Sep 17 00:00:00 2001 From: Akinola Olutola Date: Mon, 16 Mar 2026 12:04:28 +0000 Subject: [PATCH 06/14] remove default apigee env (#1297) --- lambdas/shared/src/common/api_clients/mns_service.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lambdas/shared/src/common/api_clients/mns_service.py b/lambdas/shared/src/common/api_clients/mns_service.py index 263c58e014..90ca4a1c19 100644 --- a/lambdas/shared/src/common/api_clients/mns_service.py +++ b/lambdas/shared/src/common/api_clients/mns_service.py @@ -12,7 +12,7 @@ SQS_ARN = os.getenv("SQS_ARN") -apigee_env = os.getenv("APIGEE_ENVIRONMENT", "int") +apigee_env = os.getenv("APIGEE_ENVIRONMENT") mns_env = os.getenv("MNS_ENV", "int") env = apigee_env or mns_env MNS_BASE_URL = ( From 44eef2a6733642a6f139e60efc1a2bd581abb694 Mon Sep 17 00:00:00 2001 From: Akshay Shetty Date: Mon, 16 Mar 2026 18:14:08 +0000 Subject: [PATCH 07/14] added tests and refactored calculate age for 
vaccination from bday (#1306) --- .../mns_publisher/src/create_notification.py | 30 ++++++-- .../tests/test_create_notification.py | 76 +++++++++---------- 2 files changed, 57 insertions(+), 49 deletions(-) diff --git a/lambdas/mns_publisher/src/create_notification.py b/lambdas/mns_publisher/src/create_notification.py index fc359cef43..0459b4a8b9 100644 --- a/lambdas/mns_publisher/src/create_notification.py +++ b/lambdas/mns_publisher/src/create_notification.py @@ -1,7 +1,7 @@ import json import os import uuid -from datetime import datetime +from datetime import date, datetime from typing import Any from aws_lambda_typing.events.sqs import SQSMessage @@ -59,22 +59,36 @@ def create_mns_notification(sqs_event: SQSMessage) -> MnsNotificationPayload: } +def _parse_compact_date(value: str, field_name: str) -> date: + if not isinstance(value, str) or not value: + raise ValueError(f"{field_name} is required") + + date_part = value[:8] + if len(date_part) != 8 or not date_part.isdigit(): + raise ValueError(f"{field_name} must start with YYYYMMDD") + + try: + return datetime.strptime(date_part, "%Y%m%d").date() + except ValueError as e: + raise ValueError(f"{field_name} must contain a valid date in YYYYMMDD format") from e + + def calculate_age_at_vaccination(birth_date: str, vaccination_date: str) -> int: """ Calculate patient age in years at time of vaccination. 
Expects dates in format: YYYYMMDD or YYYYMMDDThhmmsszz """ - birth_date_str = birth_date[:8] if len(birth_date) >= 8 else birth_date - vacc_date_str = vaccination_date[:8] if len(vaccination_date) >= 8 else vaccination_date + date_of_birth = _parse_compact_date(birth_date, "PERSON_DOB") + date_of_vaccination = _parse_compact_date(vaccination_date, "DATE_AND_TIME") - date_of_birth = datetime.strptime(birth_date_str, "%Y%m%d") - date_of_vaccination = datetime.strptime(vacc_date_str, "%Y%m%d") + if date_of_vaccination < date_of_birth: + raise ValueError("DATE_AND_TIME cannot be before PERSON_DOB") - age_in_year = date_of_vaccination.year - date_of_birth.year + age = date_of_vaccination.year - date_of_birth.year if (date_of_vaccination.month, date_of_vaccination.day) < (date_of_birth.month, date_of_birth.day): - age_in_year -= 1 + age -= 1 - return age_in_year + return age def get_practitioner_details_from_pds(nhs_number: str) -> str | None: diff --git a/lambdas/mns_publisher/tests/test_create_notification.py b/lambdas/mns_publisher/tests/test_create_notification.py index 44841923ca..96f7985602 100644 --- a/lambdas/mns_publisher/tests/test_create_notification.py +++ b/lambdas/mns_publisher/tests/test_create_notification.py @@ -16,47 +16,41 @@ class TestCalculateAgeAtVaccination(unittest.TestCase): """Tests for age calculation at vaccination time.""" - def test_age_calculation_yyyymmdd_format(self): - birth_date = "20040609" - vaccination_date = "20260212" - age = calculate_age_at_vaccination(birth_date, vaccination_date) - self.assertEqual(age, 21) - - def test_age_calculation_with_time(self): - birth_date = "20040609T120000" - vaccination_date = "20260212T174437" - age = calculate_age_at_vaccination(birth_date, vaccination_date) - self.assertEqual(age, 21) - - def test_age_calculation_after_birthday(self): - birth_date = "20040609" - vaccination_date = "20260815" - age = calculate_age_at_vaccination(birth_date, vaccination_date) - self.assertEqual(age, 22) - - def 
test_age_calculation_on_birthday(self): - birth_date = "20040609" - vaccination_date = "20260609" - age = calculate_age_at_vaccination(birth_date, vaccination_date) - self.assertEqual(age, 22) - - def test_age_calculation_infant(self): - birth_date = "20260609" - vaccination_date = "20260915" - age = calculate_age_at_vaccination(birth_date, vaccination_date) - self.assertEqual(age, 0) - - def test_age_calculation_leap_year_birthday(self): - birth_date = "20000229" - vaccination_date = "20240228" - age = calculate_age_at_vaccination(birth_date, vaccination_date) - self.assertEqual(age, 23) - - def test_age_calculation_same_day_different_year(self): - birth_date = "20000101" - vaccination_date = "20250101" - age = calculate_age_at_vaccination(birth_date, vaccination_date) - self.assertEqual(age, 25) + def test_age_calculation_core_cases(self): + cases = [ + ("20040609", "20260212", 21), # YYYYMMDD format + ("20040609", "20260609", 22), # On birthday + ("20040609", "20260815", 22), # After birthday + ("20260609", "20260915", 0), # Infant + ("20040609T120000", "20260212T17443700", 21), # With time + ("20000101", "20250101", 25), # Same day different year + ("20000229", "20240228", 23), # Leap year birthday + ("20000229", "20240229", 24), # Leap year birthday on leap day + ("20000229", "20250228", 24), # day before; birthday hasn't happened yet + ] + + for birth_date, vaccination_date, expected_age in cases: + with self.subTest(birth_date=birth_date, vaccination_date=vaccination_date): + self.assertEqual( + calculate_age_at_vaccination(birth_date, vaccination_date), + expected_age, + ) + + def test_rejects_invalid_birth_date_format(self): + with self.assertRaisesRegex(ValueError, "PERSON_DOB"): + calculate_age_at_vaccination("2004-06-09", "20260212") + + def test_rejects_invalid_vaccination_date_format(self): + with self.assertRaisesRegex(ValueError, "DATE_AND_TIME"): + calculate_age_at_vaccination("20040609", "2026-02-12") + + def 
test_rejects_nonexistent_birth_date(self): + with self.assertRaisesRegex(ValueError, "PERSON_DOB"): + calculate_age_at_vaccination("20040230", "20260212") + + def test_rejects_vaccination_before_birth(self): + with self.assertRaisesRegex(ValueError, "cannot be before"): + calculate_age_at_vaccination("20260212", "20250212") class TestCreateMnsNotification(unittest.TestCase): From 85f7fbc9e1c21f8f25c302545d536dcd92bda1bd Mon Sep 17 00:00:00 2001 From: Akinola Olutola Date: Tue, 17 Mar 2026 09:53:37 +0000 Subject: [PATCH 08/14] VED-982: Consolidate MNS Staging and Master (#1300) * Staging/group all bumps --- README.md | 25 +-- .../instance/policies/secret_manager.json | 2 +- lambdas/backend/poetry.lock | 10 +- lambdas/backend/pyproject.toml | 2 +- lambdas/filenameprocessor/poetry.lock | 10 +- lambdas/filenameprocessor/pyproject.toml | 2 +- lambdas/id_sync/poetry.lock | 10 +- lambdas/id_sync/pyproject.toml | 2 +- lambdas/id_sync/tests/test_pds_details.py | 20 ++ lambdas/mns_subscription/poetry.lock | 10 +- lambdas/mns_subscription/pyproject.toml | 2 +- lambdas/mns_subscription/src/mns_setup.py | 20 ++ lambdas/recordforwarder/poetry.lock | 10 +- lambdas/recordforwarder/pyproject.toml | 2 +- manifest_template.yml | 4 +- package-lock.json | 6 +- tests/perf_tests/Makefile | 4 + tests/perf_tests/Readme.md | 10 + tests/perf_tests/pyproject.toml | 24 +++ tests/perf_tests/src/locustfile.py | 176 ++++++++++++++++++ 20 files changed, 304 insertions(+), 47 deletions(-) create mode 100644 lambdas/id_sync/tests/test_pds_details.py create mode 100644 lambdas/mns_subscription/src/mns_setup.py create mode 100644 tests/perf_tests/Makefile create mode 100644 tests/perf_tests/Readme.md create mode 100644 tests/perf_tests/pyproject.toml create mode 100644 tests/perf_tests/src/locustfile.py diff --git a/README.md b/README.md index e2c139813b..0d174ca15a 100644 --- a/README.md +++ b/README.md @@ -26,6 +26,7 @@ See https://nhsd-confluence.digital.nhs.uk/display/APM/Glossary. 
| `id_sync` | **Imms Cross-cutting** – Handles [MNS](https://digital.nhs.uk/developer/api-catalogue/multicast-notification-service) NHS Number Change events and applies updates to affected records. | | `mesh_processor` | **Imms Batch** – Triggered when new files are received via MESH. Moves them into the Imms Batch processing system. | | `mns_subscription` | **Imms Cross-cutting** – Simple helper Lambda which sets up our required MNS subscription. Used in pipelines in DEV. | +| `perf_tests` | **Imms API** – Locust performance tests for the Immunisation API. | | `recordforwarder` | **Imms Batch** – Consumes from the stream and applies the processed batch file row operations (CUD) to IEDS. | | `recordprocessor` | **Imms Batch** – ECS Task - **not** a Lambda function - responsible for processing batch file rows and forwarding to the stream. | | `redis_sync` | **Imms Cross-cutting** – Handles config file updates. E.g. disease mapping or permission files. | @@ -142,22 +143,12 @@ Steps: pip install poetry ``` -### Install Pre-Commit Hooks - -[Husky](https://typicode.github.io/husky/) is used to perform automatic checks upon making a commit. -It is configured within `.husky/pre-commit` to run the checks defined in the root level `package.json` under `lint-staged`. -To set this up: - -1. Ensure you have installed nodejs at the same version or later as per .tool-versions and +8. Install pre-commit hooks. Ensure you have installed nodejs at the same version or later as per .tool-versions and then, from the repo root, run: - ``` npm install ``` -2. Run `cd quality_checks` then `poetry install --no-root`. This will make sure your version of ruff is the same as used in the GitHub pipeline. - You can check your version is correct by running `poetry run ruff --version` from within the `quality_checks` directory and comparing to the version in the poetry.lock file. 
- ### Setting up a virtual environment with poetry The steps below must be performed in each Lambda function folder and e2e_automation folder to ensure the environment is correctly configured. @@ -216,6 +207,18 @@ Steps: It is not necessary to activate the virtual environment (using `source .venv/bin/activate`) before running a unit test suite from the command line; `direnv` will pick up the correct configurations for us. Run `pip list` to verify that the expected packages are installed. You should for example see that `recordprocessor` is specifically running `moto` v4, regardless of which if any `.venv` is active. +### Setting up the root level environment + +The root-level virtual environment is primarily used for linting, as we create separate virtual environments for each folder that contains Lambda functions. +Steps: + +1. Follow instructions above to [install dependencies](#install-dependencies) & [set up a virtual environment](#setting-up-a-virtual-environment-with-poetry). + **Note: While this project uses Python 3.11 (e.g. for Lambdas), the NHSDigital/api-management-utils repository — which orchestrates setup and linting — defaults to Python 3.8. + The linting command is executed from within that repo but calls the Makefile in this project, so be aware of potential Python version mismatches when running or debugging locally or in the pipeline.** +2. Run `make lint`. This will: + - Check the linting of the API specification yaml. + - Run Flake8 on all Python files in the repository, excluding files inside .venv and .terraform directories. + ## IDE setup The current team uses VS Code mainly. So this setup is targeted towards VS code. If you use another IDE please add the documentation to set up workspaces here. 
diff --git a/infrastructure/instance/policies/secret_manager.json b/infrastructure/instance/policies/secret_manager.json index 163787e11a..ea402d6b8d 100644 --- a/infrastructure/instance/policies/secret_manager.json +++ b/infrastructure/instance/policies/secret_manager.json @@ -4,7 +4,7 @@ { "Effect": "Allow", "Action": "secretsmanager:GetSecretValue", - "Resource": "arn:aws:secretsmanager:eu-west-2:${account_id}:secret:imms/pds/*/*" + "Resource": "arn:aws:secretsmanager:eu-west-2:${account_id}:secret:imms/outbound/*/*" } ] } diff --git a/lambdas/backend/poetry.lock b/lambdas/backend/poetry.lock index 4ae1496a62..99331e8e85 100644 --- a/lambdas/backend/poetry.lock +++ b/lambdas/backend/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand. [[package]] name = "async-timeout" @@ -1323,14 +1323,14 @@ email = ["email-validator (>=1.0.3)"] [[package]] name = "pyjwt" -version = "2.11.0" +version = "2.12.0" description = "JSON Web Token implementation in Python" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "pyjwt-2.11.0-py3-none-any.whl", hash = "sha256:94a6bde30eb5c8e04fee991062b534071fd1439ef58d2adc9ccb823e7bcd0469"}, - {file = "pyjwt-2.11.0.tar.gz", hash = "sha256:35f95c1f0fbe5d5ba6e43f00271c275f7a1a4db1dab27bf708073b75318ea623"}, + {file = "pyjwt-2.12.0-py3-none-any.whl", hash = "sha256:9bb459d1bdd0387967d287f5656bf7ec2b9a26645d1961628cda1764e087fd6e"}, + {file = "pyjwt-2.12.0.tar.gz", hash = "sha256:2f62390b667cd8257de560b850bb5a883102a388829274147f1d724453f8fb02"}, ] [package.dependencies] @@ -1774,4 +1774,4 @@ test = ["pytest", "pytest-cov"] [metadata] lock-version = "2.1" python-versions = "~3.11" -content-hash = "be967b3a09c7fd75f709d9959fa79d0fb0c5308645177543bd90b7b51c02fc3f" +content-hash = "e355a57295b17f9c0d72b8de72df38e91a56239b1b5b18b493f570e2ead0a5c4" diff --git 
a/lambdas/backend/pyproject.toml b/lambdas/backend/pyproject.toml index 98d191e9b7..74975499a2 100644 --- a/lambdas/backend/pyproject.toml +++ b/lambdas/backend/pyproject.toml @@ -20,7 +20,7 @@ moto = "^5.1.20" requests = "~2.32.5" responses = "~0.25.7" pydantic = "~1.10.13" -pyjwt = {extras = ["crypto"], version = "^2.11.0"} +pyjwt = {extras = ["crypto"], version = "^2.12.0"} jsonpath-ng = "^1.6.0" simplejson = "^3.20.2" structlog = "^24.1.0" diff --git a/lambdas/filenameprocessor/poetry.lock b/lambdas/filenameprocessor/poetry.lock index 3cf8bf2dcc..80734eab74 100644 --- a/lambdas/filenameprocessor/poetry.lock +++ b/lambdas/filenameprocessor/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand. [[package]] name = "async-timeout" @@ -1346,14 +1346,14 @@ email = ["email-validator (>=1.0.3)"] [[package]] name = "pyjwt" -version = "2.11.0" +version = "2.12.0" description = "JSON Web Token implementation in Python" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "pyjwt-2.11.0-py3-none-any.whl", hash = "sha256:94a6bde30eb5c8e04fee991062b534071fd1439ef58d2adc9ccb823e7bcd0469"}, - {file = "pyjwt-2.11.0.tar.gz", hash = "sha256:35f95c1f0fbe5d5ba6e43f00271c275f7a1a4db1dab27bf708073b75318ea623"}, + {file = "pyjwt-2.12.0-py3-none-any.whl", hash = "sha256:9bb459d1bdd0387967d287f5656bf7ec2b9a26645d1961628cda1764e087fd6e"}, + {file = "pyjwt-2.12.0.tar.gz", hash = "sha256:2f62390b667cd8257de560b850bb5a883102a388829274147f1d724453f8fb02"}, ] [package.dependencies] @@ -1795,4 +1795,4 @@ test = ["pytest", "pytest-cov"] [metadata] lock-version = "2.1" python-versions = "~3.11" -content-hash = "3e60e5a96e20d62320455932b3d82cc26be82f94f14b6fb1af9ad7a674369810" +content-hash = "448565616222aec002ff6b29a5679bb7aaf1a2a40760451df68f96f1cff23a7f" diff --git a/lambdas/filenameprocessor/pyproject.toml 
b/lambdas/filenameprocessor/pyproject.toml index 263464e17c..fd3b4f8ef7 100644 --- a/lambdas/filenameprocessor/pyproject.toml +++ b/lambdas/filenameprocessor/pyproject.toml @@ -19,7 +19,7 @@ moto = "^4" requests = "~2.32.5" responses = "~0.25.8" pydantic = "~1.10.13" -pyjwt = {extras = ["crypto"], version = "^2.11.0"} +pyjwt = {extras = ["crypto"], version = "^2.12.0"} jsonpath-ng = "^1.6.0" simplejson = "^3.20.2" structlog = "^24.1.0" diff --git a/lambdas/id_sync/poetry.lock b/lambdas/id_sync/poetry.lock index 376d069cea..a02a2f6575 100644 --- a/lambdas/id_sync/poetry.lock +++ b/lambdas/id_sync/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand. [[package]] name = "async-timeout" @@ -708,14 +708,14 @@ files = [ [[package]] name = "pyjwt" -version = "2.11.0" +version = "2.12.0" description = "JSON Web Token implementation in Python" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "pyjwt-2.11.0-py3-none-any.whl", hash = "sha256:94a6bde30eb5c8e04fee991062b534071fd1439ef58d2adc9ccb823e7bcd0469"}, - {file = "pyjwt-2.11.0.tar.gz", hash = "sha256:35f95c1f0fbe5d5ba6e43f00271c275f7a1a4db1dab27bf708073b75318ea623"}, + {file = "pyjwt-2.12.0-py3-none-any.whl", hash = "sha256:9bb459d1bdd0387967d287f5656bf7ec2b9a26645d1961628cda1764e087fd6e"}, + {file = "pyjwt-2.12.0.tar.gz", hash = "sha256:2f62390b667cd8257de560b850bb5a883102a388829274147f1d724453f8fb02"}, ] [package.dependencies] @@ -997,4 +997,4 @@ test = ["pytest", "pytest-cov"] [metadata] lock-version = "2.1" python-versions = "~3.11" -content-hash = "43e4ecc616b05f9133fe2328e4ebd784b5b11caaa8837d0968c8c1f2f43b2c86" +content-hash = "cb9bcc2a759be202323d45fc6b894c8cfec2f0ac4aa772846cfad68301cb5d3d" diff --git a/lambdas/id_sync/pyproject.toml b/lambdas/id_sync/pyproject.toml index d853de0ff0..d1996f2d53 100644 --- 
a/lambdas/id_sync/pyproject.toml +++ b/lambdas/id_sync/pyproject.toml @@ -26,7 +26,7 @@ python-stdnum = "^2.1" coverage = "^7.13.2" redis = "^4.6.0" cache = "^1.0.3" -pyjwt = {extras = ["crypto"], version = "~2.11.0"} +pyjwt = {extras = ["crypto"], version = "~2.12.0"} [tool.poetry.group.dev.dependencies] coverage = "^7.13.2" diff --git a/lambdas/id_sync/tests/test_pds_details.py b/lambdas/id_sync/tests/test_pds_details.py new file mode 100644 index 0000000000..92bff58a6c --- /dev/null +++ b/lambdas/id_sync/tests/test_pds_details.py @@ -0,0 +1,20 @@ +import unittest + +from pds_details import get_nhs_number_from_pds_resource + + +class TestGetNhsNumber(unittest.TestCase): + def test_get_nhs_number_from_pds_resource(self): + """Test that the NHS Number is retrieved from a full PDS patient resource.""" + mock_pds_resource = { + "identifier": [ + { + "system": "https://fhir.nhs.uk/Id/nhs-number", + "value": "123456789012", + } + ] + } + + result = get_nhs_number_from_pds_resource(mock_pds_resource) + + self.assertEqual(result, "123456789012") diff --git a/lambdas/mns_subscription/poetry.lock b/lambdas/mns_subscription/poetry.lock index 869fc4ddad..4894377180 100644 --- a/lambdas/mns_subscription/poetry.lock +++ b/lambdas/mns_subscription/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand. 
[[package]] name = "boto3" @@ -669,14 +669,14 @@ files = [ [[package]] name = "pyjwt" -version = "2.11.0" +version = "2.12.0" description = "JSON Web Token implementation in Python" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "pyjwt-2.11.0-py3-none-any.whl", hash = "sha256:94a6bde30eb5c8e04fee991062b534071fd1439ef58d2adc9ccb823e7bcd0469"}, - {file = "pyjwt-2.11.0.tar.gz", hash = "sha256:35f95c1f0fbe5d5ba6e43f00271c275f7a1a4db1dab27bf708073b75318ea623"}, + {file = "pyjwt-2.12.0-py3-none-any.whl", hash = "sha256:9bb459d1bdd0387967d287f5656bf7ec2b9a26645d1961628cda1764e087fd6e"}, + {file = "pyjwt-2.12.0.tar.gz", hash = "sha256:2f62390b667cd8257de560b850bb5a883102a388829274147f1d724453f8fb02"}, ] [package.dependencies] @@ -912,4 +912,4 @@ test = ["pytest", "pytest-cov"] [metadata] lock-version = "2.1" python-versions = "~3.11" -content-hash = "2e3ac115064ad661adecc8625cb7bd15d45dcfeb2eaffc110d4616b9d82c0f54" +content-hash = "09911ae68530d3aa8c99ad85637d85bceea7444e4ad84c6bda9e82892b970ce5" diff --git a/lambdas/mns_subscription/pyproject.toml b/lambdas/mns_subscription/pyproject.toml index 4d0e88fc45..40fd9297be 100644 --- a/lambdas/mns_subscription/pyproject.toml +++ b/lambdas/mns_subscription/pyproject.toml @@ -11,7 +11,7 @@ packages = [ [tool.poetry.dependencies] python = "~3.11" boto3 = "~1.42.37" -pyjwt = {extras = ["crypto"], version = "~2.11.0"} +pyjwt = {extras = ["crypto"], version = "~2.12.0"} moto = "^5.1.20" coverage = "^7.13.2" requests = "~2.32.5" diff --git a/lambdas/mns_subscription/src/mns_setup.py b/lambdas/mns_subscription/src/mns_setup.py new file mode 100644 index 0000000000..5b97ad68be --- /dev/null +++ b/lambdas/mns_subscription/src/mns_setup.py @@ -0,0 +1,20 @@ +import logging + +import boto3 +from botocore.config import Config + +from common.api_clients.authentication import AppRestrictedAuth +from common.api_clients.mns_service import MnsService + +logging.basicConfig(level=logging.INFO) + + +def 
get_mns_service(mns_env: str = "int"): + boto_config = Config(region_name="eu-west-2") + + authenticator = AppRestrictedAuth( + secret_manager_client=boto3.client("secretsmanager", config=boto_config), + environment=mns_env, + ) + + return MnsService(authenticator) diff --git a/lambdas/recordforwarder/poetry.lock b/lambdas/recordforwarder/poetry.lock index 4ae1496a62..99331e8e85 100644 --- a/lambdas/recordforwarder/poetry.lock +++ b/lambdas/recordforwarder/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand. [[package]] name = "async-timeout" @@ -1323,14 +1323,14 @@ email = ["email-validator (>=1.0.3)"] [[package]] name = "pyjwt" -version = "2.11.0" +version = "2.12.0" description = "JSON Web Token implementation in Python" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "pyjwt-2.11.0-py3-none-any.whl", hash = "sha256:94a6bde30eb5c8e04fee991062b534071fd1439ef58d2adc9ccb823e7bcd0469"}, - {file = "pyjwt-2.11.0.tar.gz", hash = "sha256:35f95c1f0fbe5d5ba6e43f00271c275f7a1a4db1dab27bf708073b75318ea623"}, + {file = "pyjwt-2.12.0-py3-none-any.whl", hash = "sha256:9bb459d1bdd0387967d287f5656bf7ec2b9a26645d1961628cda1764e087fd6e"}, + {file = "pyjwt-2.12.0.tar.gz", hash = "sha256:2f62390b667cd8257de560b850bb5a883102a388829274147f1d724453f8fb02"}, ] [package.dependencies] @@ -1774,4 +1774,4 @@ test = ["pytest", "pytest-cov"] [metadata] lock-version = "2.1" python-versions = "~3.11" -content-hash = "be967b3a09c7fd75f709d9959fa79d0fb0c5308645177543bd90b7b51c02fc3f" +content-hash = "e355a57295b17f9c0d72b8de72df38e91a56239b1b5b18b493f570e2ead0a5c4" diff --git a/lambdas/recordforwarder/pyproject.toml b/lambdas/recordforwarder/pyproject.toml index 7dc8e98c92..14d589874b 100644 --- a/lambdas/recordforwarder/pyproject.toml +++ b/lambdas/recordforwarder/pyproject.toml @@ -20,7 +20,7 @@ moto = 
"^5.1.20" requests = "~2.32.5" responses = "~0.25.7" pydantic = "~1.10.13" -pyjwt = {extras = ["crypto"], version = "^2.11.0"} +pyjwt = {extras = ["crypto"], version = "^2.12.0"} jsonpath-ng = "^1.6.0" simplejson = "^3.20.2" structlog = "^24.1.0" diff --git a/manifest_template.yml b/manifest_template.yml index 0e99f86db1..c50f6e0a1f 100644 --- a/manifest_template.yml +++ b/manifest_template.yml @@ -5,7 +5,7 @@ APIGEE_ENVIRONMENTS: display_name_suffix: Internal Development ratelimiting: immunisation-fhir-api-internal-dev: - # 5 requests per second on average + # 5 requests per second on average quota: enabled: true limit: 300 @@ -112,7 +112,7 @@ APIGEE_ENVIRONMENTS: limit: 300 interval: 1 timeunit: minute - # 10 requests per second max + # 10 requests per second max spikeArrest: enabled: true ratelimit: 600pm diff --git a/package-lock.json b/package-lock.json index 3d1525ae6b..b16ad76016 100644 --- a/package-lock.json +++ b/package-lock.json @@ -2868,9 +2868,9 @@ } }, "node_modules/undici": { - "version": "6.23.0", - "resolved": "https://registry.npmjs.org/undici/-/undici-6.23.0.tgz", - "integrity": "sha512-VfQPToRA5FZs/qJxLIinmU59u0r7LXqoJkCzinq3ckNJp3vKEh7jTWN589YQ5+aoAC/TGRLyJLCPKcLQbM8r9g==", + "version": "6.24.1", + "resolved": "https://registry.npmjs.org/undici/-/undici-6.24.1.tgz", + "integrity": "sha512-sC+b0tB1whOCzbtlx20fx3WgCXwkW627p4EA9uM+/tNNPkSS+eSEld6pAs9nDv7WbY1UUljBMYPtu9BCOrCWKA==", "dev": true, "license": "MIT", "engines": { diff --git a/tests/perf_tests/Makefile b/tests/perf_tests/Makefile new file mode 100644 index 0000000000..d15b7705e3 --- /dev/null +++ b/tests/perf_tests/Makefile @@ -0,0 +1,4 @@ +test: + poetry run locust -f src/locustfile.py + +.PHONY: test \ No newline at end of file diff --git a/tests/perf_tests/Readme.md b/tests/perf_tests/Readme.md new file mode 100644 index 0000000000..d33fb8abe9 --- /dev/null +++ b/tests/perf_tests/Readme.md @@ -0,0 +1,10 @@ +# Perf tests + +This project contains Locust performance tests for the
Immunisation FHIR API. + +To run them, ensure you have the +`APIGEE_ENVIRONMENT` : Currently, only the ref environment is supported. +`PERF_SUPPLIER_SYSTEM` : `EMIS` or `TPP` +`PERF_CREATE_RPS_PER_USER` : numeric + +env vars set, and call `make test`. diff --git a/tests/perf_tests/pyproject.toml b/tests/perf_tests/pyproject.toml new file mode 100644 index 0000000000..758dadbd82 --- /dev/null +++ b/tests/perf_tests/pyproject.toml @@ -0,0 +1,24 @@ +[tool.poetry] +name = "perf-tests" +version = "0.1.0" +description = "" +authors = ["Matt Jarvis <matt.jarvis2@nhs.net>"] +readme = "Readme.md" +packages = [ +{ include = "objectModels", from = "../e2e_automation/src" }, +{ include = "utilities", from = "../e2e_automation" } ] + +[tool.poetry.dependencies] +python = ">=3.11,<3.12" +locust = ">=2.42.3,<3.0.0" +pyjwt = { version = ">=2.11.0,<3.0.0", extras = ["crypto"] } +boto3 = ">=1.42.59,<2.0.0" +shared = { path = "../../lambdas/shared", develop = true } +pandas = "2.3.0" +pydantic = "1.10.13" +typing_extensions = "~4.15.0" + +[build-system] +requires = ["poetry-core>=2.0.0,<3.0.0"] +build-backend = "poetry.core.masonry.api" \ No newline at end of file diff --git a/tests/perf_tests/src/locustfile.py b/tests/perf_tests/src/locustfile.py new file mode 100644 index 0000000000..5841a6a748 --- /dev/null +++ b/tests/perf_tests/src/locustfile.py @@ -0,0 +1,176 @@ +import json +import os +import random +import uuid +from pathlib import Path +from urllib.parse import urlencode + +import pandas as pd +from locust import HttpUser, constant_throughput, task + +from common.api_clients.authentication import AppRestrictedAuth +from common.clients import get_secrets_manager_client +from objectModels import patient_loader +from objectModels.api_immunization_builder import create_immunization_object +from objectModels.patient_loader import load_patient_by_id + +CONTENT_TYPE_FHIR_JSON = "application/fhir+json" + +APIGEE_ENVIRONMENT = os.getenv("APIGEE_ENVIRONMENT") +if not
APIGEE_ENVIRONMENT: + raise ValueError("APIGEE_ENVIRONMENT must be set") + +PERF_SUPPLIER_SYSTEM = os.getenv("PERF_SUPPLIER_SYSTEM", "EMIS").upper() +PERF_CREATE_TASK_RPS_PER_USER = float(os.getenv("PERF_CREATE_RPS_PER_USER", "1")) + +IMMUNIZATION_TARGETS = [ + "3IN1", + "COVID", + "FLU", + "HPV", + "MENACWY", + "MMR", + "MMRV", + "PNEUMOCOCCAL", + "PERTUSSIS", + "RSV", + "SHINGLES", +] + +NHS_NUMBERS = [ + "9160742623", + "9822833040", + "9406813963", + "9505768028", + "9429583158", + "9728553366", + "9153271653", + "9067110124", + "9244495082", + "9940401264", +] + +NHS_SYSTEM = "https://fhir.nhs.uk/Id/nhs-number" +CREATE_SUCCESS_STATUSES = {200, 201, 202} +DELETE_SUCCESS_STATUSES = {200, 202, 204} + +patient_loader.csv_path = str(Path(__file__).resolve().parents[2] / "e2e_automation" / "input" / "testData.csv") + + +def _load_valid_patients(): + patient_df = pd.read_csv(patient_loader.csv_path, dtype=str) + valid_patients = patient_df[patient_df["id"] == "Valid_NHS"]["id"].tolist() + if not valid_patients: + raise ValueError(f"No valid patients found in {patient_loader.csv_path}") + return valid_patients + + +VALID_PATIENT_IDS = _load_valid_patients() + + +class BaseImmunizationUser(HttpUser): + abstract = True + + authenticator = AppRestrictedAuth( + get_secrets_manager_client(), + APIGEE_ENVIRONMENT, + f"imms/perf-tests/{APIGEE_ENVIRONMENT}/jwt-secrets", + ) + host = f"https://{APIGEE_ENVIRONMENT}.api.service.nhs.uk/immunisation-fhir-api/FHIR/R4" + + def get_headers(self): + return { + "Accept": CONTENT_TYPE_FHIR_JSON, + "Authorization": f"Bearer {self.authenticator.get_access_token()}", + "Content-Type": CONTENT_TYPE_FHIR_JSON, + "SupplierSystem": PERF_SUPPLIER_SYSTEM, + "X-Correlation-ID": str(uuid.uuid4()), + "X-Request-ID": str(uuid.uuid4()), + } + + def _build_create_payload(self): + immunization_target = random.choice(IMMUNIZATION_TARGETS) + patient = load_patient_by_id(random.choice(VALID_PATIENT_IDS)) + immunization = 
create_immunization_object(patient, immunization_target) + return json.loads(immunization.json(exclude_none=True)) + + def _delete_created_immunization(self, immunization_id: str): + headers = self.get_headers() + with self.client.delete( + f"/Immunization/{immunization_id}", + headers=headers, + name="Delete Immunization Cleanup", + catch_response=True, + ) as response: + if response.status_code in DELETE_SUCCESS_STATUSES: + response.success() + else: + response.failure(f"Cleanup delete failed for {immunization_id}: {response.status_code} {response.text}") + + +class SearchUser(BaseImmunizationUser): + wait_time = constant_throughput(1) + + @task + def search_single_vacc_type(self): + nhs_number = random.choice(NHS_NUMBERS) + immunization_target = random.choice(IMMUNIZATION_TARGETS) + query = urlencode( + { + "patient.identifier": f"{NHS_SYSTEM}|{nhs_number}", + "-immunization.target": immunization_target, + } + ) + self.client.get( + f"/Immunization?{query}", + headers=self.get_headers(), + name="Search Single Vaccine Type", + ) + + @task + def search_multiple_vacc_types(self): + nhs_number = random.choice(NHS_NUMBERS) + query = urlencode( + { + "patient.identifier": f"{NHS_SYSTEM}|{nhs_number}", + "-immunization.target": ",".join(IMMUNIZATION_TARGETS), + } + ) + self.client.get( + f"/Immunization?{query}", + headers=self.get_headers(), + name="Search Multiple Vaccine Types", + ) + + +class CreateUser(BaseImmunizationUser): + wait_time = constant_throughput(PERF_CREATE_TASK_RPS_PER_USER) + + @task + def create_immunization(self): + payload = self._build_create_payload() + headers = self.get_headers() + + with self.client.post( + "/Immunization", + json=payload, + headers=headers, + name="Create Immunization", + catch_response=True, + ) as response: + if response.status_code not in CREATE_SUCCESS_STATUSES: + response.failure(f"Create failed: {response.status_code} {response.text}") + return + + location = response.headers.get("Location") or 
response.headers.get("location") + if not location: + response.failure("Create succeeded without a Location header; cleanup skipped") + return + + created_id = location.rstrip("/").split("/")[-1] + if not created_id: + response.failure(f"Create returned an invalid Location header: {location}") + return + + response.success() + self._delete_created_immunization(created_id) From dc892e120d127be635def23ee18f46b9d4cf3392 Mon Sep 17 00:00:00 2001 From: Akol125 Date: Wed, 18 Mar 2026 09:47:01 +0000 Subject: [PATCH 09/14] add type hinting, policies and remove none --- infrastructure/instance/policies/secret_manager.json | 5 ++++- lambdas/mns_publisher/src/process_records.py | 1 - lambdas/mns_publisher/tests/test_sqs_dynamo_utils.py | 0 lambdas/shared/src/common/api_clients/retry.py | 2 +- 4 files changed, 5 insertions(+), 3 deletions(-) delete mode 100644 lambdas/mns_publisher/tests/test_sqs_dynamo_utils.py diff --git a/infrastructure/instance/policies/secret_manager.json b/infrastructure/instance/policies/secret_manager.json index ea402d6b8d..e40bef5d41 100644 --- a/infrastructure/instance/policies/secret_manager.json +++ b/infrastructure/instance/policies/secret_manager.json @@ -4,7 +4,10 @@ { "Effect": "Allow", "Action": "secretsmanager:GetSecretValue", - "Resource": "arn:aws:secretsmanager:eu-west-2:${account_id}:secret:imms/outbound/*/*" + "Resource": [ + "arn:aws:secretsmanager:eu-west-2:${account_id}:secret:imms/outbound/*/*", + "arn:aws:secretsmanager:eu-west-2:${account_id}:secret:imms/pds/*/*" + ] } ] } diff --git a/lambdas/mns_publisher/src/process_records.py b/lambdas/mns_publisher/src/process_records.py index 2b1408daef..8ea67d3cbb 100644 --- a/lambdas/mns_publisher/src/process_records.py +++ b/lambdas/mns_publisher/src/process_records.py @@ -78,7 +78,6 @@ def process_record(record: SqsRecord, mns_service: MnsService | MockMnsService) Returns: Failure dict with itemIdentifier if processing failed, None if successful """ message_id, immunisation_id = 
extract_trace_ids(record) - notification_id = None mns_notification_payload = create_mns_notification(_as_sqs_message(record)) notification_id = mns_notification_payload.get("id") diff --git a/lambdas/mns_publisher/tests/test_sqs_dynamo_utils.py b/lambdas/mns_publisher/tests/test_sqs_dynamo_utils.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/lambdas/shared/src/common/api_clients/retry.py b/lambdas/shared/src/common/api_clients/retry.py index 1951fa50f7..1a210a0a41 100644 --- a/lambdas/shared/src/common/api_clients/retry.py +++ b/lambdas/shared/src/common/api_clients/retry.py @@ -12,7 +12,7 @@ def request_with_retry_backoff( headers: dict | None = None, timeout: int = Constants.DEFAULT_API_CLIENTS_TIMEOUT, max_retries: int = Constants.API_CLIENTS_MAX_RETRIES, - data: dict | None = None, + data: str | None = None, ) -> requests.Response: """ Makes an external request with retry and exponential backoff for retryable status codes. From 26f4059b67413106123061fbf19d72244fe6982a Mon Sep 17 00:00:00 2001 From: Akol125 Date: Wed, 18 Mar 2026 14:45:20 +0000 Subject: [PATCH 10/14] add aws powertools deserializer, readme --- lambdas/mns_publisher/README.md | 198 +++++++++++++++++- lambdas/mns_publisher/poetry.lock | 21 +- lambdas/mns_publisher/pyproject.toml | 2 +- .../mns_publisher/src/create_notification.py | 46 ++-- .../tests/test_create_notification.py | 41 ---- 5 files changed, 224 insertions(+), 84 deletions(-) diff --git a/lambdas/mns_publisher/README.md b/lambdas/mns_publisher/README.md index dea5a78fa4..ab3dbad00b 100644 --- a/lambdas/mns_publisher/README.md +++ b/lambdas/mns_publisher/README.md @@ -1,3 +1,197 @@ -# mns-publisher +# MNS Publisher Lambda -Add description - TODO +AWS Lambda function that processes immunisation vaccination records from SQS and publishes notifications to the Messaging Notification Service (MNS). 
+ +## Overview + +The MNS Publisher Lambda function is responsible for: + +- Processing SQS messages containing immunisation event data +- Extracting vaccination record details from DynamoDB stream events +- Creating MNS notification payloads following the CloudEvents specification +- Publishing notifications to the MNS for downstream processing +- Retrieving patient and practitioner details from PDS (Personal Demographics Service) + +## Key Features + +- **SQS Event Processing**: Consumes messages from SQS queues containing immunisation records +- **DynamoDB Stream Integration**: Parses DynamoDB stream event data for vaccination records +- **PDS Integration**: Retrieves patient demographic and practitioner details +- **Error Handling**: Comprehensive logging and error handling using AWS Lambda Powertools +- **Mock Service Support**: Includes mock MNS service for testing and development environments + +## Architecture + +### Event Flow + +1. **SQS Trigger**: Lambda is triggered by messages from an SQS queue +2. **Record Processing**: Each message is processed to extract the vaccination event +3. **Notification Creation**: A CloudEvents-compliant notification is constructed with: + - Patient demographics (NHS number, DOB, age at vaccination) + - Vaccine details (vaccine type, site code) + - Practitioner information (GP ODS code from PDS) + - Immunisation URL reference +4. 
**MNS Publishing**: Notification is published to the configured MNS environment + +### Notification Payload Structure + +```json +{ + "specversion": "1.0", + "id": "unique-notification-id", + "source": "uk.nhs.vaccinations-data-flow-management", + "type": "imms-vaccination-record-change-1", + "datacontenttype": "application/fhir+json", + "subject": "Immunisation|{imms-id}", + "time": "ISO-8601-timestamp", + "data": { + "nhs_number": "patient-nhs-number", + "vaccine_type": "vaccine-type", + "patient_age": "age-at-vaccination", + "gp_ods_code": "practitioner-ods-code", + "immunisation_url": "reference-url" + } +} +``` + +## Dependencies + +- **python**: ~3.11 +- **aws-lambda-typing**: ~2.20.0 - Type hints for AWS Lambda +- **aws-lambda-powertools**: 3.24.0 - AWS Lambda observability toolkit +- **boto3**: ~1.42.37 - AWS SDK +- **requests**: ^2.31.0 - HTTP client +- **pyjwt**: ^2.10.1 - JWT token handling + +### Development Dependencies + +- **coverage**: ^7.13.2 - Code coverage measurement +- **moto**: ~5.1.20 - AWS service mocking +- **mypy-boto3-dynamodb**: ^1.42.33 - Type hints for DynamoDB + +## Installation + +### Prerequisites + +- Python 3.11+ +- Poetry package manager +- Docker (for building Lambda deployment package) + +### Local Setup + +```bash +# Install dependencies using Poetry +poetry install + +# Activate the virtual environment +source .venv/bin/activate +``` + +## Usage + +### Running Tests + +```bash +# Run all tests +make test + +# Run tests with coverage report +make coverage-run +make coverage-report + +# Generate HTML coverage report +make coverage-html +``` + +### Building Lambda Package + +```bash +# Build Docker image +make build + +# Package Lambda deployment artifact +make package + +# Artifacts will be created in the ./build directory +``` + +## Environment Variables + +The Lambda function requires the following environment variables: + +- `MNS_ENV`: MNS environment configuration (default: "int") + - Options: "int", "prod", or other 
configured environments +- `IMMUNIZATION_ENV`: Immunisation service environment +- `IMMUNIZATION_BASE_PATH`: Base path for immunisation service URL + +## Configuration + +The Lambda uses environment-based configuration: + +- **MNS Service**: Automatically selects MnsService or MockMnsService based on MNS_ENV +- **Shared Dependencies**: Uses common utilities from `../shared/src` including: + - MNS API client + - PDS integration + - Service URL resolution + +## Code Structure + +``` +src/ +├── lambda_handler.py # Main Lambda entry point +├── process_records.py # SQS record processing logic +├── create_notification.py # MNS notification payload creation +├── observability.py # Logging configuration +├── constants.py # Static constants +└── __init__.py +``` + +### Key Modules + +- **lambda_handler**: Entry point that receives SQS events +- **process_records**: Processes each SQS record and coordinates notification creation +- **create_notification**: Constructs the CloudEvents-compliant notification payload +- **observability**: AWS Lambda Powertools logger configuration + +## Deployment + +The Lambda is deployed as a Docker container image to AWS Lambda: + +1. Build the Docker image containing the Lambda function +2. Push to AWS ECR (Elastic Container Registry) +3. Configure Lambda to use the container image +4. Set required environment variables +5. Configure SQS as the event source + +## Error Handling + +The function includes error handling for: + +- Missing required fields (NHS number, DOB, vaccination date) +- PDS service failures (invalid NHS numbers, service unavailability) +- Invalid SQS message format +- MNS publishing failures + +Errors are logged using AWS Lambda Powertools for observability and debugging. 
+ +## Monitoring and Observability + +- Uses AWS Lambda Powertools for structured logging +- All processing steps are logged with context information +- Integration with CloudWatch for Lambda metrics and logs +- Error tracking and alerting through CloudWatch alarms + +## Related Components + +- **Shared Library**: `../shared/src/common` - Common utilities including MNS and PDS clients +- **Event Source**: Triggered by SQS messages from immunisation event processing pipeline +- **Downstream**: MNS processes published notifications for delivery to subscribed systems + +## Contributing + +When modifying this Lambda: + +1. Update tests in the `tests/` directory +2. Run `make test` to verify changes +3. Ensure coverage remains above project thresholds +4. Update this README if adding new features or changing behavior diff --git a/lambdas/mns_publisher/poetry.lock b/lambdas/mns_publisher/poetry.lock index c555a3c7f9..b798f022e5 100644 --- a/lambdas/mns_publisher/poetry.lock +++ b/lambdas/mns_publisher/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand. [[package]] name = "aws-lambda-powertools" @@ -98,7 +98,7 @@ version = "2026.1.4" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.7" -groups = ["main"] +groups = ["main", "dev"] files = [ {file = "certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c"}, {file = "certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120"}, @@ -208,7 +208,7 @@ version = "3.4.4" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false python-versions = ">=3.7" -groups = ["main"] +groups = ["main", "dev"] files = [ {file = "charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e824f1492727fa856dd6eda4f7cee25f8518a12f3c4a56a74e8095695089cf6d"}, {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4bd5d4137d500351a30687c2d3971758aac9a19208fc110ccb9d7188fbe709e8"}, @@ -522,7 +522,7 @@ version = "3.11" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.8" -groups = ["main"] +groups = ["main", "dev"] files = [ {file = "idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea"}, {file = "idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902"}, @@ -746,6 +746,9 @@ files = [ {file = "pyjwt-2.11.0.tar.gz", hash = "sha256:35f95c1f0fbe5d5ba6e43f00271c275f7a1a4db1dab27bf708073b75318ea623"}, ] +[package.dependencies] +cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""} + [package.extras] crypto = ["cryptography (>=3.4.0)"] dev = ["coverage[toml] (==7.10.7)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=8.4.2,<9.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] @@ -773,7 +776,7 @@ version = "6.0.3" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" -groups = ["main"] +groups = ["main", "dev"] files = [ {file = "PyYAML-6.0.3-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:c2514fceb77bc5e7a2f7adfaa1feb2fb311607c9cb518dbc378688ec73d8292f"}, {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c57bb8c96f6d1808c030b1687b9b5fb476abaa47f0db9c0101f5e9f394e97f4"}, @@ -856,7 +859,7 @@ version = "2.32.5" description = "Python HTTP for Humans." 
optional = false python-versions = ">=3.9" -groups = ["main"] +groups = ["main", "dev"] files = [ {file = "requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6"}, {file = "requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf"}, @@ -878,7 +881,7 @@ version = "0.26.0" description = "A utility library for mocking out the `requests` Python library." optional = false python-versions = ">=3.8" -groups = ["main"] +groups = ["main", "dev"] files = [ {file = "responses-0.26.0-py3-none-any.whl", hash = "sha256:03ec4409088cd5c66b71ecbbbd27fe2c58ddfad801c66203457b3e6a04868c37"}, {file = "responses-0.26.0.tar.gz", hash = "sha256:c7f6923e6343ef3682816ba421c006626777893cb0d5e1434f674b649bac9eb4"}, @@ -940,7 +943,7 @@ version = "2.6.3" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.9" -groups = ["main"] +groups = ["main", "dev"] files = [ {file = "urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4"}, {file = "urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed"}, @@ -988,4 +991,4 @@ test = ["pytest", "pytest-cov"] [metadata] lock-version = "2.1" python-versions = "~3.11" -content-hash = "add4cde377952585d01c12803a6013f7a1eb4c6720c7653ac78cab2a2af60da6" +content-hash = "92fb0205e8461c8f58b0f583d570697bf43d38ebcb7995e37f10ab8c7f15865f" diff --git a/lambdas/mns_publisher/pyproject.toml b/lambdas/mns_publisher/pyproject.toml index 5f7458bde7..d87800ba4f 100644 --- a/lambdas/mns_publisher/pyproject.toml +++ b/lambdas/mns_publisher/pyproject.toml @@ -13,7 +13,7 @@ packages = [ python = "~3.11" aws-lambda-typing = "~2.20.0" coverage = "^7.13.2" -pyjwt = "^2.10.1" +pyjwt = { version = "^2.10.1", extras = ["crypto"] } requests = "^2.31.0" boto3 = "~1.42.37" mypy-boto3-dynamodb = 
"^1.42.33" diff --git a/lambdas/mns_publisher/src/create_notification.py b/lambdas/mns_publisher/src/create_notification.py index 0459b4a8b9..bb7e5c8a48 100644 --- a/lambdas/mns_publisher/src/create_notification.py +++ b/lambdas/mns_publisher/src/create_notification.py @@ -2,15 +2,15 @@ import os import uuid from datetime import date, datetime -from typing import Any +from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import DynamoDBStreamEvent from aws_lambda_typing.events.sqs import SQSMessage from common.api_clients.constants import MnsNotificationPayload from common.api_clients.get_pds_details import pds_get_patient_details from common.clients import logger from common.get_service_url import get_service_url -from constants import DYNAMO_DB_TYPE_DESCRIPTORS, IMMUNISATION_EVENT_SOURCE, IMMUNISATION_EVENT_TYPE, SPEC_VERSION +from constants import IMMUNISATION_EVENT_SOURCE, IMMUNISATION_EVENT_TYPE, SPEC_VERSION IMMUNIZATION_ENV = os.getenv("IMMUNIZATION_ENV") IMMUNIZATION_BASE_PATH = os.getenv("IMMUNIZATION_BASE_PATH") @@ -21,21 +21,23 @@ def create_mns_notification(sqs_event: SQSMessage) -> MnsNotificationPayload: immunisation_url = get_service_url(IMMUNIZATION_ENV, IMMUNIZATION_BASE_PATH) body = json.loads(sqs_event.get("body", "{}")) - new_image = body.get("dynamodb", {}).get("NewImage", {}) - imms_id = _unwrap_dynamodb_value(new_image.get("ImmsID", {})) - supplier_system = _unwrap_dynamodb_value(new_image.get("SupplierSystem", {})) - vaccine_type = _unwrap_dynamodb_value(new_image.get("VaccineType", {})) - operation = _unwrap_dynamodb_value(new_image.get("Operation", {})) - - imms_map = new_image.get("Imms", {}).get("M", {}) - nhs_number = _unwrap_dynamodb_value(imms_map.get("NHS_NUMBER", {})) + event = DynamoDBStreamEvent({"Records": [body]}) + record = next(event.records) + new_image = record.dynamodb.new_image + imms_id = new_image.get("ImmsID", {}) + supplier_system = new_image.get("SupplierSystem", "") + vaccine_type = 
new_image.get("VaccineType", "") + operation = new_image.get("Operation", "") + + imms_data = new_image.get("Imms", {}) + nhs_number = imms_data.get("NHS_NUMBER", "") if not nhs_number: logger.error("Missing required field: Nhs Number") raise ValueError("NHS number is required to create MNS notification") - person_dob = _unwrap_dynamodb_value(imms_map.get("PERSON_DOB", {})) - date_and_time = _unwrap_dynamodb_value(imms_map.get("DATE_AND_TIME", {})) - site_code = _unwrap_dynamodb_value(imms_map.get("SITE_CODE", {})) + person_dob = imms_data.get("PERSON_DOB", "") + date_and_time = imms_data.get("DATE_AND_TIME", "") + site_code = imms_data.get("SITE_CODE", "") patient_age = calculate_age_at_vaccination(person_dob, date_and_time) gp_ods_code = get_practitioner_details_from_pds(nhs_number) @@ -124,21 +126,3 @@ def get_practitioner_details_from_pds(nhs_number: str) -> str | None: return None return gp_ods_code - - -def _unwrap_dynamodb_value(value: dict) -> Any: - """ - Unwrap DynamoDB type descriptor to get the actual value. 
- DynamoDB types: S (String), N (Number), BOOL, M (Map), L (List), NULL - """ - if not isinstance(value, dict): - return value - - if "NULL" in value: - return None - - for key in DYNAMO_DB_TYPE_DESCRIPTORS: - if key in value: - return value[key] - - return value diff --git a/lambdas/mns_publisher/tests/test_create_notification.py b/lambdas/mns_publisher/tests/test_create_notification.py index 96f7985602..a510f99633 100644 --- a/lambdas/mns_publisher/tests/test_create_notification.py +++ b/lambdas/mns_publisher/tests/test_create_notification.py @@ -5,7 +5,6 @@ from constants import IMMUNISATION_EVENT_SOURCE, IMMUNISATION_EVENT_TYPE, SPEC_VERSION from create_notification import ( - _unwrap_dynamodb_value, calculate_age_at_vaccination, create_mns_notification, get_practitioner_details_from_pds, @@ -314,43 +313,3 @@ def test_get_practitioner_pds_exception(self, mock_logger, mock_pds_get): get_practitioner_details_from_pds("9481152782") self.assertEqual(str(context.exception), "PDS API error") - - -class TestUnwrapDynamodbValue(unittest.TestCase): - """Tests for _unwrap_dynamodb_value helper function.""" - - def test_unwrap_string_type(self): - """Test unwrapping DynamoDB String type.""" - value = {"S": "test-value"} - result = _unwrap_dynamodb_value(value) - self.assertEqual(result, "test-value") - - def test_unwrap_number_type(self): - """Test unwrapping DynamoDB Number type.""" - value = {"N": "123"} - result = _unwrap_dynamodb_value(value) - self.assertEqual(result, "123") - - def test_unwrap_boolean_type(self): - """Test unwrapping DynamoDB Boolean type.""" - value = {"BOOL": True} - result = _unwrap_dynamodb_value(value) - self.assertTrue(result) - - def test_unwrap_null_type(self): - """Test unwrapping DynamoDB NULL type.""" - value = {"NULL": True} - result = _unwrap_dynamodb_value(value) - self.assertIsNone(result) - - def test_unwrap_map_type(self): - """Test unwrapping DynamoDB Map type.""" - value = {"M": {"key": {"S": "value"}}} - result = 
_unwrap_dynamodb_value(value) - self.assertEqual(result, {"key": {"S": "value"}}) - - def test_unwrap_list_type(self): - """Test unwrapping DynamoDB List type.""" - value = {"L": [{"S": "item1"}, {"S": "item2"}]} - result = _unwrap_dynamodb_value(value) - self.assertEqual(result, [{"S": "item1"}, {"S": "item2"}]) From ebcc051ae47c57c13f08868d7d97ac5fa6ab7018 Mon Sep 17 00:00:00 2001 From: Akol125 Date: Wed, 18 Mar 2026 20:39:43 +0000 Subject: [PATCH 11/14] bump lambda runtime, remove location from obs and datetime validation --- .../instance/modules/mns_publisher/mns_publisher_lambda.tf | 2 +- lambdas/delta_backend/src/observability.py | 3 +-- lambdas/mns_publisher/src/constants.py | 2 -- lambdas/mns_publisher/src/create_notification.py | 3 --- lambdas/mns_publisher/src/observability.py | 2 +- lambdas/mns_publisher/tests/test_create_notification.py | 4 ---- 6 files changed, 3 insertions(+), 13 deletions(-) diff --git a/infrastructure/instance/modules/mns_publisher/mns_publisher_lambda.tf b/infrastructure/instance/modules/mns_publisher/mns_publisher_lambda.tf index 7c4d9f169d..3c3c127f24 100644 --- a/infrastructure/instance/modules/mns_publisher/mns_publisher_lambda.tf +++ b/infrastructure/instance/modules/mns_publisher/mns_publisher_lambda.tf @@ -182,7 +182,7 @@ resource "aws_lambda_function" "mns_publisher_lambda" { package_type = "Image" image_uri = module.mns_publisher_docker_image.image_uri architectures = ["x86_64"] - timeout = 120 + timeout = 300 vpc_config { subnet_ids = var.private_subnet_ids diff --git a/lambdas/delta_backend/src/observability.py b/lambdas/delta_backend/src/observability.py index efb0004778..13a1776fa7 100644 --- a/lambdas/delta_backend/src/observability.py +++ b/lambdas/delta_backend/src/observability.py @@ -26,8 +26,7 @@ service=_SERVICE_NAME, # Respect LOG_LEVEL env var; default INFO for production safety. 
level=os.environ.get("LOG_LEVEL", "INFO"), + log_record_order=["level", "message", "function", "line"], # Serialise uncaught exceptions as structured JSON. log_uncaught_exceptions=True, - # Set POWERTOOLS_LOGGER_LOG_CALLABLE_LOCATION=true to re-enable locally. - location=os.environ.get("POWERTOOLS_LOGGER_LOG_CALLABLE_LOCATION", "false").lower() == "true", ) diff --git a/lambdas/mns_publisher/src/constants.py b/lambdas/mns_publisher/src/constants.py index 1896313f58..d906de2b50 100644 --- a/lambdas/mns_publisher/src/constants.py +++ b/lambdas/mns_publisher/src/constants.py @@ -2,5 +2,3 @@ SPEC_VERSION = "1.0" IMMUNISATION_EVENT_SOURCE = "uk.nhs.vaccinations-data-flow-management" IMMUNISATION_EVENT_TYPE = "imms-vaccination-record-change-1" - -DYNAMO_DB_TYPE_DESCRIPTORS = ("S", "N", "BOOL", "M", "L") diff --git a/lambdas/mns_publisher/src/create_notification.py b/lambdas/mns_publisher/src/create_notification.py index bb7e5c8a48..62be91784e 100644 --- a/lambdas/mns_publisher/src/create_notification.py +++ b/lambdas/mns_publisher/src/create_notification.py @@ -83,9 +83,6 @@ def calculate_age_at_vaccination(birth_date: str, vaccination_date: str) -> int: date_of_birth = _parse_compact_date(birth_date, "PERSON_DOB") date_of_vaccination = _parse_compact_date(vaccination_date, "DATE_AND_TIME") - if date_of_vaccination < date_of_birth: - raise ValueError("DATE_AND_TIME cannot be before PERSON_DOB") - age = date_of_vaccination.year - date_of_birth.year if (date_of_vaccination.month, date_of_vaccination.day) < (date_of_birth.month, date_of_birth.day): age -= 1 diff --git a/lambdas/mns_publisher/src/observability.py b/lambdas/mns_publisher/src/observability.py index 3fbef40ee2..411f821a7c 100644 --- a/lambdas/mns_publisher/src/observability.py +++ b/lambdas/mns_publisher/src/observability.py @@ -16,6 +16,6 @@ logger: Logger = Logger( service=_SERVICE_NAME, level=os.environ.get("LOG_LEVEL", "INFO"), + log_record_order=["level", "message", "function", "line"], 
log_uncaught_exceptions=True, - location=os.environ.get("POWERTOOLS_LOGGER_LOG_CALLABLE_LOCATION", "false").lower() == "true", ) diff --git a/lambdas/mns_publisher/tests/test_create_notification.py b/lambdas/mns_publisher/tests/test_create_notification.py index a510f99633..9ac666b39c 100644 --- a/lambdas/mns_publisher/tests/test_create_notification.py +++ b/lambdas/mns_publisher/tests/test_create_notification.py @@ -47,10 +47,6 @@ def test_rejects_nonexistent_birth_date(self): with self.assertRaisesRegex(ValueError, "PERSON_DOB"): calculate_age_at_vaccination("20040230", "20260212") - def test_rejects_vaccination_before_birth(self): - with self.assertRaisesRegex(ValueError, "cannot be before"): - calculate_age_at_vaccination("20260212", "20250212") - class TestCreateMnsNotification(unittest.TestCase): """Tests for MNS notification creation.""" From e83b0fe51a0c34be8723fe61f13c9de346a4bd9a Mon Sep 17 00:00:00 2001 From: Akol125 Date: Thu, 19 Mar 2026 12:41:43 +0000 Subject: [PATCH 12/14] add reasonable visibility timeout --- .../instance/modules/mns_publisher/sqs_mns_outbound_events.tf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/infrastructure/instance/modules/mns_publisher/sqs_mns_outbound_events.tf b/infrastructure/instance/modules/mns_publisher/sqs_mns_outbound_events.tf index 074d06e94f..5e2a3460d7 100644 --- a/infrastructure/instance/modules/mns_publisher/sqs_mns_outbound_events.tf +++ b/infrastructure/instance/modules/mns_publisher/sqs_mns_outbound_events.tf @@ -2,7 +2,7 @@ resource "aws_sqs_queue" "mns_outbound_events" { name = "${var.mns_publisher_resource_name_prefix}-queue" fifo_queue = false kms_master_key_id = aws_kms_key.mns_outbound_events.arn - visibility_timeout_seconds = 180 + visibility_timeout_seconds = 450 redrive_policy = jsonencode({ deadLetterTargetArn = aws_sqs_queue.mns_outbound_events_dlq.arn maxReceiveCount = 2 From 4bbfac830a0d8fe1f725fb486ef5967de7f8593b Mon Sep 17 00:00:00 2001 From: Akol125 Date: Thu, 19 Mar 2026 
13:13:57 +0000 Subject: [PATCH 13/14] add comment to calculate age func addressing validation concerns
log_uncaught_exceptions=True, + location=False, ) diff --git a/lambdas/mns_publisher/src/observability.py b/lambdas/mns_publisher/src/observability.py index 411f821a7c..8265d4cd8c 100644 --- a/lambdas/mns_publisher/src/observability.py +++ b/lambdas/mns_publisher/src/observability.py @@ -16,6 +16,6 @@ logger: Logger = Logger( service=_SERVICE_NAME, level=os.environ.get("LOG_LEVEL", "INFO"), - log_record_order=["level", "message", "function", "line"], log_uncaught_exceptions=True, + location=False, )