From 01fa03cd53570c9a4a3676fb6ff3b6233b5bc721 Mon Sep 17 00:00:00 2001 From: Akol125 Date: Mon, 16 Feb 2026 11:56:37 +0000 Subject: [PATCH 01/31] build base schema --- .../instance/mns_publisher_lambda.tf | 4 +- lambdas/mns_publisher/src/constants.py | 2 + .../mns_publisher/src/create_notification.py | 26 ++++ lambdas/mns_publisher/src/sqs_event 2.json | 118 ++++++++++++++++++ lambdas/shared/src/common/constants.py | 2 + lambdas/shared/src/common/get_service_url.py | 26 ++++ 6 files changed, 177 insertions(+), 1 deletion(-) create mode 100644 lambdas/mns_publisher/src/constants.py create mode 100644 lambdas/mns_publisher/src/create_notification.py create mode 100644 lambdas/mns_publisher/src/sqs_event 2.json create mode 100644 lambdas/shared/src/common/constants.py create mode 100644 lambdas/shared/src/common/get_service_url.py diff --git a/infrastructure/instance/mns_publisher_lambda.tf b/infrastructure/instance/mns_publisher_lambda.tf index ff803a0ae5..8c1a903022 100644 --- a/infrastructure/instance/mns_publisher_lambda.tf +++ b/infrastructure/instance/mns_publisher_lambda.tf @@ -183,7 +183,9 @@ resource "aws_lambda_function" "mns_publisher_lambda" { environment { variables = { - SPLUNK_FIREHOSE_NAME = module.splunk.firehose_stream_name + SPLUNK_FIREHOSE_NAME = module.splunk.firehose_stream_name + "IMMUNIZATION_ENV" = local.resource_scope, + "IMMUNIZATION_BASE_PATH" = strcontains(var.sub_environment, "pr-") ? 
"immunisation-fhir-api/FHIR/R4-${var.sub_environment}" : "immunisation-fhir-api/FHIR/R4" } } diff --git a/lambdas/mns_publisher/src/constants.py b/lambdas/mns_publisher/src/constants.py new file mode 100644 index 0000000000..2fc8000a60 --- /dev/null +++ b/lambdas/mns_publisher/src/constants.py @@ -0,0 +1,2 @@ +SPEC_VERSION = "1.0" +IMMUNISATION_TYPE = "imms-vaccinations-1 / imms-vaccinations-2" diff --git a/lambdas/mns_publisher/src/create_notification.py b/lambdas/mns_publisher/src/create_notification.py new file mode 100644 index 0000000000..2d253ce2af --- /dev/null +++ b/lambdas/mns_publisher/src/create_notification.py @@ -0,0 +1,26 @@ +import json +import os + +from common.get_service_url import get_service_url +from constants import IMMUNISATION_TYPE, SPEC_VERSION + +IMMUNIZATION_ENV = os.getenv("IMMUNIZATION_ENV") +IMMUNIZATION_BASE_PATH = os.getenv("IMMUNIZATION_BASE_PATH") + +IMMUNIZATION_URL = get_service_url(IMMUNIZATION_ENV, IMMUNIZATION_BASE_PATH) + + +def create_mns_notification(event: dict) -> dict: + """Create a notification payload for MNS.""" + + incoming_sqs_message = json.loads(event["body"]) + + return { + "specversion": SPEC_VERSION, + "id": incoming_sqs_message["eventID"], + "source": IMMUNIZATION_URL, + "type": IMMUNISATION_TYPE, + "time": "2020-06-01T13:00:00Z", + "subject": "", + "dataref": "https://int.api.service.nhs.uk/immunisation-fhir-api/Immunization/29dc4e84-7e72-11ee-b962-0242ac120002", + } diff --git a/lambdas/mns_publisher/src/sqs_event 2.json b/lambdas/mns_publisher/src/sqs_event 2.json new file mode 100644 index 0000000000..0363906c7f --- /dev/null +++ b/lambdas/mns_publisher/src/sqs_event 2.json @@ -0,0 +1,118 @@ +{ + "messageId": "98ed30eb-829f-41df-8a73-57fef70cf161", + "receiptHandle": 
"AQEBpFIQq7dcCyEquMsKkFgM3iROiAVOLvq9CBwaFy7EkVHpqu5+leD7FEc/7KexUF91w8cZEn1XSSqUjapPq45SE7aAxzCOVjFHusYrYwcSBYg10mP60vXwVu3Qzp+F2T/52ONt75pStSJhm1fMXq6/ZkmYbpcTX2SLdL/5Yfx/rRo4uXFnPjo5VFMpH2yFDJnRnHJ4coHwCogvwuzp68cxU/zJOOaMKPQOCpYJMULkD8ITF/SAMWtzr6XSpgIWFUl+K9HFqDtljg5mv4oE34v9k+GRj0WNQVgjuSLCTYcGpYg75Kh6Rn9o7G9aH4fqczFQbzM0uYWmSSH2SNA4r6raupxTl8gXbG1Uzgq1rfhNxtMtvB4dSqR82je6IVf5lZ0Z+YTCy/Rqyr9SF9mDyFB5VjmqfN0MKENXKjJ/G7tqWoo=", + + "body": { + "eventID": "b1ba2a48eae68bf43a8cb49b400788c6", + "eventName": "INSERT", + "eventVersion": "1.1", + "eventSource": "aws:dynamodb", + "awsRegion": "eu-west-2", + + "dynamodb": { + "ApproximateCreationDateTime": 1770918337, + + "Keys": { + "PK": { "S": "a841e2c1dd0ecd2f60113890cc02b130" } + }, + + "NewImage": { + "ImmsID": { "S": "d058014c-b0fd-4471-8db9-3316175eb825" }, + "VaccineType": { "S": "hib" }, + "SupplierSystem": { "S": "TPP" }, + "DateTimeStamp": { "S": "2026-02-12T17:45:37+00:00" }, + + "Imms": { + "M": { + "UNIQUE_ID": { "S": "ae4f6b62-a419-41a8-b5e2-a5228b5f9e41" }, + "UNIQUE_ID_URI": { "S": "https://supplierABC/identifiers/vacc" }, + + "PERSON_FORENAME": { "S": "PEILL" }, + "PERSON_SURNAME": { "S": "LIZZY" }, + "PERSON_DOB": { "S": "20040609" }, + "PERSON_GENDER_CODE": { "S": "2" }, + "PERSON_POSTCODE": { "S": "M7 4ES" }, + "NHS_NUMBER": { "S": "9481152782" }, + + "PERFORMING_PROFESSIONAL_FORENAME": { "S": "Darren" }, + "PERFORMING_PROFESSIONAL_SURNAME": { "S": "Furlong" }, + + "VACCINE_TYPE": { "S": "hib" }, + "VACCINE_PRODUCT_CODE": { "S": "9903611000001100" }, + "VACCINE_PRODUCT_TERM": { + "S": "Menitorix powder and solvent for solution for injection 0.5ml vials (GlaxoSmithKline)" + }, + "VACCINE_MANUFACTURER": { "S": "Sanofi" }, + + "VACCINATION_PROCEDURE_CODE": { "S": "712833000" }, + "VACCINATION_PROCEDURE_TERM": { + "S": "Haemophilus influenzae type B Meningitis C (HibMenC) vaccination codes" + }, + + "INDICATION_CODE": { "S": "443684005" }, + + "SITE_OF_VACCINATION_CODE": { "S": "368208006" }, + 
"SITE_OF_VACCINATION_TERM": { + "S": "Left upper arm structure (body structure)" + }, + + "ROUTE_OF_VACCINATION_CODE": { "S": "78421000" }, + "ROUTE_OF_VACCINATION_TERM": { + "S": "Intramuscular route (qualifier value)" + }, + + "DOSE_SEQUENCE": { "S": "1" }, + "DOSE_AMOUNT": { "N": "0.3" }, + "DOSE_UNIT_CODE": { "S": "2622896019" }, + "DOSE_UNIT_TERM": { "S": "Inhalation - unit of product usage" }, + + "BATCH_NUMBER": { "S": "688346" }, + "EXPIRY_DATE": { "S": "20280212" }, + + "DATE_AND_TIME": { "S": "20260212T17443700" }, + "RECORDED_DATE": { "S": "20260212" }, + + "SITE_CODE": { "S": "B0C4P" }, + "SITE_CODE_TYPE_URI": { + "S": "https://fhir.nhs.uk/Id/ods-organization-code" + }, + + "LOCATION_CODE": { "S": "X99999" }, + "LOCATION_CODE_TYPE_URI": { + "S": "https://fhir.nhs.uk/Id/ods-organization-code" + }, + + "PRIMARY_SOURCE": { "S": "TRUE" }, + "ACTION_FLAG": { "S": "NEW" }, + + "CONVERSION_ERRORS": { "L": [] } + } + }, + + "Operation": { "S": "CREATE" }, + "PK": { "S": "a841e2c1dd0ecd2f60113890cc02b130" }, + "ExpiresAt": { "N": "1773510337" }, + "Source": { "S": "IEDS" } + }, + + "SequenceNumber": "42400003126610103283304", + "SizeBytes": 1463, + "StreamViewType": "NEW_IMAGE" + }, + + "eventSourceARN": "arn:aws:dynamodb:eu-west-2:345594581768:table/imms-pr-1203-delta/stream/2026-02-12T17:32:56.589" + }, + + "attributes": { + "ApproximateReceiveCount": "1", + "SentTimestamp": "1770994394616", + "SenderId": "AROAVA5YK2MEDW5XLAPXH:634b0edba98233009bdd0a31c220a880", + "ApproximateFirstReceiveTimestamp": "1770994394620" + }, + + "messageAttributes": {}, + "md5OfBody": "f89442a426edfc37ca55f86e9cbb61bb", + "eventSource": "aws:sqs", + "eventSourceARN": "arn:aws:sqs:eu-west-2:345594581768:pr-1203-mns-outbound-events-queue", + "awsRegion": "eu-west-2" +} diff --git a/lambdas/shared/src/common/constants.py b/lambdas/shared/src/common/constants.py new file mode 100644 index 0000000000..060a4ca472 --- /dev/null +++ b/lambdas/shared/src/common/constants.py @@ -0,0 +1,2 @@ 
+DEFAULT_BASE_PATH = "immunisation-fhir-api/FHIR/R4" +PR_ENV_PREFIX = "pr-" diff --git a/lambdas/shared/src/common/get_service_url.py b/lambdas/shared/src/common/get_service_url.py new file mode 100644 index 0000000000..0de3662b16 --- /dev/null +++ b/lambdas/shared/src/common/get_service_url.py @@ -0,0 +1,26 @@ +from typing import Optional + +from constants import DEFAULT_BASE_PATH, PR_ENV_PREFIX + + +def get_service_url(service_env: Optional[str], service_base_path: Optional[str]) -> str: + """Sets the service URL based on service parameters derived from env vars. PR environments use internal-dev while + we also default to this environment. The only other exceptions are preprod which maps to the Apigee int environment + and prod which does not have a subdomain.""" + if not service_base_path: + service_base_path = DEFAULT_BASE_PATH + + if service_env is None or is_pr_env(service_env): + subdomain = "internal-dev." + elif service_env == "preprod": + subdomain = "int." + elif service_env == "prod": + subdomain = "" + else: + subdomain = f"{service_env}." 
+ + return f"https://{subdomain}api.service.nhs.uk/{service_base_path}" + + +def is_pr_env(service_env: Optional[str]) -> bool: + return service_env is not None and service_env.startswith(PR_ENV_PREFIX) From 953d8610a426858bc028401e343ad5203fcc9bd5 Mon Sep 17 00:00:00 2001 From: Akol125 Date: Tue, 17 Feb 2026 22:10:50 +0000 Subject: [PATCH 02/31] setup base_schema from vaccs data --- .../backend/src/controller/fhir_controller.py | 3 +- lambdas/backend/src/service/fhir_service.py | 3 +- .../backend/src/service/search_url_helper.py | 25 +------------- .../tests/service/test_search_url_helper.py | 29 ---------------- lambdas/mns_publisher/src/constants.py | 17 +++++++++- .../mns_publisher/src/create_notification.py | 34 +++++++++++++++---- lambdas/mns_publisher/src/helper.py | 9 +++++ lambdas/mns_publisher/src/lambda_handler.py | 3 ++ .../src/{sqs_event 2.json => sqs_event.json} | 0 lambdas/mns_publisher/src/utils 2.py | 10 ++++++ lambdas/shared/src/common/get_service_url.py | 2 +- .../tests/test_common/test_get_service_url.py | 29 ++++++++++++++++ 12 files changed, 100 insertions(+), 64 deletions(-) create mode 100644 lambdas/mns_publisher/src/helper.py rename lambdas/mns_publisher/src/{sqs_event 2.json => sqs_event.json} (100%) create mode 100644 lambdas/mns_publisher/src/utils 2.py create mode 100644 lambdas/shared/tests/test_common/test_get_service_url.py diff --git a/lambdas/backend/src/controller/fhir_controller.py b/lambdas/backend/src/controller/fhir_controller.py index d82f9a4305..58f2dedebc 100644 --- a/lambdas/backend/src/controller/fhir_controller.py +++ b/lambdas/backend/src/controller/fhir_controller.py @@ -10,6 +10,7 @@ from fhir.resources.R4B.bundle import Bundle from fhir.resources.R4B.identifier import Identifier +from common.get_service_url import get_service_url from constants import MAX_RESPONSE_SIZE_BYTES from controller.aws_apig_event_utils import ( get_multi_value_query_params, @@ -33,7 +34,7 @@ TooManyResultsError, ) from 
repository.fhir_repository import ImmunizationRepository, create_table -from service.fhir_service import FhirService, get_service_url +from service.fhir_service import FhirService IMMUNIZATION_ENV = os.getenv("IMMUNIZATION_ENV") IMMUNIZATION_BASE_PATH = os.getenv("IMMUNIZATION_BASE_PATH") diff --git a/lambdas/backend/src/service/fhir_service.py b/lambdas/backend/src/service/fhir_service.py index e6287068e9..a6049d9432 100644 --- a/lambdas/backend/src/service/fhir_service.py +++ b/lambdas/backend/src/service/fhir_service.py @@ -21,6 +21,7 @@ from authorisation.api_operation_code import ApiOperationCode from authorisation.authoriser import Authoriser +from common.get_service_url import get_service_url from common.models.constants import Constants from common.models.errors import ( Code, @@ -45,7 +46,7 @@ from filter import Filter from models.errors import UnauthorizedVaxError from repository.fhir_repository import ImmunizationRepository -from service.search_url_helper import create_url_for_bundle_link, get_service_url +from service.search_url_helper import create_url_for_bundle_link logging.basicConfig(level="INFO") logger = logging.getLogger() diff --git a/lambdas/backend/src/service/search_url_helper.py b/lambdas/backend/src/service/search_url_helper.py index 1a762118a2..5139ffe56e 100644 --- a/lambdas/backend/src/service/search_url_helper.py +++ b/lambdas/backend/src/service/search_url_helper.py @@ -4,32 +4,9 @@ import urllib.parse from typing import Optional +from common.get_service_url import get_service_url from controller.constants import IMMUNIZATION_TARGET_LEGACY_KEY_NAME, ImmunizationSearchParameterName from controller.parameter_parser import PATIENT_IDENTIFIER_SYSTEM -from service.constants import DEFAULT_BASE_PATH, PR_ENV_PREFIX - - -def get_service_url(service_env: Optional[str], service_base_path: Optional[str]) -> str: - """Sets the service URL based on service parameters derived from env vars. 
PR environments use internal-dev while - we also default to this environment. The only other exceptions are preprod which maps to the Apigee int environment - and prod which does not have a subdomain.""" - if not service_base_path: - service_base_path = DEFAULT_BASE_PATH - - if service_env is None or is_pr_env(service_env): - subdomain = "internal-dev." - elif service_env == "preprod": - subdomain = "int." - elif service_env == "prod": - subdomain = "" - else: - subdomain = f"{service_env}." - - return f"https://{subdomain}api.service.nhs.uk/{service_base_path}" - - -def is_pr_env(service_env: Optional[str]) -> bool: - return service_env is not None and service_env.startswith(PR_ENV_PREFIX) def create_url_for_bundle_link( diff --git a/lambdas/backend/tests/service/test_search_url_helper.py b/lambdas/backend/tests/service/test_search_url_helper.py index 24289d9aeb..e69de29bb2 100644 --- a/lambdas/backend/tests/service/test_search_url_helper.py +++ b/lambdas/backend/tests/service/test_search_url_helper.py @@ -1,29 +0,0 @@ -"""Tests for the search_url_helper file""" - -import unittest - -from service.search_url_helper import get_service_url - - -class TestServiceUrl(unittest.TestCase): - def test_get_service_url(self): - """it should create service url""" - test_cases = [ - ("pr-123", "https://internal-dev.api.service.nhs.uk/immunisation-fhir-api/FHIR/R4"), - (None, "https://internal-dev.api.service.nhs.uk/immunisation-fhir-api/FHIR/R4"), - ("preprod", "https://int.api.service.nhs.uk/immunisation-fhir-api/FHIR/R4"), - ("prod", "https://api.service.nhs.uk/immunisation-fhir-api/FHIR/R4"), - ("ref", "https://ref.api.service.nhs.uk/immunisation-fhir-api/FHIR/R4"), - ("internal-dev", "https://internal-dev.api.service.nhs.uk/immunisation-fhir-api/FHIR/R4"), - ("internal-qa", "https://internal-qa.api.service.nhs.uk/immunisation-fhir-api/FHIR/R4"), - ] - mock_base_path = "immunisation-fhir-api/FHIR/R4" - - for mock_env, expected in test_cases: - with 
self.subTest(mock_env=mock_env, expected=expected): - self.assertEqual(get_service_url(mock_env, mock_base_path), expected) - - def test_get_service_url_uses_default_path_when_not_provided(self): - self.assertEqual( - get_service_url(None, None), "https://internal-dev.api.service.nhs.uk/immunisation-fhir-api/FHIR/R4" - ) diff --git a/lambdas/mns_publisher/src/constants.py b/lambdas/mns_publisher/src/constants.py index 2fc8000a60..bf3b1c2d87 100644 --- a/lambdas/mns_publisher/src/constants.py +++ b/lambdas/mns_publisher/src/constants.py @@ -1,2 +1,17 @@ +from enum import Enum + +# Static constants for the MNS notification creation process SPEC_VERSION = "1.0" -IMMUNISATION_TYPE = "imms-vaccinations-1 / imms-vaccinations-2" +IMMUNISATION_TYPE = "imms-vaccinations-2" + + +# Fields from the incoming SQS message that forms part of the base schema and filtering attributes for MNS notifications +class SQSEventFields(Enum): + IMMUNISATION_TYPE = IMMUNISATION_TYPE + DATE_AND_TIME_KEY = "DATE_AND_TIME" + NHS_NUMBER_KEY = "NHS_NUMBER" + IMMUNISATION_ID_KEY = "ImmsID" + SOURCE_ORGANISATION_KEY = "SITE_CODE" + SOURCE_APPLICATION_KEY = "SupplierSystem" + VACCINE_TYPE = "VACCINE_TYPE" + ACTION = "Operation" diff --git a/lambdas/mns_publisher/src/create_notification.py b/lambdas/mns_publisher/src/create_notification.py index 2d253ce2af..be8e27b57d 100644 --- a/lambdas/mns_publisher/src/create_notification.py +++ b/lambdas/mns_publisher/src/create_notification.py @@ -1,8 +1,10 @@ import json import os +import uuid from common.get_service_url import get_service_url -from constants import IMMUNISATION_TYPE, SPEC_VERSION +from constants import IMMUNISATION_TYPE, SPEC_VERSION, SQSEventFields +from helper import find_imms_value_in_stream IMMUNIZATION_ENV = os.getenv("IMMUNIZATION_ENV") IMMUNIZATION_BASE_PATH = os.getenv("IMMUNIZATION_BASE_PATH") @@ -10,17 +12,35 @@ IMMUNIZATION_URL = get_service_url(IMMUNIZATION_ENV, IMMUNIZATION_BASE_PATH) -def create_mns_notification(event: dict) -> 
dict: +def create_mns_notification(sqs_event: dict) -> dict: """Create a notification payload for MNS.""" - incoming_sqs_message = json.loads(event["body"]) + immunisation_url = get_service_url(IMMUNIZATION_ENV, IMMUNIZATION_BASE_PATH) + incoming_sqs_message = json.loads(sqs_event["body"]) + + imms_data = {field: find_imms_value_in_stream(incoming_sqs_message, field.value) for field in SQSEventFields} return { "specversion": SPEC_VERSION, - "id": incoming_sqs_message["eventID"], + "id": str(uuid.uuid4()), "source": IMMUNIZATION_URL, "type": IMMUNISATION_TYPE, - "time": "2020-06-01T13:00:00Z", - "subject": "", - "dataref": "https://int.api.service.nhs.uk/immunisation-fhir-api/Immunization/29dc4e84-7e72-11ee-b962-0242ac120002", + "time": imms_data[SQSEventFields.DATE_AND_TIME_KEY], + "subject": imms_data[SQSEventFields.NHS_NUMBER_KEY], + "dataref": f"{immunisation_url}/Immunization/{imms_data[SQSEventFields.IMMUNISATION_ID_KEY]}", + "filtering": { + "generalpractitioner": "fy4563", + "sourceorganisation": imms_data[SQSEventFields.SOURCE_ORGANISATION_KEY], + "sourceapplication": imms_data[SQSEventFields.SOURCE_APPLICATION_KEY], + "subjectage": "17", + "immunisationtype": imms_data[SQSEventFields.VACCINE_TYPE], + "action": imms_data[SQSEventFields.ACTION], + }, } + + +def fetch_details_from_pds(nhs_number: str) -> dict: + """Fetch patient details from PDS using the NHS number.""" + # Placeholder for PDS integration logic + # This function would typically make an API call to PDS to retrieve patient details + return None diff --git a/lambdas/mns_publisher/src/helper.py b/lambdas/mns_publisher/src/helper.py new file mode 100644 index 0000000000..8579470c94 --- /dev/null +++ b/lambdas/mns_publisher/src/helper.py @@ -0,0 +1,9 @@ +def find_imms_value_in_stream(sqs_event_data: dict, target_key: str): + if isinstance(sqs_event_data, dict): + for key, value in sqs_event_data.items(): + if key == target_key: + return value + result = find_imms_value_in_stream(value, target_key) 
+ if result is not None: + return result + return None diff --git a/lambdas/mns_publisher/src/lambda_handler.py b/lambdas/mns_publisher/src/lambda_handler.py index 0dbec66812..7f3e1a2c46 100644 --- a/lambdas/mns_publisher/src/lambda_handler.py +++ b/lambdas/mns_publisher/src/lambda_handler.py @@ -1,10 +1,13 @@ from aws_lambda_typing import context, events +from create_notification import create_mns_notification + def lambda_handler(event: events.SQSEvent, _: context.Context) -> bool: event_records = event.get("Records", []) for record in event_records: print(record) + return create_mns_notification(record) return True diff --git a/lambdas/mns_publisher/src/sqs_event 2.json b/lambdas/mns_publisher/src/sqs_event.json similarity index 100% rename from lambdas/mns_publisher/src/sqs_event 2.json rename to lambdas/mns_publisher/src/sqs_event.json diff --git a/lambdas/mns_publisher/src/utils 2.py b/lambdas/mns_publisher/src/utils 2.py new file mode 100644 index 0000000000..20cfb3e01f --- /dev/null +++ b/lambdas/mns_publisher/src/utils 2.py @@ -0,0 +1,10 @@ +def get_nested(data, path, default=None): + """ + Safely retrieve a nested value from a dict using a list of keys. 
+ """ + current = data + for key in path: + if not isinstance(current, dict) or key not in current: + return default + current = current[key] + return current diff --git a/lambdas/shared/src/common/get_service_url.py b/lambdas/shared/src/common/get_service_url.py index 0de3662b16..9188c07509 100644 --- a/lambdas/shared/src/common/get_service_url.py +++ b/lambdas/shared/src/common/get_service_url.py @@ -1,6 +1,6 @@ from typing import Optional -from constants import DEFAULT_BASE_PATH, PR_ENV_PREFIX +from common.constants import DEFAULT_BASE_PATH, PR_ENV_PREFIX def get_service_url(service_env: Optional[str], service_base_path: Optional[str]) -> str: diff --git a/lambdas/shared/tests/test_common/test_get_service_url.py b/lambdas/shared/tests/test_common/test_get_service_url.py new file mode 100644 index 0000000000..e7d7fc03c9 --- /dev/null +++ b/lambdas/shared/tests/test_common/test_get_service_url.py @@ -0,0 +1,29 @@ +"""Tests for the search_url_helper file""" + +import unittest + +from common.get_service_url import get_service_url + + +class TestServiceUrl(unittest.TestCase): + def test_get_service_url(self): + """it should create service url""" + test_cases = [ + ("pr-123", "https://internal-dev.api.service.nhs.uk/immunisation-fhir-api/FHIR/R4"), + (None, "https://internal-dev.api.service.nhs.uk/immunisation-fhir-api/FHIR/R4"), + ("preprod", "https://int.api.service.nhs.uk/immunisation-fhir-api/FHIR/R4"), + ("prod", "https://api.service.nhs.uk/immunisation-fhir-api/FHIR/R4"), + ("ref", "https://ref.api.service.nhs.uk/immunisation-fhir-api/FHIR/R4"), + ("internal-dev", "https://internal-dev.api.service.nhs.uk/immunisation-fhir-api/FHIR/R4"), + ("internal-qa", "https://internal-qa.api.service.nhs.uk/immunisation-fhir-api/FHIR/R4"), + ] + mock_base_path = "immunisation-fhir-api/FHIR/R4" + + for mock_env, expected in test_cases: + with self.subTest(mock_env=mock_env, expected=expected): + self.assertEqual(get_service_url(mock_env, mock_base_path), expected) + + def 
test_get_service_url_uses_default_path_when_not_provided(self): + self.assertEqual( + get_service_url(None, None), "https://internal-dev.api.service.nhs.uk/immunisation-fhir-api/FHIR/R4" + ) From 32330ccc347540b945244ba6bb19483e4482d83f Mon Sep 17 00:00:00 2001 From: Akol125 Date: Wed, 18 Feb 2026 09:54:08 +0000 Subject: [PATCH 03/31] make pds call and move pds details to shared folder --- .../src/exceptions/id_sync_exception.py | 6 ---- lambdas/id_sync/src/id_sync.py | 12 +++---- lambdas/id_sync/src/ieds_db_operations.py | 16 ++++----- lambdas/id_sync/src/pds_details.py | 31 ---------------- lambdas/id_sync/src/record_processor.py | 7 ++-- lambdas/id_sync/tests/test_id_sync.py | 18 +++++----- .../id_sync/tests/test_ieds_db_operations.py | 4 +-- .../id_sync/tests/test_record_processor.py | 4 +-- lambdas/mns_publisher/src/constants.py | 1 + .../mns_publisher/src/create_notification.py | 36 +++++++++++++------ .../src/{utils 2.py => utils.py} | 0 .../shared/src/common/api_clients/errors.py | 8 +++++ .../src/common/api_clients/get_pds_details.py | 34 ++++++++++++++++++ .../api_clients}/test_pds_details.py | 1 + 14 files changed, 101 insertions(+), 77 deletions(-) delete mode 100644 lambdas/id_sync/src/exceptions/id_sync_exception.py rename lambdas/mns_publisher/src/{utils 2.py => utils.py} (100%) create mode 100644 lambdas/shared/src/common/api_clients/get_pds_details.py rename lambdas/{id_sync/tests => shared/tests/test_common/api_clients}/test_pds_details.py (99%) diff --git a/lambdas/id_sync/src/exceptions/id_sync_exception.py b/lambdas/id_sync/src/exceptions/id_sync_exception.py deleted file mode 100644 index ef90615663..0000000000 --- a/lambdas/id_sync/src/exceptions/id_sync_exception.py +++ /dev/null @@ -1,6 +0,0 @@ -class IdSyncException(Exception): - """Custom exception for ID Sync errors.""" - - def __init__(self, message: str): - self.message = message - super().__init__(message) diff --git a/lambdas/id_sync/src/id_sync.py b/lambdas/id_sync/src/id_sync.py 
index 803417ff1c..51a1276f00 100644 --- a/lambdas/id_sync/src/id_sync.py +++ b/lambdas/id_sync/src/id_sync.py @@ -1,16 +1,16 @@ """ - Parses the incoming AWS event into `AwsLambdaEvent` and iterate its `records`. - Delegate each record to `process_record` and collect `nhs_number` from each result. -- If any record has status == "error" raise `IdSyncException` with aggregated nhs_numbers. -- Any unexpected error is wrapped into `IdSyncException(message="Error processing id_sync event")`. +- If any record has status == "error" raise `PdsSyncException` with aggregated nhs_numbers. +- Any unexpected error is wrapped into `PdsSyncException(message="Error processing id_sync event")`. """ from typing import Any, Dict +from common.api_clients.errors import PdsSyncException from common.aws_lambda_event import AwsLambdaEvent from common.clients import STREAM_NAME, logger from common.log_decorator import logging_decorator -from exceptions.id_sync_exception import IdSyncException from record_processor import process_record @@ -34,7 +34,7 @@ def handler(event_data: Dict[str, Any], _context) -> Dict[str, Any]: error_count += 1 if error_count > 0: - raise IdSyncException( + raise PdsSyncException( message=f"Processed {len(records)} records with {error_count} errors", ) @@ -43,10 +43,10 @@ def handler(event_data: Dict[str, Any], _context) -> Dict[str, Any]: logger.info("id_sync handler completed: %s", response) return response - except IdSyncException as e: + except PdsSyncException as e: logger.exception(f"id_sync error: {e.message}") raise except Exception: msg = "Error processing id_sync event" logger.exception(msg) - raise IdSyncException(message=msg) + raise PdsSyncException(message=msg) diff --git a/lambdas/id_sync/src/ieds_db_operations.py b/lambdas/id_sync/src/ieds_db_operations.py index ea308f92ca..47a7c75432 100644 --- a/lambdas/id_sync/src/ieds_db_operations.py +++ b/lambdas/id_sync/src/ieds_db_operations.py @@ -2,9 +2,9 @@ from boto3.dynamodb.conditions import Key 
+from common.api_clients.errors import PdsSyncException from common.aws_dynamodb import get_dynamodb_table from common.clients import get_dynamodb_client, logger -from exceptions.id_sync_exception import IdSyncException from os_vars import get_ieds_table_name from utils import make_status @@ -51,7 +51,7 @@ def ieds_update_patient_id(old_id: str, new_id: str, items_to_update: list) -> d except Exception as e: logger.exception("Error updating patient ID") - raise IdSyncException( + raise PdsSyncException( message="Error updating patient ID", ) from e @@ -60,16 +60,16 @@ def get_items_from_patient_id(id: str) -> list: """Public wrapper: build PatientPK and return all matching items. Delegates actual paging to the internal helper `_paginate_items_for_patient_pk`. - Raises IdSyncException on error. + Raises PdsSyncException on error. """ patient_pk = f"Patient#{id}" try: return paginate_items_for_patient_pk(patient_pk) - except IdSyncException: + except PdsSyncException: raise except Exception: logger.exception("Error querying items for patient PK") - raise IdSyncException( + raise PdsSyncException( message="Error querying items for patient PK", ) @@ -77,7 +77,7 @@ def get_items_from_patient_id(id: str) -> list: def paginate_items_for_patient_pk(patient_pk: str) -> list: """Internal helper that pages through the PatientGSI and returns all items. - Raises IdSyncException when the DynamoDB response is malformed. + Raises PdsSyncException when the DynamoDB response is malformed. 
""" all_items: list = [] last_evaluated_key = None @@ -92,9 +92,9 @@ def paginate_items_for_patient_pk(patient_pk: str) -> list: response = get_ieds_table().query(**query_args) if "Items" not in response: - # Unexpected DynamoDB response shape - surface as IdSyncException + # Unexpected DynamoDB response shape - surface as PdsSyncException logger.exception("Unexpected DynamoDB response: missing 'Items'") - raise IdSyncException( + raise PdsSyncException( message="No Items in DynamoDB response", ) diff --git a/lambdas/id_sync/src/pds_details.py b/lambdas/id_sync/src/pds_details.py index 40b9c50013..27492ceb7d 100644 --- a/lambdas/id_sync/src/pds_details.py +++ b/lambdas/id_sync/src/pds_details.py @@ -2,37 +2,6 @@ Operations related to PDS (Patient Demographic Service) """ -import tempfile - -from common.api_clients.authentication import AppRestrictedAuth, Service -from common.api_clients.pds_service import PdsService -from common.cache import Cache -from common.clients import get_secrets_manager_client, logger -from exceptions.id_sync_exception import IdSyncException -from os_vars import get_pds_env - -pds_env = get_pds_env() -safe_tmp_dir = tempfile.mkdtemp(dir="/tmp") # NOSONAR(S5443) - - -# Get Patient details from external service PDS using NHS number from MNS notification -def pds_get_patient_details(nhs_number: str) -> dict: - try: - cache = Cache(directory=safe_tmp_dir) - authenticator = AppRestrictedAuth( - service=Service.PDS, - secret_manager_client=get_secrets_manager_client(), - environment=pds_env, - cache=cache, - ) - pds_service = PdsService(authenticator, pds_env) - patient = pds_service.get_patient_details(nhs_number) - return patient - except Exception as e: - msg = "Error retrieving patient details from PDS" - logger.exception(msg) - raise IdSyncException(message=msg) from e - def get_nhs_number_from_pds_resource(pds_resource: dict) -> str: """Simple helper to get the NHS Number from a PDS Resource. 
No handling as this is a mandatory field in the PDS diff --git a/lambdas/id_sync/src/record_processor.py b/lambdas/id_sync/src/record_processor.py index fb4543bcc4..a0294d49d2 100644 --- a/lambdas/id_sync/src/record_processor.py +++ b/lambdas/id_sync/src/record_processor.py @@ -1,15 +1,16 @@ import json from typing import Any, Dict +from common.api_clients.errors import PdsSyncException +from common.api_clients.get_pds_details import pds_get_patient_details from common.clients import logger -from exceptions.id_sync_exception import IdSyncException from ieds_db_operations import ( IDENTIFIER_KEY, extract_patient_resource_from_item, get_items_from_patient_id, ieds_update_patient_id, ) -from pds_details import get_nhs_number_from_pds_resource, pds_get_patient_details +from pds_details import get_nhs_number_from_pds_resource from utils import make_status @@ -36,7 +37,7 @@ def process_record(event_record: Dict[str, Any]) -> Dict[str, Any]: def process_nhs_number(nhs_number: str) -> Dict[str, Any]: try: pds_patient_resource = pds_get_patient_details(nhs_number) - except IdSyncException as e: + except PdsSyncException as e: return make_status(str(e), status="error") if not pds_patient_resource: diff --git a/lambdas/id_sync/tests/test_id_sync.py b/lambdas/id_sync/tests/test_id_sync.py index 619ffd9818..673ad3f42b 100644 --- a/lambdas/id_sync/tests/test_id_sync.py +++ b/lambdas/id_sync/tests/test_id_sync.py @@ -3,7 +3,7 @@ with patch("common.log_decorator.logging_decorator") as mock_decorator: mock_decorator.return_value = lambda f: f # Pass-through decorator - from exceptions.id_sync_exception import IdSyncException + from common.api_clients.errors import PdsSyncException from id_sync import handler @@ -93,7 +93,7 @@ def test_handler_error_single_record(self): } # Call handler - with self.assertRaises(IdSyncException) as exception_context: + with self.assertRaises(PdsSyncException) as exception_context: handler(self.single_sqs_event, None) exception = 
exception_context.exception @@ -117,7 +117,7 @@ def test_handler_mixed_success_error(self): ] # Call handler - with self.assertRaises(IdSyncException) as exception_context: + with self.assertRaises(PdsSyncException) as exception_context: handler(self.multi_sqs_event, None) error = exception_context.exception @@ -139,7 +139,7 @@ def test_handler_all_records_fail(self): ] # Call handler - with self.assertRaises(IdSyncException) as exception_context: + with self.assertRaises(PdsSyncException) as exception_context: handler(self.multi_sqs_event, None) exception = exception_context.exception # Assertions @@ -187,7 +187,7 @@ def test_handler_aws_lambda_event_exception(self): self.mock_aws_lambda_event.side_effect = Exception("AwsLambdaEvent creation failed") # Call handler - with self.assertRaises(IdSyncException) as exception_context: + with self.assertRaises(PdsSyncException) as exception_context: handler(self.single_sqs_event, None) result = exception_context.exception @@ -208,7 +208,7 @@ def test_handler_process_record_exception(self): self.mock_process_record.side_effect = Exception("Process record failed") # Call handler - with self.assertRaises(IdSyncException) as exception_context: + with self.assertRaises(PdsSyncException) as exception_context: handler(self.single_sqs_event, None) exception = exception_context.exception # Assertions @@ -233,12 +233,12 @@ def test_handler_process_record_missing_nhs_number(self): } # Call handler and expect exception - with self.assertRaises(IdSyncException) as exception_context: + with self.assertRaises(PdsSyncException) as exception_context: handler(self.single_sqs_event, None) exception = exception_context.exception - self.assertIsInstance(exception, IdSyncException) + self.assertIsInstance(exception, PdsSyncException) self.assertEqual(exception.message, "Processed 1 records with 1 errors") self.mock_logger.exception.assert_called_once_with(f"id_sync error: {exception.message}") @@ -275,7 +275,7 @@ def 
test_handler_error_count_tracking(self): ] # Call handler - with self.assertRaises(IdSyncException) as exception_context: + with self.assertRaises(PdsSyncException) as exception_context: handler(self.multi_sqs_event, None) exception = exception_context.exception # Assertions - should track 2 errors out of 4 records diff --git a/lambdas/id_sync/tests/test_ieds_db_operations.py b/lambdas/id_sync/tests/test_ieds_db_operations.py index dd5fac54ac..39c29c8f3f 100644 --- a/lambdas/id_sync/tests/test_ieds_db_operations.py +++ b/lambdas/id_sync/tests/test_ieds_db_operations.py @@ -2,7 +2,7 @@ from unittest.mock import MagicMock, patch import ieds_db_operations -from exceptions.id_sync_exception import IdSyncException +from common.api_clients.errors import PdsSyncException from ieds_db_operations import extract_patient_resource_from_item @@ -419,7 +419,7 @@ def test_ieds_update_patient_id_update_exception(self): ieds_db_operations.ieds_update_patient_id(old_id, new_id, mock_items) exception = context.exception - self.assertIsInstance(exception, IdSyncException) + self.assertIsInstance(exception, PdsSyncException) self.assertEqual(exception.message, "Error updating patient ID") # Verify transact was attempted diff --git a/lambdas/id_sync/tests/test_record_processor.py b/lambdas/id_sync/tests/test_record_processor.py index 7af754e0ad..025abb8250 100644 --- a/lambdas/id_sync/tests/test_record_processor.py +++ b/lambdas/id_sync/tests/test_record_processor.py @@ -2,7 +2,7 @@ import unittest from unittest.mock import call, patch -from exceptions.id_sync_exception import IdSyncException +from common.api_clients.errors import PdsSyncException from record_processor import process_record @@ -194,7 +194,7 @@ def test_pds_details_exception_aborts_update(self): test_sqs_record = {"body": json.dumps({"subject": nhs_number})} # pds returns a different id to force update path self.mock_get_items_from_patient_id.return_value = [{"Resource": {}}] - 
self.mock_pds_get_patient_details.side_effect = IdSyncException("Error retrieving patient details from PDS") + self.mock_pds_get_patient_details.side_effect = PdsSyncException("Error retrieving patient details from PDS") result = process_record(test_sqs_record) self.assertEqual(result["status"], "error") diff --git a/lambdas/mns_publisher/src/constants.py b/lambdas/mns_publisher/src/constants.py index bf3b1c2d87..d2a861cce0 100644 --- a/lambdas/mns_publisher/src/constants.py +++ b/lambdas/mns_publisher/src/constants.py @@ -9,6 +9,7 @@ class SQSEventFields(Enum): IMMUNISATION_TYPE = IMMUNISATION_TYPE DATE_AND_TIME_KEY = "DATE_AND_TIME" + BIRTH_DATE_KEY = "PERSON_DOB" NHS_NUMBER_KEY = "NHS_NUMBER" IMMUNISATION_ID_KEY = "ImmsID" SOURCE_ORGANISATION_KEY = "SITE_CODE" diff --git a/lambdas/mns_publisher/src/create_notification.py b/lambdas/mns_publisher/src/create_notification.py index be8e27b57d..69a9e13523 100644 --- a/lambdas/mns_publisher/src/create_notification.py +++ b/lambdas/mns_publisher/src/create_notification.py @@ -1,15 +1,16 @@ +import datetime import json import os import uuid +from common.api_clients import get_patient_details_from_pds from common.get_service_url import get_service_url from constants import IMMUNISATION_TYPE, SPEC_VERSION, SQSEventFields from helper import find_imms_value_in_stream IMMUNIZATION_ENV = os.getenv("IMMUNIZATION_ENV") IMMUNIZATION_BASE_PATH = os.getenv("IMMUNIZATION_BASE_PATH") - -IMMUNIZATION_URL = get_service_url(IMMUNIZATION_ENV, IMMUNIZATION_BASE_PATH) +PDS_BASE_URL = os.getenv("PDS_BASE_URL") def create_mns_notification(sqs_event: dict) -> dict: @@ -20,27 +21,42 @@ def create_mns_notification(sqs_event: dict) -> dict: imms_data = {field: find_imms_value_in_stream(incoming_sqs_message, field.value) for field in SQSEventFields} + patient_age = calculate_age_at_vaccination( + imms_data[SQSEventFields.BIRTH_DATE_KEY], imms_data[SQSEventFields.DATE_AND_TIME_KEY] + ) + gp_ods_code = ( + 
get_patient_details_from_pds(imms_data[SQSEventFields.NHS_NUMBER_KEY], PDS_BASE_URL) + .get("generalPractitioner", [{}])[0] + .get("identifier", {}) + .get("value", "unknown") + ) + return { "specversion": SPEC_VERSION, "id": str(uuid.uuid4()), - "source": IMMUNIZATION_URL, + "source": immunisation_url, "type": IMMUNISATION_TYPE, "time": imms_data[SQSEventFields.DATE_AND_TIME_KEY], "subject": imms_data[SQSEventFields.NHS_NUMBER_KEY], "dataref": f"{immunisation_url}/Immunization/{imms_data[SQSEventFields.IMMUNISATION_ID_KEY]}", "filtering": { - "generalpractitioner": "fy4563", + "generalpractitioner": {gp_ods_code}, "sourceorganisation": imms_data[SQSEventFields.SOURCE_ORGANISATION_KEY], "sourceapplication": imms_data[SQSEventFields.SOURCE_APPLICATION_KEY], - "subjectage": "17", + "subjectage": str(patient_age), "immunisationtype": imms_data[SQSEventFields.VACCINE_TYPE], "action": imms_data[SQSEventFields.ACTION], }, } -def fetch_details_from_pds(nhs_number: str) -> dict: - """Fetch patient details from PDS using the NHS number.""" - # Placeholder for PDS integration logic - # This function would typically make an API call to PDS to retrieve patient details - return None +def calculate_age_at_vaccination(birth_date: str, vaccination_date: str) -> int: + """Calculate patient age in years at time of vaccination.""" + birth = datetime.fromisoformat(birth_date.replace("Z", "+00:00")) + vacc = datetime.fromisoformat(vaccination_date.replace("Z", "+00:00")) + + age = vacc.year - birth.year + if (vacc.month, vacc.day) < (birth.month, birth.day): + age -= 1 + + return age diff --git a/lambdas/mns_publisher/src/utils 2.py b/lambdas/mns_publisher/src/utils.py similarity index 100% rename from lambdas/mns_publisher/src/utils 2.py rename to lambdas/mns_publisher/src/utils.py diff --git a/lambdas/shared/src/common/api_clients/errors.py b/lambdas/shared/src/common/api_clients/errors.py index 5881839a66..531a7c4e41 100644 --- a/lambdas/shared/src/common/api_clients/errors.py +++ 
b/lambdas/shared/src/common/api_clients/errors.py @@ -157,6 +157,14 @@ def to_operation_outcome(self) -> dict: ) +class PdsSyncException(Exception): + """Custom exception for Pds Sync errors.""" + + def __init__(self, message: str): + self.message = message + super().__init__(message) + + def raise_error_response(response): error_mapping = { 401: (TokenValidationError, "Token validation failed for the request"), diff --git a/lambdas/shared/src/common/api_clients/get_pds_details.py b/lambdas/shared/src/common/api_clients/get_pds_details.py new file mode 100644 index 0000000000..63844b3cda --- /dev/null +++ b/lambdas/shared/src/common/api_clients/get_pds_details.py @@ -0,0 +1,34 @@ +""" +Operations related to PDS (Patient Demographic Service) +""" + +import os +import tempfile + +from common.api_clients.authentication import AppRestrictedAuth, Service +from common.api_clients.errors import PdsSyncException +from common.api_clients.pds_service import PdsService +from common.cache import Cache +from common.clients import get_secrets_manager_client, logger + +PDS_ENV = os.getenv("PDS_ENV", "int") +safe_tmp_dir = tempfile.mkdtemp(dir="/tmp") # NOSONAR(S5443) + + +# Get Patient details from external service PDS using NHS number from MNS notification +def pds_get_patient_details(nhs_number: str) -> dict: + try: + cache = Cache(directory=safe_tmp_dir) + authenticator = AppRestrictedAuth( + service=Service.PDS, + secret_manager_client=get_secrets_manager_client(), + environment=PDS_ENV, + cache=cache, + ) + pds_service = PdsService(authenticator, PDS_ENV) + patient = pds_service.get_patient_details(nhs_number) + return patient + except Exception as e: + msg = "Error retrieving patient details from PDS" + logger.exception(msg) + raise PdsSyncException(message=msg) from e diff --git a/lambdas/id_sync/tests/test_pds_details.py b/lambdas/shared/tests/test_common/api_clients/test_pds_details.py similarity index 99% rename from lambdas/id_sync/tests/test_pds_details.py rename to 
lambdas/shared/tests/test_common/api_clients/test_pds_details.py index 442cda106a..657ab2eb15 100644 --- a/lambdas/id_sync/tests/test_pds_details.py +++ b/lambdas/shared/tests/test_common/api_clients/test_pds_details.py @@ -2,6 +2,7 @@ from unittest.mock import MagicMock, patch from exceptions.id_sync_exception import IdSyncException + from pds_details import get_nhs_number_from_pds_resource, pds_get_patient_details From dcf31ef9c6d1aebf2e7d0c23a0c5a55f9c686ab7 Mon Sep 17 00:00:00 2001 From: Akol125 Date: Wed, 18 Feb 2026 21:21:07 +0000 Subject: [PATCH 04/31] fetch details from pds and add tests --- lambdas/mns_publisher/poetry.lock | 829 +++++++++++++++++- lambdas/mns_publisher/pyproject.toml | 6 + lambdas/mns_publisher/src/constants.py | 1 - .../mns_publisher/src/create_notification.py | 27 +- lambdas/mns_publisher/src/helper.py | 9 - lambdas/mns_publisher/src/lambda_handler.py | 65 +- .../src/mns_post_notification.py | 0 lambdas/mns_publisher/src/sqs_dynamo_utils.py | 50 ++ lambdas/mns_publisher/src/utils.py | 10 - .../tests/test_create_notification.py | 219 +++++ .../tests/test_lambda_handler.py | 14 +- 11 files changed, 1185 insertions(+), 45 deletions(-) delete mode 100644 lambdas/mns_publisher/src/helper.py create mode 100644 lambdas/mns_publisher/src/mns_post_notification.py create mode 100644 lambdas/mns_publisher/src/sqs_dynamo_utils.py create mode 100644 lambdas/mns_publisher/tests/test_create_notification.py diff --git a/lambdas/mns_publisher/poetry.lock b/lambdas/mns_publisher/poetry.lock index dd85f09247..0ec5203c51 100644 --- a/lambdas/mns_publisher/poetry.lock +++ b/lambdas/mns_publisher/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand. 
[[package]] name = "aws-lambda-typing" @@ -12,6 +12,290 @@ files = [ {file = "aws_lambda_typing-2.20.0-py3-none-any.whl", hash = "sha256:1d44264cabfeab5ac38e67ddd0c874e677b2cbbae77a42d0519df470e6bbb49b"}, ] +[[package]] +name = "boto3" +version = "1.42.51" +description = "The AWS SDK for Python" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "boto3-1.42.51-py3-none-any.whl", hash = "sha256:c3e75ab1c4df6b1049aecfae56d15f5ff99d68ec6a05f24741bab08ad5d5406e"}, + {file = "boto3-1.42.51.tar.gz", hash = "sha256:a010376cdc2432faa6c3338f04591142a1374da1b7eba94b80c0c7f1b525eff7"}, +] + +[package.dependencies] +botocore = ">=1.42.51,<1.43.0" +jmespath = ">=0.7.1,<2.0.0" +s3transfer = ">=0.16.0,<0.17.0" + +[package.extras] +crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] + +[[package]] +name = "botocore" +version = "1.42.51" +description = "Low-level, data-driven core of boto 3." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "botocore-1.42.51-py3-none-any.whl", hash = "sha256:216c4c148f37f882c7239fce1d8023acdc664643952ce1d6827c7edc829903d3"}, + {file = "botocore-1.42.51.tar.gz", hash = "sha256:d7b03905b8066c25dd5bde1b7dc4af15ebdbaa313abbb2543db179b1d5efae3d"}, +] + +[package.dependencies] +jmespath = ">=0.7.1,<2.0.0" +python-dateutil = ">=2.1,<3.0.0" +urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""} + +[package.extras] +crt = ["awscrt (==0.31.2)"] + +[[package]] +name = "cache" +version = "1.0.3" +description = "caching for humans" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "cache-1.0.3.tar.gz", hash = "sha256:ac063f2490c0794d5cf482bfff10b6339c441a6658f8f00fe653bd65b3ce85fb"}, +] + +[[package]] +name = "certifi" +version = "2026.1.4" +description = "Python package for providing Mozilla's CA Bundle." 
+optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c"}, + {file = "certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120"}, +] + +[[package]] +name = "cffi" +version = "2.0.0" +description = "Foreign Function Interface for Python calling C code." +optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "platform_python_implementation != \"PyPy\"" +files = [ + {file = "cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44"}, + {file = "cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:53f77cbe57044e88bbd5ed26ac1d0514d2acf0591dd6bb02a3ae37f76811b80c"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3e837e369566884707ddaf85fc1744b47575005c0a229de3327f8f9a20f4efeb"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5eda85d6d1879e692d546a078b44251cdd08dd1cfb98dfb77b670c97cee49ea0"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9332088d75dc3241c702d852d4671613136d90fa6881da7d770a483fd05248b4"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc7de24befaeae77ba923797c7c87834c73648a05a4bde34b3b7e5588973a453"}, + {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf364028c016c03078a23b503f02058f1814320a56ad535686f90565636a9495"}, + {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = 
"sha256:e11e82b744887154b182fd3e7e8512418446501191994dbf9c9fc1f32cc8efd5"}, + {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8ea985900c5c95ce9db1745f7933eeef5d314f0565b27625d9a10ec9881e1bfb"}, + {file = "cffi-2.0.0-cp310-cp310-win32.whl", hash = "sha256:1f72fb8906754ac8a2cc3f9f5aaa298070652a0ffae577e0ea9bd480dc3c931a"}, + {file = "cffi-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:b18a3ed7d5b3bd8d9ef7a8cb226502c6bf8308df1525e1cc676c3680e7176739"}, + {file = "cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe"}, + {file = "cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26"}, + {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9"}, + {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414"}, + {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743"}, + {file = "cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5"}, + {file = "cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5"}, + {file = "cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d"}, + {file = "cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d"}, + {file = "cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba"}, + {file = "cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94"}, + {file = "cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187"}, + {file = "cffi-2.0.0-cp312-cp312-win32.whl", hash = 
"sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18"}, + {file = "cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5"}, + {file = "cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6"}, + {file = "cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb"}, + {file = "cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26"}, + {file = "cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c"}, + {file = "cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b"}, + {file = "cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27"}, + {file = "cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = 
"sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75"}, + {file = "cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91"}, + {file = "cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5"}, + {file = "cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775"}, + {file = "cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205"}, + {file = "cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1"}, + {file = "cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f"}, + {file = "cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25"}, + {file = "cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad"}, + {file = "cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9"}, + {file = 
"cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592"}, + {file = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512"}, + {file = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4"}, + {file = "cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e"}, + {file = "cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6"}, + {file = "cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9"}, + {file = "cffi-2.0.0-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:fe562eb1a64e67dd297ccc4f5addea2501664954f2692b69a76449ec7913ecbf"}, + {file = "cffi-2.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:de8dad4425a6ca6e4e5e297b27b5c824ecc7581910bf9aee86cb6835e6812aa7"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:4647afc2f90d1ddd33441e5b0e85b16b12ddec4fca55f0d9671fef036ecca27c"}, + {file = 
"cffi-2.0.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3f4d46d8b35698056ec29bca21546e1551a205058ae1a181d871e278b0b28165"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:e6e73b9e02893c764e7e8d5bb5ce277f1a009cd5243f8228f75f842bf937c534"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:cb527a79772e5ef98fb1d700678fe031e353e765d1ca2d409c92263c6d43e09f"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:61d028e90346df14fedc3d1e5441df818d095f3b87d286825dfcbd6459b7ef63"}, + {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0f6084a0ea23d05d20c3edcda20c3d006f9b6f3fefeac38f59262e10cef47ee2"}, + {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1cd13c99ce269b3ed80b417dcd591415d3372bcac067009b6e0f59c7d4015e65"}, + {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:89472c9762729b5ae1ad974b777416bfda4ac5642423fa93bd57a09204712322"}, + {file = "cffi-2.0.0-cp39-cp39-win32.whl", hash = "sha256:2081580ebb843f759b9f617314a24ed5738c51d2aee65d31e02f6f7a2b97707a"}, + {file = "cffi-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:b882b3df248017dba09d6b16defe9b5c407fe32fc7c65a9c69798e6175601be9"}, + {file = "cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529"}, +] + +[package.dependencies] +pycparser = {version = "*", markers = "implementation_name != \"PyPy\""} + +[[package]] +name = "charset-normalizer" +version = "3.4.4" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e824f1492727fa856dd6eda4f7cee25f8518a12f3c4a56a74e8095695089cf6d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4bd5d4137d500351a30687c2d3971758aac9a19208fc110ccb9d7188fbe709e8"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:027f6de494925c0ab2a55eab46ae5129951638a49a34d87f4c3eda90f696b4ad"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f820802628d2694cb7e56db99213f930856014862f3fd943d290ea8438d07ca8"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:798d75d81754988d2565bff1b97ba5a44411867c0cf32b77a7e8f8d84796b10d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d1bb833febdff5c8927f922386db610b49db6e0d4f4ee29601d71e7c2694313"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9cd98cdc06614a2f768d2b7286d66805f94c48cde050acdbbb7db2600ab3197e"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:077fbb858e903c73f6c9db43374fd213b0b6a778106bc7032446a8e8b5b38b93"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:244bfb999c71b35de57821b8ea746b24e863398194a4014e4c76adc2bbdfeff0"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:64b55f9dce520635f018f907ff1b0df1fdc31f2795a922fb49dd14fbcdf48c84"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_riscv64.whl", hash = 
"sha256:faa3a41b2b66b6e50f84ae4a68c64fcd0c44355741c6374813a800cd6695db9e"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6515f3182dbe4ea06ced2d9e8666d97b46ef4c75e326b79bb624110f122551db"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc00f04ed596e9dc0da42ed17ac5e596c6ccba999ba6bd92b0e0aef2f170f2d6"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win32.whl", hash = "sha256:f34be2938726fc13801220747472850852fe6b1ea75869a048d6f896838c896f"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:a61900df84c667873b292c3de315a786dd8dac506704dea57bc957bd31e22c7d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win_arm64.whl", hash = "sha256:cead0978fc57397645f12578bfd2d5ea9138ea0fac82b2f63f7f7c6877986a69"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash 
= "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = 
"sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ce8a0633f41a967713a59c4139d29110c07e826d131a316b50ce11b1d79b4f84"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaabd426fe94daf8fd157c32e571c85cb12e66692f15516a83a03264b08d06c3"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c4ef880e27901b6cc782f1b95f82da9313c0eb95c3af699103088fa0ac3ce9ac"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2aaba3b0819274cc41757a1da876f810a3e4d7b6eb25699253a4effef9e8e4af"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:778d2e08eda00f4256d7f672ca9fef386071c9202f5e4607920b86d7803387f2"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f155a433c2ec037d4e8df17d18922c3a0d9b3232a396690f17175d2946f0218d"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a8bf8d0f749c5757af2142fe7903a9df1d2e8aa3841559b2bad34b08d0e2bcf3"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:194f08cbb32dc406d6e1aea671a68be0823673db2832b38405deba2fb0d88f63"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:6aee717dcfead04c6eb1ce3bd29ac1e22663cdea57f943c87d1eab9a025438d7"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:cd4b7ca9984e5e7985c12bc60a6f173f3c958eae74f3ef6624bb6b26e2abbae4"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_riscv64.whl", hash = 
"sha256:b7cf1017d601aa35e6bb650b6ad28652c9cd78ee6caff19f3c28d03e1c80acbf"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:e912091979546adf63357d7e2ccff9b44f026c075aeaf25a52d0e95ad2281074"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5cb4d72eea50c8868f5288b7f7f33ed276118325c1dfd3957089f6b519e1382a"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-win32.whl", hash = "sha256:837c2ce8c5a65a2035be9b3569c684358dfbf109fd3b6969630a87535495ceaa"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:44c2a8734b333e0578090c4cd6b16f275e07aa6614ca8715e6c038e865e70576"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a9768c477b9d7bd54bc0c86dbaebdec6f03306675526c9927c0e8a04e8f94af9"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bee1e43c28aa63cb16e5c14e582580546b08e535299b8b6158a7c9c768a1f3d"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fd44c878ea55ba351104cb93cc85e74916eb8fa440ca7903e57575e97394f608"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0f04b14ffe5fdc8c4933862d8306109a2c51e0704acfa35d51598eb45a1e89fc"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:cd09d08005f958f370f539f186d10aec3377d55b9eeb0d796025d4886119d76e"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4fe7859a4e3e8457458e2ff592f15ccb02f3da787fcd31e0183879c3ad4692a1"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:fa09f53c465e532f4d3db095e0c55b615f010ad81803d383195b6b5ca6cbf5f3"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7fa17817dc5625de8a027cb8b26d9fefa3ea28c8253929b8d6649e705d2835b6"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:5947809c8a2417be3267efc979c47d76a079758166f7d43ef5ae8e9f92751f88"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:4902828217069c3c5c71094537a8e623f5d097858ac6ca8252f7b4d10b7560f1"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:7c308f7e26e4363d79df40ca5b2be1c6ba9f02bdbccfed5abddb7859a6ce72cf"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2c9d3c380143a1fedbff95a312aa798578371eb29da42106a29019368a475318"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb01158d8b88ee68f15949894ccc6712278243d95f344770fa7593fa2d94410c"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win32.whl", hash = "sha256:2677acec1a2f8ef614c6888b5b4ae4060cc184174a938ed4e8ef690e15d3e505"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:f8e160feb2aed042cd657a72acc0b481212ed28b1b9a95c0cee1621b524e1966"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win_arm64.whl", hash = "sha256:b5d84d37db046c5ca74ee7bb47dd6cbc13f80665fdde3e8040bdd3fb015ecb50"}, + {file = "charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f"}, + {file = "charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a"}, +] + [[package]] name = "coverage" version = "7.13.4" @@ -131,7 +415,548 @@ files = [ [package.extras] toml = ["tomli ; python_full_version <= \"3.11.0a6\""] +[[package]] +name = "cryptography" +version = "46.0.5" +description = "cryptography is a package which provides cryptographic 
recipes and primitives to Python developers." +optional = false +python-versions = "!=3.9.0,!=3.9.1,>=3.8" +groups = ["main"] +files = [ + {file = "cryptography-46.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:351695ada9ea9618b3500b490ad54c739860883df6c1f555e088eaf25b1bbaad"}, + {file = "cryptography-46.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c18ff11e86df2e28854939acde2d003f7984f721eba450b56a200ad90eeb0e6b"}, + {file = "cryptography-46.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d7e3d356b8cd4ea5aff04f129d5f66ebdc7b6f8eae802b93739ed520c47c79b"}, + {file = "cryptography-46.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:50bfb6925eff619c9c023b967d5b77a54e04256c4281b0e21336a130cd7fc263"}, + {file = "cryptography-46.0.5-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:803812e111e75d1aa73690d2facc295eaefd4439be1023fefc4995eaea2af90d"}, + {file = "cryptography-46.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ee190460e2fbe447175cda91b88b84ae8322a104fc27766ad09428754a618ed"}, + {file = "cryptography-46.0.5-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:f145bba11b878005c496e93e257c1e88f154d278d2638e6450d17e0f31e558d2"}, + {file = "cryptography-46.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e9251e3be159d1020c4030bd2e5f84d6a43fe54b6c19c12f51cde9542a2817b2"}, + {file = "cryptography-46.0.5-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:47fb8a66058b80e509c47118ef8a75d14c455e81ac369050f20ba0d23e77fee0"}, + {file = "cryptography-46.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:4c3341037c136030cb46e4b1e17b7418ea4cbd9dd207e4a6f3b2b24e0d4ac731"}, + {file = "cryptography-46.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:890bcb4abd5a2d3f852196437129eb3667d62630333aacc13dfd470fad3aaa82"}, + {file = "cryptography-46.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = 
"sha256:80a8d7bfdf38f87ca30a5391c0c9ce4ed2926918e017c29ddf643d0ed2778ea1"}, + {file = "cryptography-46.0.5-cp311-abi3-win32.whl", hash = "sha256:60ee7e19e95104d4c03871d7d7dfb3d22ef8a9b9c6778c94e1c8fcc8365afd48"}, + {file = "cryptography-46.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:38946c54b16c885c72c4f59846be9743d699eee2b69b6988e0a00a01f46a61a4"}, + {file = "cryptography-46.0.5-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:94a76daa32eb78d61339aff7952ea819b1734b46f73646a07decb40e5b3448e2"}, + {file = "cryptography-46.0.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5be7bf2fb40769e05739dd0046e7b26f9d4670badc7b032d6ce4db64dddc0678"}, + {file = "cryptography-46.0.5-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe346b143ff9685e40192a4960938545c699054ba11d4f9029f94751e3f71d87"}, + {file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:c69fd885df7d089548a42d5ec05be26050ebcd2283d89b3d30676eb32ff87dee"}, + {file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:8293f3dea7fc929ef7240796ba231413afa7b68ce38fd21da2995549f5961981"}, + {file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:1abfdb89b41c3be0365328a410baa9df3ff8a9110fb75e7b52e66803ddabc9a9"}, + {file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:d66e421495fdb797610a08f43b05269e0a5ea7f5e652a89bfd5a7d3c1dee3648"}, + {file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:4e817a8920bfbcff8940ecfd60f23d01836408242b30f1a708d93198393a80b4"}, + {file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:68f68d13f2e1cb95163fa3b4db4bf9a159a418f5f6e7242564fc75fcae667fd0"}, + {file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a3d1fae9863299076f05cb8a778c467578262fae09f9dc0ee9b12eb4268ce663"}, + {file = 
"cryptography-46.0.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c4143987a42a2397f2fc3b4d7e3a7d313fbe684f67ff443999e803dd75a76826"}, + {file = "cryptography-46.0.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:7d731d4b107030987fd61a7f8ab512b25b53cef8f233a97379ede116f30eb67d"}, + {file = "cryptography-46.0.5-cp314-cp314t-win32.whl", hash = "sha256:c3bcce8521d785d510b2aad26ae2c966092b7daa8f45dd8f44734a104dc0bc1a"}, + {file = "cryptography-46.0.5-cp314-cp314t-win_amd64.whl", hash = "sha256:4d8ae8659ab18c65ced284993c2265910f6c9e650189d4e3f68445ef82a810e4"}, + {file = "cryptography-46.0.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4108d4c09fbbf2789d0c926eb4152ae1760d5a2d97612b92d508d96c861e4d31"}, + {file = "cryptography-46.0.5-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1f30a86d2757199cb2d56e48cce14deddf1f9c95f1ef1b64ee91ea43fe2e18"}, + {file = "cryptography-46.0.5-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:039917b0dc418bb9f6edce8a906572d69e74bd330b0b3fea4f79dab7f8ddd235"}, + {file = "cryptography-46.0.5-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ba2a27ff02f48193fc4daeadf8ad2590516fa3d0adeeb34336b96f7fa64c1e3a"}, + {file = "cryptography-46.0.5-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:61aa400dce22cb001a98014f647dc21cda08f7915ceb95df0c9eaf84b4b6af76"}, + {file = "cryptography-46.0.5-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ce58ba46e1bc2aac4f7d9290223cead56743fa6ab94a5d53292ffaac6a91614"}, + {file = "cryptography-46.0.5-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:420d0e909050490d04359e7fdb5ed7e667ca5c3c402b809ae2563d7e66a92229"}, + {file = "cryptography-46.0.5-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:582f5fcd2afa31622f317f80426a027f30dc792e9c80ffee87b993200ea115f1"}, + {file = "cryptography-46.0.5-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:bfd56bb4b37ed4f330b82402f6f435845a5f5648edf1ad497da51a8452d5d62d"}, + 
{file = "cryptography-46.0.5-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:a3d507bb6a513ca96ba84443226af944b0f7f47dcc9a399d110cd6146481d24c"}, + {file = "cryptography-46.0.5-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f16fbdf4da055efb21c22d81b89f155f02ba420558db21288b3d0035bafd5f4"}, + {file = "cryptography-46.0.5-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ced80795227d70549a411a4ab66e8ce307899fad2220ce5ab2f296e687eacde9"}, + {file = "cryptography-46.0.5-cp38-abi3-win32.whl", hash = "sha256:02f547fce831f5096c9a567fd41bc12ca8f11df260959ecc7c3202555cc47a72"}, + {file = "cryptography-46.0.5-cp38-abi3-win_amd64.whl", hash = "sha256:556e106ee01aa13484ce9b0239bca667be5004efb0aabbed28d353df86445595"}, + {file = "cryptography-46.0.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:3b4995dc971c9fb83c25aa44cf45f02ba86f71ee600d81091c2f0cbae116b06c"}, + {file = "cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bc84e875994c3b445871ea7181d424588171efec3e185dced958dad9e001950a"}, + {file = "cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2ae6971afd6246710480e3f15824ed3029a60fc16991db250034efd0b9fb4356"}, + {file = "cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d861ee9e76ace6cf36a6a89b959ec08e7bc2493ee39d07ffe5acb23ef46d27da"}, + {file = "cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:2b7a67c9cd56372f3249b39699f2ad479f6991e62ea15800973b956f4b73e257"}, + {file = "cryptography-46.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:8456928655f856c6e1533ff59d5be76578a7157224dbd9ce6872f25055ab9ab7"}, + {file = "cryptography-46.0.5.tar.gz", hash = "sha256:abace499247268e3757271b2f1e244b36b06f8515cf27c4d49468fc9eb16e93d"}, +] + +[package.dependencies] +cffi = {version = ">=2.0.0", markers = "python_full_version >= \"3.9.0\" and platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx 
(>=5.3.0)", "sphinx-inline-tabs", "sphinx-rtd-theme (>=3.0.0)"] +docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"] +nox = ["nox[uv] (>=2024.4.15)"] +pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.14)", "ruff (>=0.11.11)"] +sdist = ["build (>=1.0.0)"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi (>=2024)", "cryptography-vectors (==46.0.5)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "idna" +version = "3.11" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea"}, + {file = "idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "jinja2" +version = "3.1.6" +description = "A very fast and expressive template engine." 
+optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, + {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jmespath" +version = "1.1.0" +description = "JSON Matching Expressions" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "jmespath-1.1.0-py3-none-any.whl", hash = "sha256:a5663118de4908c91729bea0acadca56526eb2698e83de10cd116ae0f4e97c64"}, + {file = "jmespath-1.1.0.tar.gz", hash = "sha256:472c87d80f36026ae83c6ddd0f1d05d4e510134ed462851fd5f754c8c3cbb88d"}, +] + +[[package]] +name = "markupsafe" +version = "3.0.3" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "markupsafe-3.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f981d352f04553a7171b8e44369f2af4055f888dfb147d55e42d29e29e74559"}, + {file = "markupsafe-3.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c1493fb6e50ab01d20a22826e57520f1284df32f2d8601fdd90b6304601419"}, + {file = "markupsafe-3.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ba88449deb3de88bd40044603fafffb7bc2b055d626a330323a9ed736661695"}, + {file = "markupsafe-3.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f42d0984e947b8adf7dd6dde396e720934d12c506ce84eea8476409563607591"}, + {file = "markupsafe-3.0.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c0c0b3ade1c0b13b936d7970b1d37a57acde9199dc2aecc4c336773e1d86049c"}, + {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:0303439a41979d9e74d18ff5e2dd8c43ed6c6001fd40e5bf2e43f7bd9bbc523f"}, + {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:d2ee202e79d8ed691ceebae8e0486bd9a2cd4794cec4824e1c99b6f5009502f6"}, + {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:177b5253b2834fe3678cb4a5f0059808258584c559193998be2601324fdeafb1"}, + {file = "markupsafe-3.0.3-cp310-cp310-win32.whl", hash = "sha256:2a15a08b17dd94c53a1da0438822d70ebcd13f8c3a95abe3a9ef9f11a94830aa"}, + {file = "markupsafe-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:c4ffb7ebf07cfe8931028e3e4c85f0357459a3f9f9490886198848f4fa002ec8"}, + {file = "markupsafe-3.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:e2103a929dfa2fcaf9bb4e7c091983a49c9ac3b19c9061b6d5427dd7d14d81a1"}, + {file = "markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad"}, + {file = "markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a"}, + {file = "markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50"}, + {file = "markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf"}, + {file = "markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f"}, + {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a"}, + {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115"}, + {file = 
"markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a"}, + {file = "markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19"}, + {file = "markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01"}, + {file = "markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c"}, + {file = "markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e"}, + {file = "markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce"}, + {file = "markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d"}, + {file = "markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d"}, + {file = "markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a"}, + {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b"}, + {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f"}, + {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b"}, + {file = "markupsafe-3.0.3-cp312-cp312-win32.whl", hash = 
"sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d"}, + {file = "markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c"}, + {file = "markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f"}, + {file = "markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795"}, + {file = "markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219"}, + {file = "markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6"}, + {file = "markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676"}, + {file = "markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9"}, + {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1"}, + {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc"}, + {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12"}, + {file = "markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed"}, + {file = "markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5"}, + {file = "markupsafe-3.0.3-cp313-cp313-win_arm64.whl", 
hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485"}, + {file = "markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73"}, + {file = "markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37"}, + {file = "markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19"}, + {file = "markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025"}, + {file = "markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6"}, + {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f"}, + {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb"}, + {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009"}, + {file = "markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354"}, + {file = "markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218"}, + {file = "markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287"}, + {file = "markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe"}, + {file = 
"markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026"}, + {file = "markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737"}, + {file = "markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97"}, + {file = "markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d"}, + {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda"}, + {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf"}, + {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe"}, + {file = "markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9"}, + {file = "markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581"}, + {file = "markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4"}, + {file = "markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab"}, + {file = "markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175"}, + {file = "markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634"}, + {file = "markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50"}, + {file = "markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e"}, + {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5"}, + {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523"}, + {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc"}, + {file = "markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d"}, + {file = "markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9"}, + {file = "markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa"}, + {file = "markupsafe-3.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15d939a21d546304880945ca1ecb8a039db6b4dc49b2c5a400387cdae6a62e26"}, + {file = "markupsafe-3.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f71a396b3bf33ecaa1626c255855702aca4d3d9fea5e051b41ac59a9c1c41edc"}, + {file = "markupsafe-3.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f4b68347f8c5eab4a13419215bdfd7f8c9b19f2b25520968adfad23eb0ce60c"}, + {file = "markupsafe-3.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e8fc20152abba6b83724d7ff268c249fa196d8259ff481f3b1476383f8f24e42"}, + 
{file = "markupsafe-3.0.3-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:949b8d66bc381ee8b007cd945914c721d9aba8e27f71959d750a46f7c282b20b"}, + {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:3537e01efc9d4dccdf77221fb1cb3b8e1a38d5428920e0657ce299b20324d758"}, + {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:591ae9f2a647529ca990bc681daebdd52c8791ff06c2bfa05b65163e28102ef2"}, + {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a320721ab5a1aba0a233739394eb907f8c8da5c98c9181d1161e77a0c8e36f2d"}, + {file = "markupsafe-3.0.3-cp39-cp39-win32.whl", hash = "sha256:df2449253ef108a379b8b5d6b43f4b1a8e81a061d6537becd5582fba5f9196d7"}, + {file = "markupsafe-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:7c3fb7d25180895632e5d3148dbdc29ea38ccb7fd210aa27acbd1201a1902c6e"}, + {file = "markupsafe-3.0.3-cp39-cp39-win_arm64.whl", hash = "sha256:38664109c14ffc9e7437e86b4dceb442b0096dfe3541d7864d9cbe1da4cf36c8"}, + {file = "markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698"}, +] + +[[package]] +name = "moto" +version = "5.1.21" +description = "A library that allows you to easily mock out tests based on AWS infrastructure" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "moto-5.1.21-py3-none-any.whl", hash = "sha256:311a30095b08b39dd2707f161f1440d361684fe0090b9fd0751dfd1c9b022445"}, + {file = "moto-5.1.21.tar.gz", hash = "sha256:713dde46e71e2714fa9a29eec513ec618d35e1d84c256331b5aab3f30692feeb"}, +] + +[package.dependencies] +boto3 = ">=1.9.201" +botocore = ">=1.20.88,<1.35.45 || >1.35.45,<1.35.46 || >1.35.46" +cryptography = ">=35.0.0" +Jinja2 = ">=2.10.1" +python-dateutil = ">=2.1,<3.0.0" +requests = ">=2.5" +responses = ">=0.15.0,<0.25.5 || >0.25.5" +werkzeug = ">=0.5,<2.2.0 || >2.2.0,<2.2.1 || >2.2.1" +xmltodict = "*" + +[package.extras] +all = ["PyYAML (>=5.1)", 
"antlr4-python3-runtime", "aws-sam-translator (<=1.103.0)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0,<=1.41.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsonpath_ng", "jsonschema", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.6.3)", "pydantic (<=2.12.4)", "pyparsing (>=3.0.7)", "setuptools"] +apigateway = ["PyYAML (>=5.1)", "joserfc (>=0.9.0)", "openapi-spec-validator (>=0.5.0)"] +apigatewayv2 = ["PyYAML (>=5.1)", "openapi-spec-validator (>=0.5.0)"] +appsync = ["graphql-core"] +awslambda = ["docker (>=3.0.0)"] +batch = ["docker (>=3.0.0)"] +cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0,<=1.41.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.6.3)", "pyparsing (>=3.0.7)", "setuptools"] +cognitoidp = ["joserfc (>=0.9.0)"] +dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.6.3)"] +dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.6.3)"] +events = ["jsonpath_ng"] +glue = ["pyparsing (>=3.0.7)"] +proxy = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-sam-translator (<=1.103.0)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0,<=1.41.0)", "docker (>=2.5.1)", "graphql-core", "joserfc (>=0.9.0)", "jsonpath_ng", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.6.3)", "pydantic (<=2.12.4)", "pyparsing (>=3.0.7)", "setuptools"] +quicksight = ["jsonschema"] +resourcegroupstaggingapi = ["PyYAML (>=5.1)", "cfn-lint (>=0.40.0,<=1.41.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.6.3)", "pyparsing (>=3.0.7)"] +s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.6.3)"] +s3crc32c = ["PyYAML (>=5.1)", "crc32c", "py-partiql-parser (==0.6.3)"] +server = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-sam-translator (<=1.103.0)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0,<=1.41.0)", 
"docker (>=3.0.0)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "joserfc (>=0.9.0)", "jsonpath_ng", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.6.3)", "pydantic (<=2.12.4)", "pyparsing (>=3.0.7)", "setuptools"] +ssm = ["PyYAML (>=5.1)"] +stepfunctions = ["antlr4-python3-runtime", "jsonpath_ng"] +xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] + +[[package]] +name = "mypy-boto3-dynamodb" +version = "1.42.41" +description = "Type annotations for boto3 DynamoDB 1.42.41 service generated with mypy-boto3-builder 8.12.0" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "mypy_boto3_dynamodb-1.42.41-py3-none-any.whl", hash = "sha256:0e0f33d9babb17e7b1308e0dba3dcf1145115a0ceb354b5426e44cc68c44a5a1"}, + {file = "mypy_boto3_dynamodb-1.42.41.tar.gz", hash = "sha256:6102c5ecf25b1ef485274ca9c6af79eb76f66200cd075515edd2b96565f9892d"}, +] + +[package.dependencies] +typing-extensions = {version = "*", markers = "python_version < \"3.12\""} + +[[package]] +name = "pycparser" +version = "3.0" +description = "C parser in Python" +optional = false +python-versions = ">=3.10" +groups = ["main"] +markers = "platform_python_implementation != \"PyPy\" and implementation_name != \"PyPy\"" +files = [ + {file = "pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992"}, + {file = "pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29"}, +] + +[[package]] +name = "pyjwt" +version = "2.11.0" +description = "JSON Web Token implementation in Python" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pyjwt-2.11.0-py3-none-any.whl", hash = "sha256:94a6bde30eb5c8e04fee991062b534071fd1439ef58d2adc9ccb823e7bcd0469"}, + {file = "pyjwt-2.11.0.tar.gz", hash = "sha256:35f95c1f0fbe5d5ba6e43f00271c275f7a1a4db1dab27bf708073b75318ea623"}, +] + +[package.extras] +crypto = ["cryptography (>=3.4.0)"] 
+dev = ["coverage[toml] (==7.10.7)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=8.4.2,<9.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==7.10.7)", "pytest (>=8.4.2,<9.0.0)"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pyyaml" +version = "6.0.3" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "PyYAML-6.0.3-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:c2514fceb77bc5e7a2f7adfaa1feb2fb311607c9cb518dbc378688ec73d8292f"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c57bb8c96f6d1808c030b1687b9b5fb476abaa47f0db9c0101f5e9f394e97f4"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efd7b85f94a6f21e4932043973a7ba2613b059c4a000551892ac9f1d11f5baf3"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22ba7cfcad58ef3ecddc7ed1db3409af68d023b7f940da23c6c2a1890976eda6"}, + {file = "PyYAML-6.0.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6344df0d5755a2c9a276d4473ae6b90647e216ab4757f8426893b5dd2ac3f369"}, + {file = "PyYAML-6.0.3-cp38-cp38-win32.whl", hash = "sha256:3ff07ec89bae51176c0549bc4c63aa6202991da2d9a6129d7aef7f1407d3f295"}, + {file = 
"PyYAML-6.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:5cf4e27da7e3fbed4d6c3d8e797387aaad68102272f8f9752883bc32d61cb87b"}, + {file = "pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b"}, + {file = "pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b"}, + {file = "pyyaml-6.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0"}, + {file = "pyyaml-6.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69"}, + {file = "pyyaml-6.0.3-cp310-cp310-win32.whl", hash = "sha256:28c8d926f98f432f88adc23edf2e6d4921ac26fb084b028c733d01868d19007e"}, + {file = "pyyaml-6.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:bdb2c67c6c1390b63c6ff89f210c8fd09d9a1217a465701eac7316313c915e4c"}, + {file = "pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e"}, + {file = "pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d"}, + {file = "pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a"}, + {file = "pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4"}, + {file = "pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b"}, + {file = "pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf"}, + {file = "pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196"}, + {file = "pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc"}, + {file = "pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e"}, + {file = "pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea"}, + {file = "pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5"}, + {file = "pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b"}, + {file = "pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd"}, + {file = "pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8"}, + {file = "pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6"}, + {file = "pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6"}, + {file = "pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be"}, + {file = "pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26"}, + {file = "pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = 
"sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c"}, + {file = "pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb"}, + {file = "pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac"}, + {file = "pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5"}, + {file = "pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764"}, + {file = "pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35"}, + {file = "pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac"}, + {file = "pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3"}, + {file = "pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3"}, + {file = "pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba"}, + {file = 
"pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c"}, + {file = "pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065"}, + {file = "pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65"}, + {file = "pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9"}, + {file = "pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b"}, + {file = "pyyaml-6.0.3-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:b865addae83924361678b652338317d1bd7e79b1f4596f96b96c77a5a34b34da"}, + {file = "pyyaml-6.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c3355370a2c156cffb25e876646f149d5d68f5e0a3ce86a5084dd0b64a994917"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3c5677e12444c15717b902a5798264fa7909e41153cdf9ef7ad571b704a63dd9"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5ed875a24292240029e4483f9d4a4b8a1ae08843b9c54f43fcc11e404532a8a5"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0150219816b6a1fa26fb4699fb7daa9caf09eb1999f3b70fb6e786805e80375a"}, + {file = 
"pyyaml-6.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fa160448684b4e94d80416c0fa4aac48967a969efe22931448d853ada8baf926"}, + {file = "pyyaml-6.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:27c0abcb4a5dac13684a37f76e701e054692a9b2d3064b70f5e4eb54810553d7"}, + {file = "pyyaml-6.0.3-cp39-cp39-win32.whl", hash = "sha256:1ebe39cb5fc479422b83de611d14e2c0d3bb2a18bbcb01f229ab3cfbd8fee7a0"}, + {file = "pyyaml-6.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:2e71d11abed7344e42a8849600193d15b6def118602c4c176f748e4583246007"}, + {file = "pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f"}, +] + +[[package]] +name = "requests" +version = "2.32.5" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6"}, + {file = "requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset_normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "responses" +version = "0.25.8" +description = "A utility library for mocking out the `requests` Python library." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "responses-0.25.8-py3-none-any.whl", hash = "sha256:0c710af92def29c8352ceadff0c3fe340ace27cf5af1bbe46fb71275bcd2831c"}, + {file = "responses-0.25.8.tar.gz", hash = "sha256:9374d047a575c8f781b94454db5cab590b6029505f488d12899ddb10a4af1cf4"}, +] + +[package.dependencies] +pyyaml = "*" +requests = ">=2.30.0,<3.0" +urllib3 = ">=1.25.10,<3.0" + +[package.extras] +tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli ; python_version < \"3.11\"", "tomli-w", "types-PyYAML", "types-requests"] + +[[package]] +name = "s3transfer" +version = "0.16.0" +description = "An Amazon S3 Transfer Manager" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "s3transfer-0.16.0-py3-none-any.whl", hash = "sha256:18e25d66fed509e3868dc1572b3f427ff947dd2c56f844a5bf09481ad3f3b2fe"}, + {file = "s3transfer-0.16.0.tar.gz", hash = "sha256:8e990f13268025792229cd52fa10cb7163744bf56e719e0b9cb925ab79abf920"}, +] + +[package.dependencies] +botocore = ">=1.37.4,<2.0a.0" + +[package.extras] +crt = ["botocore[crt] (>=1.37.4,<2.0a.0)"] + +[[package]] +name = "six" +version = "1.17.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +files = [ + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +description = "Backported and Experimental Type Hints for Python 3.9+" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, 
+ {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, +] + +[[package]] +name = "urllib3" +version = "2.6.3" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4"}, + {file = "urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed"}, +] + +[package.extras] +brotli = ["brotli (>=1.2.0) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=1.2.0.0) ; platform_python_implementation != \"CPython\""] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["backports-zstd (>=1.0.0) ; python_version < \"3.14\""] + +[[package]] +name = "werkzeug" +version = "3.1.5" +description = "The comprehensive WSGI web application library." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "werkzeug-3.1.5-py3-none-any.whl", hash = "sha256:5111e36e91086ece91f93268bb39b4a35c1e6f1feac762c9c822ded0a4e322dc"}, + {file = "werkzeug-3.1.5.tar.gz", hash = "sha256:6a548b0e88955dd07ccb25539d7d0cc97417ee9e179677d22c7041c8f078ce67"}, +] + +[package.dependencies] +markupsafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog (>=2.3)"] + +[[package]] +name = "xmltodict" +version = "1.0.3" +description = "Makes working with XML feel like you are working with JSON" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "xmltodict-1.0.3-py3-none-any.whl", hash = "sha256:35d65d5c08f2a1121df338a0c4e49ca638480fa7c1b899ded45e0759bf32e40e"}, + {file = "xmltodict-1.0.3.tar.gz", hash = "sha256:3bf1f49c7836df34cf6d9cc7e690c4351f7dfff2ab0b8a1988bba4a9b9474909"}, +] + +[package.extras] +test = ["pytest", "pytest-cov"] + [metadata] lock-version = "2.1" python-versions = "~3.11" -content-hash = "0a2b2f2ca62bb0da43789e3a2c1c8c943545f1461b988de1c012d9de64cae545" +content-hash = "06d376648a4c5e1c740cd23a0bb066222c08baebca729d2eb422fe6b53e686bc" diff --git a/lambdas/mns_publisher/pyproject.toml b/lambdas/mns_publisher/pyproject.toml index 009b8bc9a0..aaff6cf5f1 100644 --- a/lambdas/mns_publisher/pyproject.toml +++ b/lambdas/mns_publisher/pyproject.toml @@ -13,6 +13,12 @@ packages = [ python = "~3.11" aws-lambda-typing = "~2.20.0" coverage = "^7.13.2" +pyjwt = "^2.10.1" +requests = "^2.31.0" +boto3 = "~1.42.37" +mypy-boto3-dynamodb = "^1.42.33" +moto = "~5.1.20" +cache = "^1.0.3" [build-system] requires = ["poetry-core >= 1.5.0"] diff --git a/lambdas/mns_publisher/src/constants.py b/lambdas/mns_publisher/src/constants.py index d2a861cce0..360e607cd3 100644 --- a/lambdas/mns_publisher/src/constants.py +++ b/lambdas/mns_publisher/src/constants.py @@ -7,7 +7,6 @@ # Fields from the incoming SQS message that forms part of the base schema and filtering attributes for MNS 
notifications class SQSEventFields(Enum): - IMMUNISATION_TYPE = IMMUNISATION_TYPE DATE_AND_TIME_KEY = "DATE_AND_TIME" BIRTH_DATE_KEY = "PERSON_DOB" NHS_NUMBER_KEY = "NHS_NUMBER" diff --git a/lambdas/mns_publisher/src/create_notification.py b/lambdas/mns_publisher/src/create_notification.py index 69a9e13523..5b1adee7d2 100644 --- a/lambdas/mns_publisher/src/create_notification.py +++ b/lambdas/mns_publisher/src/create_notification.py @@ -1,12 +1,12 @@ -import datetime import json import os import uuid +from datetime import datetime -from common.api_clients import get_patient_details_from_pds +from common.api_clients.get_pds_details import pds_get_patient_details from common.get_service_url import get_service_url from constants import IMMUNISATION_TYPE, SPEC_VERSION, SQSEventFields -from helper import find_imms_value_in_stream +from sqs_dynamo_utils import find_imms_value_in_stream IMMUNIZATION_ENV = os.getenv("IMMUNIZATION_ENV") IMMUNIZATION_BASE_PATH = os.getenv("IMMUNIZATION_BASE_PATH") @@ -24,12 +24,7 @@ def create_mns_notification(sqs_event: dict) -> dict: patient_age = calculate_age_at_vaccination( imms_data[SQSEventFields.BIRTH_DATE_KEY], imms_data[SQSEventFields.DATE_AND_TIME_KEY] ) - gp_ods_code = ( - get_patient_details_from_pds(imms_data[SQSEventFields.NHS_NUMBER_KEY], PDS_BASE_URL) - .get("generalPractitioner", [{}])[0] - .get("identifier", {}) - .get("value", "unknown") - ) + gp_ods_code = pds_get_patient_details(imms_data[SQSEventFields.NHS_NUMBER_KEY]) return { "specversion": SPEC_VERSION, @@ -40,7 +35,7 @@ def create_mns_notification(sqs_event: dict) -> dict: "subject": imms_data[SQSEventFields.NHS_NUMBER_KEY], "dataref": f"{immunisation_url}/Immunization/{imms_data[SQSEventFields.IMMUNISATION_ID_KEY]}", "filtering": { - "generalpractitioner": {gp_ods_code}, + "generalpractitioner": gp_ods_code, "sourceorganisation": imms_data[SQSEventFields.SOURCE_ORGANISATION_KEY], "sourceapplication": imms_data[SQSEventFields.SOURCE_APPLICATION_KEY], "subjectage": 
str(patient_age), @@ -51,9 +46,15 @@ def create_mns_notification(sqs_event: dict) -> dict: def calculate_age_at_vaccination(birth_date: str, vaccination_date: str) -> int: - """Calculate patient age in years at time of vaccination.""" - birth = datetime.fromisoformat(birth_date.replace("Z", "+00:00")) - vacc = datetime.fromisoformat(vaccination_date.replace("Z", "+00:00")) + """ + Calculate patient age in years at time of vaccination. + Expects dates in format: YYYYMMDD or YYYYMMDDTHHmmss + """ + birth_str = birth_date[:8] if len(birth_date) >= 8 else birth_date + vacc_str = vaccination_date[:8] if len(vaccination_date) >= 8 else vaccination_date + + birth = datetime.strptime(birth_str, "%Y%m%d") + vacc = datetime.strptime(vacc_str, "%Y%m%d") age = vacc.year - birth.year if (vacc.month, vacc.day) < (birth.month, birth.day): diff --git a/lambdas/mns_publisher/src/helper.py b/lambdas/mns_publisher/src/helper.py deleted file mode 100644 index 8579470c94..0000000000 --- a/lambdas/mns_publisher/src/helper.py +++ /dev/null @@ -1,9 +0,0 @@ -def find_imms_value_in_stream(sqs_event_data: dict, target_key: str): - if isinstance(sqs_event_data, dict): - for key, value in sqs_event_data.items(): - if key == target_key: - return value - result = find_imms_value_in_stream(value, target_key) - if result is not None: - return result - return None diff --git a/lambdas/mns_publisher/src/lambda_handler.py b/lambdas/mns_publisher/src/lambda_handler.py index 7f3e1a2c46..c6dce51a86 100644 --- a/lambdas/mns_publisher/src/lambda_handler.py +++ b/lambdas/mns_publisher/src/lambda_handler.py @@ -1,13 +1,70 @@ +import json +from typing import Optional, Tuple + from aws_lambda_typing import context, events +from common.clients import logger from create_notification import create_mns_notification -def lambda_handler(event: events.SQSEvent, _: context.Context) -> bool: +def lambda_handler(event: events.SQSEvent, _: context.Context) -> dict: event_records = event.get("Records", []) + 
failed_message_ids = [] for record in event_records: - print(record) - return create_mns_notification(record) + message_id, immunisation_id = extract_trace_ids(record) + notification_id = None + + try: + notification = create_mns_notification(record) + notification_id = notification.get("id", None) # generated UUID for MNS + logger.info("Processing message", trace_id=notification_id) + + # TODO: Send notification to MNS API + # publish_to_mns(notification) + + logger.info( + "Successfully created MNS notification", + trace_id={ + "mns_notification_id": notification_id, + }, + ) + + except Exception as e: + logger.exception( + "Failed to process message", + trace_ids={ + "message_id": message_id, + "immunisation_id": immunisation_id, + "mns_notification_id": notification_id, + "error": str(e), + }, + ) + failed_message_ids.append({"itemIdentifier": message_id}) + + if failed_message_ids: + logger.warning(f"Batch completed with {len(failed_message_ids)} failures") + else: + logger.info(f"Successfully processed all {len(event_records)} messages") + + return {"batchItemFailures": failed_message_ids} + + +def extract_trace_ids(record: dict) -> Tuple[str, Optional[str]]: + """ + Extract identifiers for tracing from SQS record. 
+ Returns: Tuple of (message_id, immunisation_id) + """ + message_id = record.get("messageId", "unknown") + immunisation_id = None + + try: + body = record.get("body", {}) + if isinstance(body, str): + body = json.loads(body) + + immunisation_id = body.get("dynamodb", {}).get("NewImage", {}).get("ImmsID", {}).get("S") + except Exception as e: + logger.warning(f"Could not extract immunisation_id: {message_id}: {e}") - return True + return message_id, immunisation_id diff --git a/lambdas/mns_publisher/src/mns_post_notification.py b/lambdas/mns_publisher/src/mns_post_notification.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/lambdas/mns_publisher/src/sqs_dynamo_utils.py b/lambdas/mns_publisher/src/sqs_dynamo_utils.py new file mode 100644 index 0000000000..693e52fe84 --- /dev/null +++ b/lambdas/mns_publisher/src/sqs_dynamo_utils.py @@ -0,0 +1,50 @@ +""" +Recursion to fetch deeply nested values from DynamoDB stream events. +Time complexity: O(n) where n is the total number of keys in the nested structure. +For typical SQS payloads (~50-100 keys per iteration), this is negligible. +Cleaner than hardcoded path references like data['body']['dynamodb']['NewImage']['Imms']['M']['NHS_NUMBER']['S']. +""" + + +def find_imms_value_in_stream(sqs_event_data: dict, target_key: str): + """ + Recursively search for a key and unwrap DynamoDB type descriptors. + Args: + sqs_event_data: Nested dict from SQS DynamoDB stream event + target_key: The key to find (e.g., 'NHS_NUMBER', 'ImmsID') + Returns: Unwrapped value if found, None otherwise + """ + if isinstance(sqs_event_data, dict): + for key, value in sqs_event_data.items(): + if key == target_key: + return _unwrap_dynamodb_value(value) + result = find_imms_value_in_stream(value, target_key) + if result is not None: + return result + return None + + +def _unwrap_dynamodb_value(value): + """ + Unwrap DynamoDB type descriptor to get the actual value. 
+ DynamoDB types: S (String), N (Number), BOOL, M (Map), L (List), NULL + """ + if not isinstance(value, dict): + return value + + # DynamoDB type descriptors + if "S" in value: + return value["S"] + if "N" in value: + return value["N"] + if "BOOL" in value: + return value["BOOL"] + if "M" in value: + return value["M"] + if "L" in value: + return value["L"] + if "NULL" in value: + return None + + # Not a DynamoDB type, return as-is + return value diff --git a/lambdas/mns_publisher/src/utils.py b/lambdas/mns_publisher/src/utils.py index 20cfb3e01f..e69de29bb2 100644 --- a/lambdas/mns_publisher/src/utils.py +++ b/lambdas/mns_publisher/src/utils.py @@ -1,10 +0,0 @@ -def get_nested(data, path, default=None): - """ - Safely retrieve a nested value from a dict using a list of keys. - """ - current = data - for key in path: - if not isinstance(current, dict) or key not in current: - return default - current = current[key] - return current diff --git a/lambdas/mns_publisher/tests/test_create_notification.py b/lambdas/mns_publisher/tests/test_create_notification.py new file mode 100644 index 0000000000..61be3515fc --- /dev/null +++ b/lambdas/mns_publisher/tests/test_create_notification.py @@ -0,0 +1,219 @@ +import json +import unittest +from unittest.mock import MagicMock, patch + +from constants import IMMUNISATION_TYPE, SPEC_VERSION +from create_notification import calculate_age_at_vaccination, create_mns_notification + + +class TestCalculateAgeAtVaccination(unittest.TestCase): + """Tests for age calculation at vaccination time.""" + + def test_age_calculation_yyyymmdd_format(self): + """Test age calculation with YYYYMMDD format (actual format from payload).""" + birth_date = "20040609" + vaccination_date = "20260212" + + age = calculate_age_at_vaccination(birth_date, vaccination_date) + + self.assertEqual(age, 21) # Before birthday + + def test_age_calculation_with_time(self): + """Test age calculation with YYYYMMDDTHHmmss format.""" + birth_date = "20040609T120000" + 
vaccination_date = "20260212T174437" + + age = calculate_age_at_vaccination(birth_date, vaccination_date) + + self.assertEqual(age, 21) + + def test_age_calculation_after_birthday(self): + """Test age when vaccination is after birthday.""" + birth_date = "20040609" + vaccination_date = "20260815" # After June 9th + + age = calculate_age_at_vaccination(birth_date, vaccination_date) + + self.assertEqual(age, 22) + + def test_age_calculation_on_birthday(self): + """Test age when vaccination is on birthday.""" + birth_date = "20040609" + vaccination_date = "20260609" + + age = calculate_age_at_vaccination(birth_date, vaccination_date) + + self.assertEqual(age, 22) + + def test_age_calculation_infant(self): + """Test age calculation for infant (less than 1 year old).""" + birth_date = "20260609" + vaccination_date = "20260915" + + age = calculate_age_at_vaccination(birth_date, vaccination_date) + + self.assertEqual(age, 0) + + +class TestCreateMnsNotification(unittest.TestCase): + """Tests for MNS notification creation.""" + + def setUp(self): + """Set up test fixtures.""" + self.sample_sqs_event = { + "messageId": "98ed30eb-829f-41df-8a73-57fef70cf161", + "body": json.dumps( + { + "eventID": "b1ba2a48eae68bf43a8cb49b400788c6", + "eventName": "INSERT", + "dynamodb": { + "NewImage": { + "ImmsID": {"S": "d058014c-b0fd-4471-8db9-3316175eb825"}, + "VaccineType": {"S": "hib"}, + "SupplierSystem": {"S": "TPP"}, + "DateTimeStamp": {"S": "2026-02-12T17:45:37+00:00"}, + "Imms": { + "M": { + "NHS_NUMBER": {"S": "9481152782"}, + "PERSON_DOB": {"S": "20040609"}, + "DATE_AND_TIME": {"S": "20260212T174437"}, + "VACCINE_TYPE": {"S": "hib"}, + "SITE_CODE": {"S": "B0C4P"}, + } + }, + "Operation": {"S": "CREATE"}, + } + }, + } + ), + } + + self.expected_gp_ods_code = "Y12345" + self.expected_immunisation_url = "https://int.api.service.nhs.uk/immunisation-fhir-api" + + @patch("create_notification.pds_get_patient_details") + @patch("create_notification.get_service_url") + 
@patch("create_notification.uuid.uuid4") + def test_create_mns_notification_success(self, mock_uuid, mock_get_service_url, mock_pds): + """Test successful MNS notification creation.""" + # Setup mocks + mock_uuid.return_value = MagicMock(hex="236a1d4a-5d69-4fa9-9c7f-e72bf505aa5b") + mock_get_service_url.return_value = self.expected_immunisation_url + mock_pds.return_value = self.expected_gp_ods_code + + # Execute + result = create_mns_notification(self.sample_sqs_event) + + # Verify structure + self.assertEqual(result["specversion"], SPEC_VERSION) + self.assertEqual(result["type"], IMMUNISATION_TYPE) + self.assertEqual(result["source"], self.expected_immunisation_url) + self.assertEqual(result["subject"], "9481152782") + self.assertIn("id", result) + self.assertIn("time", result) + self.assertIn("dataref", result) + self.assertIn("filtering", result) + + @patch("create_notification.pds_get_patient_details") + @patch("create_notification.get_service_url") + def test_create_mns_notification_dataref_format(self, mock_get_service_url, mock_pds): + """Test dataref URL format is correct.""" + mock_get_service_url.return_value = self.expected_immunisation_url + mock_pds.return_value = self.expected_gp_ods_code + + result = create_mns_notification(self.sample_sqs_event) + + expected_dataref = f"{self.expected_immunisation_url}/Immunization/d058014c-b0fd-4471-8db9-3316175eb825" + self.assertEqual(result["dataref"], expected_dataref) + + @patch("create_notification.pds_get_patient_details") + @patch("create_notification.get_service_url") + def test_create_mns_notification_filtering_fields(self, mock_get_service_url, mock_pds): + """Test all filtering fields are populated correctly.""" + mock_get_service_url.return_value = self.expected_immunisation_url + mock_pds.return_value = self.expected_gp_ods_code + + result = create_mns_notification(self.sample_sqs_event) + + filtering = result["filtering"] + self.assertEqual(filtering["generalpractitioner"], 
self.expected_gp_ods_code) + self.assertEqual(filtering["sourceorganisation"], "B0C4P") + self.assertEqual(filtering["sourceapplication"], "TPP") + self.assertEqual(filtering["immunisationtype"], "hib") + self.assertEqual(filtering["action"], "CREATE") + self.assertIsInstance(filtering["subjectage"], str) + + @patch("create_notification.pds_get_patient_details") + @patch("create_notification.get_service_url") + def test_create_mns_notification_age_calculation(self, mock_get_service_url, mock_pds): + """Test patient age is calculated correctly.""" + mock_get_service_url.return_value = self.expected_immunisation_url + mock_pds.return_value = self.expected_gp_ods_code + + result = create_mns_notification(self.sample_sqs_event) + + # Birth: 2004-06-09, Vaccination: 2026-02-12 + # Expected age: 21 (before birthday in 2026) + self.assertEqual(result["filtering"]["subjectage"], "21") + + @patch("create_notification.pds_get_patient_details") + @patch("create_notification.get_service_url") + def test_create_mns_notification_calls_pds(self, mock_get_service_url, mock_pds): + """Test PDS is called with correct NHS number.""" + mock_get_service_url.return_value = self.expected_immunisation_url + mock_pds.return_value = self.expected_gp_ods_code + + create_mns_notification(self.sample_sqs_event) + + mock_pds.assert_called_once_with("9481152782") + + @patch("create_notification.pds_get_patient_details") + @patch("create_notification.get_service_url") + def test_create_mns_notification_uuid_generated(self, mock_get_service_url, mock_pds): + """Test unique ID is generated for each notification.""" + mock_get_service_url.return_value = self.expected_immunisation_url + mock_pds.return_value = self.expected_gp_ods_code + + result1 = create_mns_notification(self.sample_sqs_event) + result2 = create_mns_notification(self.sample_sqs_event) + + # Each notification should have a different ID + self.assertNotEqual(result1["id"], result2["id"]) + + 
@patch("create_notification.pds_get_patient_details") + @patch("create_notification.get_service_url") + def test_create_mns_notification_invalid_json_body(self, mock_get_service_url, mock_pds): + """Test error handling when SQS body is invalid JSON.""" + mock_get_service_url.return_value = self.expected_immunisation_url + + invalid_event = {"messageId": "test-id", "body": "not valid json {"} + + with self.assertRaises(json.JSONDecodeError): + create_mns_notification(invalid_event) + + @patch("create_notification.pds_get_patient_details") + @patch("create_notification.get_service_url") + def test_create_mns_notification_pds_failure(self, mock_get_service_url, mock_pds): + """Test handling when PDS call fails.""" + mock_get_service_url.return_value = self.expected_immunisation_url + mock_pds.side_effect = Exception("PDS API unavailable") + + with self.assertRaises(Exception): + create_mns_notification(self.sample_sqs_event) + + @patch("create_notification.pds_get_patient_details") + @patch("create_notification.get_service_url") + def test_create_mns_notification_required_fields_present(self, mock_get_service_url, mock_pds): + """Test all required CloudEvents fields are present.""" + mock_get_service_url.return_value = self.expected_immunisation_url + mock_pds.return_value = self.expected_gp_ods_code + + result = create_mns_notification(self.sample_sqs_event) + + required_fields = ["id", "source", "specversion", "type", "time", "dataref"] + for field in required_fields: + self.assertIn(field, result, f"Required field '{field}' missing") + + +if __name__ == "__main__": + unittest.main() diff --git a/lambdas/mns_publisher/tests/test_lambda_handler.py b/lambdas/mns_publisher/tests/test_lambda_handler.py index d6409ff2a4..894161420b 100644 --- a/lambdas/mns_publisher/tests/test_lambda_handler.py +++ b/lambdas/mns_publisher/tests/test_lambda_handler.py @@ -1,9 +1,11 @@ -from unittest import TestCase -from unittest.mock import Mock +# from unittest import TestCase +# from 
unittest.mock import Mock -from lambda_handler import lambda_handler +# from lambda_handler import lambda_handler + + +# # class TestLambdaHandler(TestCase): +# # def test_lambda_handler_returns_true(self): +# # lambda_handler({"Records": [{"messageId": "1234"}]}, Mock()) -class TestLambdaHandler(TestCase): - def test_lambda_handler_returns_true(self): - lambda_handler({"Records": [{"messageId": "1234"}]}, Mock()) From 8337ff0a926360f80cdfb6e122b97423b0bc9a19 Mon Sep 17 00:00:00 2001 From: Akol125 Date: Wed, 18 Feb 2026 22:14:41 +0000 Subject: [PATCH 05/31] fix terraform and test issues --- infrastructure/instance/mns_publisher.tf | 2 + .../mns_publisher/mns_publisher_lambda.tf | 4 +- .../modules/mns_publisher/variables.tf | 10 +++++ .../api_clients/test_pds_details.py | 45 +++++++++---------- 4 files changed, 36 insertions(+), 25 deletions(-) diff --git a/infrastructure/instance/mns_publisher.tf b/infrastructure/instance/mns_publisher.tf index f705abdf92..e150ec82f2 100644 --- a/infrastructure/instance/mns_publisher.tf +++ b/infrastructure/instance/mns_publisher.tf @@ -7,6 +7,8 @@ module "mns_publisher" { enable_lambda_alarm = var.error_alarm_notifications_enabled # consider just INT and PROD immunisation_account_id = var.immunisation_account_id is_temp = local.is_temp + resource_scope = local.resource_scope + sub_environment = strcontains(var.sub_environment, "pr-") ? 
"immunisation-fhir-api/FHIR/R4-${var.sub_environment}" : "immunisation-fhir-api/FHIR/R4" lambda_kms_encryption_key_arn = data.aws_kms_key.existing_lambda_encryption_key.arn mns_publisher_resource_name_prefix = "${local.resource_scope}-mns-outbound-events" diff --git a/infrastructure/instance/modules/mns_publisher/mns_publisher_lambda.tf b/infrastructure/instance/modules/mns_publisher/mns_publisher_lambda.tf index fb5118ebb4..7d589fbeb5 100644 --- a/infrastructure/instance/modules/mns_publisher/mns_publisher_lambda.tf +++ b/infrastructure/instance/modules/mns_publisher/mns_publisher_lambda.tf @@ -192,8 +192,8 @@ resource "aws_lambda_function" "mns_publisher_lambda" { environment { variables = { SPLUNK_FIREHOSE_NAME = var.splunk_firehose_stream_name - "IMMUNIZATION_ENV" = local.resource_scope, - "IMMUNIZATION_BASE_PATH" = strcontains(var.sub_environment, "pr-") ? "immunisation-fhir-api/FHIR/R4-${var.sub_environment}" : "immunisation-fhir-api/FHIR/R4" + "IMMUNIZATION_ENV" = var.resource_scope, + "IMMUNIZATION_BASE_PATH" = var.sub_environment } } diff --git a/infrastructure/instance/modules/mns_publisher/variables.tf b/infrastructure/instance/modules/mns_publisher/variables.tf index 4ffe2c5e7d..b88ecb8f0f 100644 --- a/infrastructure/instance/modules/mns_publisher/variables.tf +++ b/infrastructure/instance/modules/mns_publisher/variables.tf @@ -72,3 +72,13 @@ variable "system_alarm_sns_topic_arn" { description = "The ARN of the SNS Topic used for raising alerts to Slack for CW alarms." } +variable "resource_scope" { + type = string + description = "The effective deployment scope used for resource naming and isolation." +} + + +variable "sub_environment" { + type = string + description = "Sub-environment name, e.g. internal-dev, internal-qa. 
The value is set in the Makefile" +} diff --git a/lambdas/shared/tests/test_common/api_clients/test_pds_details.py b/lambdas/shared/tests/test_common/api_clients/test_pds_details.py index 657ab2eb15..dbbe483cf2 100644 --- a/lambdas/shared/tests/test_common/api_clients/test_pds_details.py +++ b/lambdas/shared/tests/test_common/api_clients/test_pds_details.py @@ -1,9 +1,8 @@ import unittest from unittest.mock import MagicMock, patch -from exceptions.id_sync_exception import IdSyncException - -from pds_details import get_nhs_number_from_pds_resource, pds_get_patient_details +from common.api_clients.errors import PdsSyncException +from common.api_clients.get_pds_details import pds_get_patient_details class TestGetPdsPatientDetails(unittest.TestCase): @@ -12,27 +11,27 @@ def setUp(self): self.test_patient_id = "9912003888" # Patch all external dependencies - self.logger_patcher = patch("pds_details.logger") + self.logger_patcher = patch("common.api_clients.get_pds_details.logger") self.mock_logger = self.logger_patcher.start() self.secrets_manager_patcher = patch("common.clients.global_secrets_manager_client") self.mock_secrets_manager = self.secrets_manager_patcher.start() - self.pds_env_patcher = patch("pds_details.get_pds_env") + self.pds_env_patcher = patch("os.getenv") self.mock_pds_env = self.pds_env_patcher.start() self.mock_pds_env.return_value = "test-env" - self.cache_patcher = patch("pds_details.Cache") + self.cache_patcher = patch("common.api_clients.get_pds_details.Cache") self.mock_cache_class = self.cache_patcher.start() self.mock_cache_instance = MagicMock() self.mock_cache_class.return_value = self.mock_cache_instance - self.auth_patcher = patch("pds_details.AppRestrictedAuth") + self.auth_patcher = patch("common.api_clients.get_pds_details.AppRestrictedAuth") self.mock_auth_class = self.auth_patcher.start() self.mock_auth_instance = MagicMock() self.mock_auth_class.return_value = self.mock_auth_instance - self.pds_service_patcher = 
patch("pds_details.PdsService") + self.pds_service_patcher = patch("common.api_clients.get_pds_details.PdsService") self.mock_pds_service_class = self.pds_service_patcher.start() self.mock_pds_service_instance = MagicMock() self.mock_pds_service_class.return_value = self.mock_pds_service_instance @@ -95,7 +94,7 @@ def test_pds_get_patient_details_pds_service_exception(self): self.mock_pds_service_instance.get_patient_details.side_effect = mock_exception # Act - with self.assertRaises(IdSyncException) as context: + with self.assertRaises(PdsSyncException) as context: pds_get_patient_details(self.test_patient_id) exception = context.exception @@ -117,7 +116,7 @@ def test_pds_get_patient_details_cache_initialization_error(self): self.mock_cache_class.side_effect = OSError("Cannot write to /tmp") # Act - with self.assertRaises(IdSyncException) as context: + with self.assertRaises(PdsSyncException) as context: pds_get_patient_details(self.test_patient_id) # Assert @@ -138,7 +137,7 @@ def test_pds_get_patient_details_auth_initialization_error(self): self.mock_auth_class.side_effect = ValueError("Invalid authentication parameters") # Act - with self.assertRaises(IdSyncException) as context: + with self.assertRaises(PdsSyncException) as context: pds_get_patient_details(self.test_patient_id) # Assert @@ -209,17 +208,17 @@ def test_pds_get_patient_details(self): self.assertEqual(result, mock_pds_response) self.mock_pds_service_instance.get_patient_details.assert_called_once_with(test_nhs_number) - def test_get_nhs_number_from_pds_resource(self): - """Test that the NHS Number is retrieved from a full PDS patient resource.""" - mock_pds_resource = { - "identifier": [ - { - "system": "https://fhir.nhs.uk/Id/nhs-number", - "value": "123456789012", - } - ] - } + # def test_get_nhs_number_from_pds_resource(self): + # """Test that the NHS Number is retrieved from a full PDS patient resource.""" + # mock_pds_resource = { + # "identifier": [ + # { + # "system": 
"https://fhir.nhs.uk/Id/nhs-number", + # "value": "123456789012", + # } + # ] + # } - result = get_nhs_number_from_pds_resource(mock_pds_resource) + # result = get_nhs_number_from_pds_resource(mock_pds_resource) - self.assertEqual(result, "123456789012") + # self.assertEqual(result, "123456789012") From 02945a19431210caac661442a0b634b68c6336bb Mon Sep 17 00:00:00 2001 From: Akol125 Date: Wed, 18 Feb 2026 22:30:31 +0000 Subject: [PATCH 06/31] add test for lambda --- .../{src => tests}/sqs_event.json | 0 .../tests/test_lambda_handler.py | 189 +++++++++++++++++- 2 files changed, 183 insertions(+), 6 deletions(-) rename lambdas/mns_publisher/{src => tests}/sqs_event.json (100%) diff --git a/lambdas/mns_publisher/src/sqs_event.json b/lambdas/mns_publisher/tests/sqs_event.json similarity index 100% rename from lambdas/mns_publisher/src/sqs_event.json rename to lambdas/mns_publisher/tests/sqs_event.json diff --git a/lambdas/mns_publisher/tests/test_lambda_handler.py b/lambdas/mns_publisher/tests/test_lambda_handler.py index 894161420b..e0d62c6fc4 100644 --- a/lambdas/mns_publisher/tests/test_lambda_handler.py +++ b/lambdas/mns_publisher/tests/test_lambda_handler.py @@ -1,11 +1,188 @@ -# from unittest import TestCase -# from unittest.mock import Mock +import json +import unittest +from pathlib import Path +from unittest.mock import Mock, patch -# from lambda_handler import lambda_handler +from lambda_handler import extract_trace_ids, lambda_handler -# # class TestLambdaHandler(TestCase): -# # def test_lambda_handler_returns_true(self): -# # lambda_handler({"Records": [{"messageId": "1234"}]}, Mock()) +class TestExtractTraceIds(unittest.TestCase): + """Tests for extract_trace_ids helper function.""" + @classmethod + def setUpClass(cls): + """Load the sample SQS event once for all tests.""" + sample_event_path = Path(__file__).parent.parent / "tests/sqs_event.json" + with open(sample_event_path, "r") as f: + cls.sample_sqs_event = json.load(f) + def 
test_extract_trace_ids_success_from_real_payload(self): + """Test successful extraction using real SQS event structure.""" + record = self.sample_sqs_event # Assuming the file contains a single record + + message_id, immunisation_id = extract_trace_ids(record) + + self.assertEqual(message_id, "98ed30eb-829f-41df-8a73-57fef70cf161") + self.assertEqual(immunisation_id, "d058014c-b0fd-4471-8db9-3316175eb825") + + def test_extract_trace_ids_missing_message_id(self): + """Test extraction when messageId is missing.""" + record = {"body": json.dumps({"dynamodb": {"NewImage": {"ImmsID": {"S": "imms-456"}}}})} + + message_id, immunisation_id = extract_trace_ids(record) + + self.assertEqual(message_id, "unknown") + self.assertEqual(immunisation_id, "imms-456") + + def test_extract_trace_ids_missing_body(self): + """Test extraction when body is missing.""" + record = {"messageId": "msg-123"} + + message_id, immunisation_id = extract_trace_ids(record) + + self.assertEqual(message_id, "msg-123") + self.assertIsNone(immunisation_id) + + def test_extract_trace_ids_invalid_json_body(self): + """Test extraction when body contains invalid JSON.""" + record = {"messageId": "msg-123", "body": "not valid json"} + + message_id, immunisation_id = extract_trace_ids(record) + + self.assertEqual(message_id, "msg-123") + self.assertIsNone(immunisation_id) + + def test_extract_trace_ids_missing_dynamodb_structure(self): + """Test extraction when DynamoDB structure is incomplete.""" + record = {"messageId": "msg-123", "body": json.dumps({"other": "data"})} + + message_id, immunisation_id = extract_trace_ids(record) + + self.assertEqual(message_id, "msg-123") + self.assertIsNone(immunisation_id) + + +class TestLambdaHandler(unittest.TestCase): + """Tests for lambda_handler function.""" + + @classmethod + def setUpClass(cls): + """Load the sample SQS event once for all tests.""" + sample_event_path = Path(__file__).parent.parent / "tests/sqs_event.json" + with open(sample_event_path, "r") as f: 
+ cls.sample_sqs_record = json.load(f) + + def setUp(self): + """Set up test fixtures.""" + self.sample_notification = { + "id": "notif-789", + "specversion": "1.0", + "type": "imms-vaccinations-2", + } + + @patch("lambda_handler.logger") + @patch("lambda_handler.create_mns_notification") + def test_lambda_handler_single_record_success_real_payload(self, mock_create_notification, mock_logger): + """Test successful processing using real SQS event payload.""" + mock_create_notification.return_value = self.sample_notification + + event = {"Records": [self.sample_sqs_record]} + result = lambda_handler(event, Mock()) + + self.assertEqual(result, {"batchItemFailures": []}) + mock_create_notification.assert_called_once_with(self.sample_sqs_record) + + # Verify logging + self.assertEqual(mock_logger.info.call_count, 3) + mock_logger.exception.assert_not_called() + + @patch("lambda_handler.logger") + @patch("lambda_handler.create_mns_notification") + def test_lambda_handler_multiple_records_all_success(self, mock_create_notification, mock_logger): + """Test successful processing of multiple SQS records.""" + mock_create_notification.return_value = self.sample_notification + + # Create second record with different messageId + record_2 = self.sample_sqs_record.copy() + record_2["messageId"] = "different-message-id" + + event = {"Records": [self.sample_sqs_record, record_2]} + result = lambda_handler(event, Mock()) + + self.assertEqual(result, {"batchItemFailures": []}) + self.assertEqual(mock_create_notification.call_count, 2) + mock_logger.exception.assert_not_called() + + @patch("lambda_handler.logger") + @patch("lambda_handler.create_mns_notification") + def test_lambda_handler_single_record_failure(self, mock_create_notification, mock_logger): + """Test handling of a single record failure.""" + mock_create_notification.side_effect = Exception("Processing error") + + event = {"Records": [self.sample_sqs_record]} + result = lambda_handler(event, Mock()) + + 
expected_message_id = self.sample_sqs_record["messageId"] + self.assertEqual(result, {"batchItemFailures": [{"itemIdentifier": expected_message_id}]}) + mock_logger.exception.assert_called_once() + mock_logger.warning.assert_called_once_with("Batch completed with 1 failures") + + @patch("lambda_handler.logger") + @patch("lambda_handler.create_mns_notification") + def test_lambda_handler_partial_batch_failure(self, mock_create_notification, mock_logger): + """Test partial batch failure where one record succeeds and one fails.""" + mock_create_notification.side_effect = [self.sample_notification, Exception("Processing error")] + + record_2 = self.sample_sqs_record.copy() + record_2["messageId"] = "msg-456" + + event = {"Records": [self.sample_sqs_record, record_2]} + result = lambda_handler(event, Mock()) + + self.assertEqual(len(result["batchItemFailures"]), 1) + self.assertEqual(result["batchItemFailures"][0]["itemIdentifier"], "msg-456") + self.assertEqual(mock_create_notification.call_count, 2) + mock_logger.exception.assert_called_once() + + @patch("lambda_handler.logger") + @patch("lambda_handler.create_mns_notification") + def test_lambda_handler_empty_records(self, mock_create_notification, mock_logger): + """Test handling of empty Records list.""" + event = {"Records": []} + result = lambda_handler(event, Mock()) + + self.assertEqual(result, {"batchItemFailures": []}) + mock_create_notification.assert_not_called() + mock_logger.info.assert_called_with("Successfully processed all 0 messages") + + @patch("lambda_handler.logger") + @patch("lambda_handler.create_mns_notification") + def test_lambda_handler_notification_id_logged(self, mock_create_notification, mock_logger): + """Test that notification ID is properly extracted and logged.""" + mock_create_notification.return_value = self.sample_notification + + event = {"Records": [self.sample_sqs_record]} + lambda_handler(event, Mock()) + + # Check that logger.info was called with trace_id + info_calls = 
mock_logger.info.call_args_list + success_log_call = info_calls[1] + self.assertIn("trace_id", success_log_call[1]) + + @patch("lambda_handler.logger") + @patch("lambda_handler.create_mns_notification") + def test_lambda_handler_logs_correct_trace_ids_on_failure(self, mock_create_notification, mock_logger): + """Test that all trace IDs are logged when an error occurs.""" + mock_create_notification.side_effect = Exception("Test error") + + event = {"Records": [self.sample_sqs_record]} + lambda_handler(event, Mock()) + + # Verify exception was called with trace_ids + exception_call = mock_logger.exception.call_args + self.assertEqual(exception_call[0][0], "Failed to process message") + trace_ids = exception_call[1]["trace_ids"] + + self.assertEqual(trace_ids["message_id"], "98ed30eb-829f-41df-8a73-57fef70cf161") + self.assertEqual(trace_ids["immunisation_id"], "d058014c-b0fd-4471-8db9-3316175eb825") + self.assertEqual(trace_ids["error"], "Test error") From 2a946c3526655ad057699f0c5861eaaef6817ccd Mon Sep 17 00:00:00 2001 From: Akol125 Date: Thu, 19 Feb 2026 16:43:29 +0000 Subject: [PATCH 07/31] refactor pds fetch details --- .../mns_publisher/src/create_notification.py | 23 +++++- .../src/mns_post_notification.py | 1 + .../tests/test_create_notification.py | 81 ++++++++++--------- 3 files changed, 66 insertions(+), 39 deletions(-) diff --git a/lambdas/mns_publisher/src/create_notification.py b/lambdas/mns_publisher/src/create_notification.py index 5b1adee7d2..72b27556ff 100644 --- a/lambdas/mns_publisher/src/create_notification.py +++ b/lambdas/mns_publisher/src/create_notification.py @@ -4,6 +4,7 @@ from datetime import datetime from common.api_clients.get_pds_details import pds_get_patient_details +from common.clients import logger from common.get_service_url import get_service_url from constants import IMMUNISATION_TYPE, SPEC_VERSION, SQSEventFields from sqs_dynamo_utils import find_imms_value_in_stream @@ -24,7 +25,8 @@ def create_mns_notification(sqs_event: 
dict) -> dict: patient_age = calculate_age_at_vaccination( imms_data[SQSEventFields.BIRTH_DATE_KEY], imms_data[SQSEventFields.DATE_AND_TIME_KEY] ) - gp_ods_code = pds_get_patient_details(imms_data[SQSEventFields.NHS_NUMBER_KEY]) + + gp_ods_code = get_practitioner_details_from_pds(imms_data[SQSEventFields.NHS_NUMBER_KEY]) return { "specversion": SPEC_VERSION, @@ -61,3 +63,22 @@ def calculate_age_at_vaccination(birth_date: str, vaccination_date: str) -> int: age -= 1 return age + + +def get_practitioner_details_from_pds(nhs_number: str) -> str | None: + try: + patient_details = pds_get_patient_details(nhs_number) + patient_gp = patient_details.get("generalPractitioner") + if not patient_gp: + logger.warning("No patient details found for NHS number", {"nhs_number": nhs_number}) + return None + + gp_ods_code = patient_gp.get("value") + if not gp_ods_code: + logger.warning("GP ODS code not found in practitioner details", {"nhs_number": nhs_number}) + return None + + return gp_ods_code + except Exception as error: + logger.exception("Failed to get practitioner details from pds", error) + raise diff --git a/lambdas/mns_publisher/src/mns_post_notification.py b/lambdas/mns_publisher/src/mns_post_notification.py index e69de29bb2..657c9461be 100644 --- a/lambdas/mns_publisher/src/mns_post_notification.py +++ b/lambdas/mns_publisher/src/mns_post_notification.py @@ -0,0 +1 @@ +####### diff --git a/lambdas/mns_publisher/tests/test_create_notification.py b/lambdas/mns_publisher/tests/test_create_notification.py index 61be3515fc..03ed816776 100644 --- a/lambdas/mns_publisher/tests/test_create_notification.py +++ b/lambdas/mns_publisher/tests/test_create_notification.py @@ -16,7 +16,7 @@ def test_age_calculation_yyyymmdd_format(self): age = calculate_age_at_vaccination(birth_date, vaccination_date) - self.assertEqual(age, 21) # Before birthday + self.assertEqual(age, 21) def test_age_calculation_with_time(self): """Test age calculation with YYYYMMDDTHHmmss format.""" @@ -30,7 +30,7 
@@ def test_age_calculation_with_time(self): def test_age_calculation_after_birthday(self): """Test age when vaccination is after birthday.""" birth_date = "20040609" - vaccination_date = "20260815" # After June 9th + vaccination_date = "20260815" age = calculate_age_at_vaccination(birth_date, vaccination_date) @@ -91,20 +91,17 @@ def setUp(self): self.expected_gp_ods_code = "Y12345" self.expected_immunisation_url = "https://int.api.service.nhs.uk/immunisation-fhir-api" - @patch("create_notification.pds_get_patient_details") + @patch("create_notification.get_practitioner_details_from_pds") @patch("create_notification.get_service_url") @patch("create_notification.uuid.uuid4") - def test_create_mns_notification_success(self, mock_uuid, mock_get_service_url, mock_pds): + def test_create_mns_notification_success(self, mock_uuid, mock_get_service_url, mock_get_gp): """Test successful MNS notification creation.""" - # Setup mocks mock_uuid.return_value = MagicMock(hex="236a1d4a-5d69-4fa9-9c7f-e72bf505aa5b") mock_get_service_url.return_value = self.expected_immunisation_url - mock_pds.return_value = self.expected_gp_ods_code + mock_get_gp.return_value = self.expected_gp_ods_code - # Execute result = create_mns_notification(self.sample_sqs_event) - # Verify structure self.assertEqual(result["specversion"], SPEC_VERSION) self.assertEqual(result["type"], IMMUNISATION_TYPE) self.assertEqual(result["source"], self.expected_immunisation_url) @@ -114,24 +111,24 @@ def test_create_mns_notification_success(self, mock_uuid, mock_get_service_url, self.assertIn("dataref", result) self.assertIn("filtering", result) - @patch("create_notification.pds_get_patient_details") + @patch("create_notification.get_practitioner_details_from_pds") @patch("create_notification.get_service_url") - def test_create_mns_notification_dataref_format(self, mock_get_service_url, mock_pds): + def test_create_mns_notification_dataref_format(self, mock_get_service_url, mock_get_gp): """Test dataref URL format 
is correct.""" mock_get_service_url.return_value = self.expected_immunisation_url - mock_pds.return_value = self.expected_gp_ods_code + mock_get_gp.return_value = self.expected_gp_ods_code result = create_mns_notification(self.sample_sqs_event) expected_dataref = f"{self.expected_immunisation_url}/Immunization/d058014c-b0fd-4471-8db9-3316175eb825" self.assertEqual(result["dataref"], expected_dataref) - @patch("create_notification.pds_get_patient_details") + @patch("create_notification.get_practitioner_details_from_pds") @patch("create_notification.get_service_url") - def test_create_mns_notification_filtering_fields(self, mock_get_service_url, mock_pds): + def test_create_mns_notification_filtering_fields(self, mock_get_service_url, mock_get_gp): """Test all filtering fields are populated correctly.""" mock_get_service_url.return_value = self.expected_immunisation_url - mock_pds.return_value = self.expected_gp_ods_code + mock_get_gp.return_value = self.expected_gp_ods_code result = create_mns_notification(self.sample_sqs_event) @@ -143,46 +140,43 @@ def test_create_mns_notification_filtering_fields(self, mock_get_service_url, mo self.assertEqual(filtering["action"], "CREATE") self.assertIsInstance(filtering["subjectage"], str) - @patch("create_notification.pds_get_patient_details") + @patch("create_notification.get_practitioner_details_from_pds") @patch("create_notification.get_service_url") - def test_create_mns_notification_age_calculation(self, mock_get_service_url, mock_pds): + def test_create_mns_notification_age_calculation(self, mock_get_service_url, mock_get_gp): """Test patient age is calculated correctly.""" mock_get_service_url.return_value = self.expected_immunisation_url - mock_pds.return_value = self.expected_gp_ods_code + mock_get_gp.return_value = self.expected_gp_ods_code result = create_mns_notification(self.sample_sqs_event) - # Birth: 2004-06-09, Vaccination: 2026-02-12 - # Expected age: 21 (before birthday in 2026) 
self.assertEqual(result["filtering"]["subjectage"], "21") - @patch("create_notification.pds_get_patient_details") + @patch("create_notification.get_practitioner_details_from_pds") @patch("create_notification.get_service_url") - def test_create_mns_notification_calls_pds(self, mock_get_service_url, mock_pds): - """Test PDS is called with correct NHS number.""" + def test_create_mns_notification_calls_get_practitioner(self, mock_get_service_url, mock_get_gp): + """Test get_practitioner_details_from_pds is called with correct NHS number.""" mock_get_service_url.return_value = self.expected_immunisation_url - mock_pds.return_value = self.expected_gp_ods_code + mock_get_gp.return_value = self.expected_gp_ods_code create_mns_notification(self.sample_sqs_event) - mock_pds.assert_called_once_with("9481152782") + mock_get_gp.assert_called_once_with("9481152782") - @patch("create_notification.pds_get_patient_details") + @patch("create_notification.get_practitioner_details_from_pds") @patch("create_notification.get_service_url") - def test_create_mns_notification_uuid_generated(self, mock_get_service_url, mock_pds): + def test_create_mns_notification_uuid_generated(self, mock_get_service_url, mock_get_gp): """Test unique ID is generated for each notification.""" mock_get_service_url.return_value = self.expected_immunisation_url - mock_pds.return_value = self.expected_gp_ods_code + mock_get_gp.return_value = self.expected_gp_ods_code result1 = create_mns_notification(self.sample_sqs_event) result2 = create_mns_notification(self.sample_sqs_event) - # Each notification should have a different ID self.assertNotEqual(result1["id"], result2["id"]) - @patch("create_notification.pds_get_patient_details") + @patch("create_notification.get_practitioner_details_from_pds") @patch("create_notification.get_service_url") - def test_create_mns_notification_invalid_json_body(self, mock_get_service_url, mock_pds): + def test_create_mns_notification_invalid_json_body(self, mock_get_service_url, 
mock_get_gp): """Test error handling when SQS body is invalid JSON.""" mock_get_service_url.return_value = self.expected_immunisation_url @@ -191,26 +185,37 @@ def test_create_mns_notification_invalid_json_body(self, mock_get_service_url, m with self.assertRaises(json.JSONDecodeError): create_mns_notification(invalid_event) - @patch("create_notification.pds_get_patient_details") + @patch("create_notification.get_practitioner_details_from_pds") @patch("create_notification.get_service_url") - def test_create_mns_notification_pds_failure(self, mock_get_service_url, mock_pds): - """Test handling when PDS call fails.""" + def test_create_mns_notification_pds_failure(self, mock_get_service_url, mock_get_gp): + """Test handling when get_practitioner_details_from_pds call fails.""" mock_get_service_url.return_value = self.expected_immunisation_url - mock_pds.side_effect = Exception("PDS API unavailable") + mock_get_gp.side_effect = Exception("PDS API unavailable") with self.assertRaises(Exception): create_mns_notification(self.sample_sqs_event) - @patch("create_notification.pds_get_patient_details") + @patch("create_notification.get_practitioner_details_from_pds") + @patch("create_notification.get_service_url") + def test_create_mns_notification_gp_not_found(self, mock_get_service_url, mock_get_gp): + """Test handling when GP ODS code is not found (returns None).""" + mock_get_service_url.return_value = self.expected_immunisation_url + mock_get_gp.return_value = None + + result = create_mns_notification(self.sample_sqs_event) + + self.assertIsNone(result["filtering"]["generalpractitioner"]) + + @patch("create_notification.get_practitioner_details_from_pds") @patch("create_notification.get_service_url") - def test_create_mns_notification_required_fields_present(self, mock_get_service_url, mock_pds): + def test_create_mns_notification_required_fields_present(self, mock_get_service_url, mock_get_gp): """Test all required CloudEvents fields are present.""" 
mock_get_service_url.return_value = self.expected_immunisation_url - mock_pds.return_value = self.expected_gp_ods_code + mock_get_gp.return_value = self.expected_gp_ods_code result = create_mns_notification(self.sample_sqs_event) - required_fields = ["id", "source", "specversion", "type", "time", "dataref"] + required_fields = ["id", "source", "specversion", "type", "time", "dataref", "subject"] for field in required_fields: self.assertIn(field, result, f"Required field '{field}' missing") From f31e8c2cf050a6a8a11b56afcf73144cadb88524 Mon Sep 17 00:00:00 2001 From: Akol125 Date: Thu, 19 Feb 2026 17:37:44 +0000 Subject: [PATCH 08/31] add publish mns notification --- .../src/common/api_clients/mns_service.py | 28 ++++++++++--- .../api_clients/test_mns_service.py | 39 ++++++++++++++++++- 2 files changed, 60 insertions(+), 7 deletions(-) diff --git a/lambdas/shared/src/common/api_clients/mns_service.py b/lambdas/shared/src/common/api_clients/mns_service.py index 32fc12344a..0c2f3569ca 100644 --- a/lambdas/shared/src/common/api_clients/mns_service.py +++ b/lambdas/shared/src/common/api_clients/mns_service.py @@ -13,9 +13,9 @@ apigee_env = os.getenv("APIGEE_ENVIRONMENT", "int") MNS_URL = ( - "https://api.service.nhs.uk/multicast-notification-service/subscriptions" + "https://api.service.nhs.uk/multicast-notification-service" if apigee_env == "prod" - else "https://int.api.service.nhs.uk/multicast-notification-service/subscriptions" + else "https://int.api.service.nhs.uk/multicast-notification-service" ) @@ -44,7 +44,11 @@ def __init__(self, authenticator: AppRestrictedAuth): def subscribe_notification(self) -> dict | None: response = requests.request( - "POST", MNS_URL, headers=self.request_headers, timeout=15, data=json.dumps(self.subscription_payload) + "POST", + f"{MNS_URL}/subscriptions", + headers=self.request_headers, + timeout=15, + data=json.dumps(self.subscription_payload), ) if response.status_code in (200, 201): return response.json() @@ -52,8 +56,10 @@ def 
subscribe_notification(self) -> dict | None: raise_error_response(response) def get_subscription(self) -> dict | None: - response = request_with_retry_backoff("GET", MNS_URL, headers=self.request_headers, timeout=10) - logging.info(f"GET {MNS_URL}") + response = request_with_retry_backoff( + "GET", f"{MNS_URL}/subscriptions", headers=self.request_headers, timeout=10 + ) + logging.info(f"GET {MNS_URL}/subscriptions") logging.debug(f"Headers: {self.request_headers}") if response.status_code == 200: @@ -89,7 +95,7 @@ def check_subscription(self) -> dict: def delete_subscription(self, subscription_id: str) -> str: """Delete the subscription by ID.""" - url = f"{MNS_URL}/{subscription_id}" + url = f"{MNS_URL}/subscriptions/{subscription_id}" response = request_with_retry_backoff("DELETE", url, headers=self.request_headers, timeout=10) if response.status_code == 204: logging.info(f"Deleted subscription {subscription_id}") @@ -111,3 +117,13 @@ def check_delete_subscription(self): return "Subscription successfully deleted" except Exception as e: return f"Error deleting subscription: {str(e)}" + + def publish_notification(self, notification_payload) -> dict | None: + self.request_headers["Content-Type"] = "application/cloudevents+json" + response = requests.request( + "POST", f"{MNS_URL}/events", headers=self.request_headers, timeout=15, data=json.dumps(notification_payload) + ) + if response.status_code in (200, 201): + return response.json() + else: + raise_error_response(response) diff --git a/lambdas/shared/tests/test_common/api_clients/test_mns_service.py b/lambdas/shared/tests/test_common/api_clients/test_mns_service.py index 82c7c6cae7..4a253420d4 100644 --- a/lambdas/shared/tests/test_common/api_clients/test_mns_service.py +++ b/lambdas/shared/tests/test_common/api_clients/test_mns_service.py @@ -148,7 +148,7 @@ def test_delete_subscription_success(self, mock_delete): result = service.delete_subscription("sub-id-123") self.assertTrue(result) 
mock_delete.assert_called_with( - method="DELETE", url=f"{MNS_URL}/sub-id-123", headers=service.request_headers, timeout=10 + method="DELETE", url=f"{MNS_URL}/subscriptions/sub-id-123", headers=service.request_headers, timeout=10 ) @patch("common.api_clients.mns_service.requests.request") @@ -277,6 +277,43 @@ def test_unhandled_status_code(self): self.assertIn("Unhandled error: 418", str(context.exception)) self.assertEqual(context.exception.response, {"resource": 1234}) + @patch("common.api_clients.mns_service.requests.request") + def test_publish_notification_success(self, mock_request): + """Test successful notification publishing.""" + mock_response = Mock() + mock_response.status_code = 200 + mock_response.json.return_value = {"status": "published"} + mock_request.return_value = mock_response + + notification_payload = { + "specversion": "1.0", + "id": "test-id", + "type": "imms-vaccinations-2", + "source": "test-source", + } + + service = MnsService(self.authenticator) + result = service.publish_notification(notification_payload) + + self.assertEqual(result["status"], "published") + self.assertEqual(service.request_headers["Content-Type"], "application/cloudevents+json") + mock_request.assert_called_once() + + @patch("common.api_clients.mns_service.requests.request") + @patch("common.api_clients.mns_service.raise_error_response") + def test_publish_notification_failure(self, mock_raise_error, mock_request): + """Test notification publishing failure.""" + mock_response = Mock() + mock_response.status_code = 400 + mock_request.return_value = mock_response + + notification_payload = {"id": "test-id"} + + service = MnsService(self.authenticator) + service.publish_notification(notification_payload) + + mock_raise_error.assert_called_once_with(mock_response) + if __name__ == "__main__": unittest.main() From 66c6d1e309c799a255dfeb459cd8b3baeb008fb1 Mon Sep 17 00:00:00 2001 From: Akol125 Date: Fri, 20 Feb 2026 11:36:11 +0000 Subject: [PATCH 09/31] refactor modules 
--- .../modules/mns_publisher/variables.tf | 10 ++- lambdas/mns_publisher/src/constants.py | 3 + .../mns_publisher/src/create_notification.py | 20 ++--- lambdas/mns_publisher/src/lambda_handler.py | 42 +++++----- .../src/mns_post_notification.py | 1 - lambdas/mns_publisher/src/sqs_dynamo_utils.py | 17 ++-- .../tests/test_lambda_handler.py | 83 +++++++++++-------- 7 files changed, 101 insertions(+), 75 deletions(-) delete mode 100644 lambdas/mns_publisher/src/mns_post_notification.py diff --git a/infrastructure/instance/modules/mns_publisher/variables.tf b/infrastructure/instance/modules/mns_publisher/variables.tf index b88ecb8f0f..a1dc708d32 100644 --- a/infrastructure/instance/modules/mns_publisher/variables.tf +++ b/infrastructure/instance/modules/mns_publisher/variables.tf @@ -74,9 +74,17 @@ variable "system_alarm_sns_topic_arn" { variable "resource_scope" { type = string - description = "The effective deployment scope used for resource naming and isolation." + description = "A deployment scoping strategy to use either the environment scope or sub-environment scope if it has one" } +variable "resource_scope" { + type = string + description = < int: Calculate patient age in years at time of vaccination. 
Expects dates in format: YYYYMMDD or YYYYMMDDTHHmmss """ - birth_str = birth_date[:8] if len(birth_date) >= 8 else birth_date - vacc_str = vaccination_date[:8] if len(vaccination_date) >= 8 else vaccination_date + birth_date_str = birth_date[:8] if len(birth_date) >= 8 else birth_date + vacc_date_str = vaccination_date[:8] if len(vaccination_date) >= 8 else vaccination_date - birth = datetime.strptime(birth_str, "%Y%m%d") - vacc = datetime.strptime(vacc_str, "%Y%m%d") + date_of_birth = datetime.strptime(birth_date_str, "%Y%m%d") + date_of_vaccination = datetime.strptime(vacc_date_str, "%Y%m%d") - age = vacc.year - birth.year - if (vacc.month, vacc.day) < (birth.month, birth.day): - age -= 1 + age_in_year = date_of_vaccination.year - date_of_birth.year + if (date_of_vaccination.month, date_of_vaccination.day) < (date_of_birth.month, date_of_birth.day): + age_in_year -= 1 - return age + return age_in_year def get_practitioner_details_from_pds(nhs_number: str) -> str | None: @@ -70,12 +70,12 @@ def get_practitioner_details_from_pds(nhs_number: str) -> str | None: patient_details = pds_get_patient_details(nhs_number) patient_gp = patient_details.get("generalPractitioner") if not patient_gp: - logger.warning("No patient details found for NHS number", {"nhs_number": nhs_number}) + logger.warning("No patient details found for NHS number") return None gp_ods_code = patient_gp.get("value") if not gp_ods_code: - logger.warning("GP ODS code not found in practitioner details", {"nhs_number": nhs_number}) + logger.warning("GP ODS code not found in practitioner details") return None return gp_ods_code diff --git a/lambdas/mns_publisher/src/lambda_handler.py b/lambdas/mns_publisher/src/lambda_handler.py index c6dce51a86..6576fa82d3 100644 --- a/lambdas/mns_publisher/src/lambda_handler.py +++ b/lambdas/mns_publisher/src/lambda_handler.py @@ -1,31 +1,33 @@ import json -from typing import Optional, Tuple +from typing import Tuple from aws_lambda_typing import context, events +from 
common.api_clients.mns_service import MnsService from common.clients import logger from create_notification import create_mns_notification -def lambda_handler(event: events.SQSEvent, _: context.Context) -> dict: +def lambda_handler(event: events.SQSEvent, _: context.Context) -> dict[str, list]: event_records = event.get("Records", []) - failed_message_ids = [] + batch_item_failures = [] for record in event_records: message_id, immunisation_id = extract_trace_ids(record) notification_id = None try: - notification = create_mns_notification(record) - notification_id = notification.get("id", None) # generated UUID for MNS + mns_notification_payload = create_mns_notification(record) + notification_id = mns_notification_payload.get("id", None) # generated UUID for MNS logger.info("Processing message", trace_id=notification_id) - # TODO: Send notification to MNS API - # publish_to_mns(notification) + mns_pub_response = MnsService.publish_notification(mns_notification_payload) + if mns_pub_response["status_code"] != 201: + raise RuntimeError("MNS publish failed") logger.info( "Successfully created MNS notification", - trace_id={ + trace_ids={ "mns_notification_id": notification_id, }, ) @@ -40,31 +42,31 @@ def lambda_handler(event: events.SQSEvent, _: context.Context) -> dict: "error": str(e), }, ) - failed_message_ids.append({"itemIdentifier": message_id}) + batch_item_failures.append({"itemIdentifier": message_id}) - if failed_message_ids: - logger.warning(f"Batch completed with {len(failed_message_ids)} failures") + if batch_item_failures: + logger.warning(f"Batch completed with {len(batch_item_failures)} failures") else: logger.info(f"Successfully processed all {len(event_records)} messages") - return {"batchItemFailures": failed_message_ids} + return {"batchItemFailures": batch_item_failures} -def extract_trace_ids(record: dict) -> Tuple[str, Optional[str]]: +def extract_trace_ids(record: dict) -> Tuple[str, str | None]: """ Extract identifiers for tracing from SQS 
record. Returns: Tuple of (message_id, immunisation_id) """ - message_id = record.get("messageId", "unknown") + sqs_message_id = record.get("messageId", "unknown") immunisation_id = None try: - body = record.get("body", {}) - if isinstance(body, str): - body = json.loads(body) + sqs_event_body = record.get("body", {}) + if isinstance(sqs_event_body, str): + sqs_event_body = json.loads(sqs_event_body) - immunisation_id = body.get("dynamodb", {}).get("NewImage", {}).get("ImmsID", {}).get("S") + immunisation_id = sqs_event_body.get("dynamodb", {}).get("NewImage", {}).get("ImmsID", {}).get("S") except Exception as e: - logger.warning(f"Could not extract immunisation_id: {message_id}: {e}") + logger.warning(f"Could not extract immunisation_id: {immunisation_id}: {e}") - return message_id, immunisation_id + return sqs_message_id, immunisation_id diff --git a/lambdas/mns_publisher/src/mns_post_notification.py b/lambdas/mns_publisher/src/mns_post_notification.py deleted file mode 100644 index 657c9461be..0000000000 --- a/lambdas/mns_publisher/src/mns_post_notification.py +++ /dev/null @@ -1 +0,0 @@ -####### diff --git a/lambdas/mns_publisher/src/sqs_dynamo_utils.py b/lambdas/mns_publisher/src/sqs_dynamo_utils.py index 693e52fe84..5d1f5f0bd2 100644 --- a/lambdas/mns_publisher/src/sqs_dynamo_utils.py +++ b/lambdas/mns_publisher/src/sqs_dynamo_utils.py @@ -1,3 +1,5 @@ +from constants import DYNAMO_DB_TYPE_DESCRIPTORS + """ Recursion to fetch deeply nested values from DynamoDB stream events. Time complexity: O(n) where n is the total number of keys in the nested structure. 
@@ -33,18 +35,13 @@ def _unwrap_dynamodb_value(value): return value # DynamoDB type descriptors - if "S" in value: - return value["S"] - if "N" in value: - return value["N"] - if "BOOL" in value: - return value["BOOL"] - if "M" in value: - return value["M"] - if "L" in value: - return value["L"] if "NULL" in value: return None + # Check other DynamoDB types + for key in DYNAMO_DB_TYPE_DESCRIPTORS: + if key in value: + return value[key] + # Not a DynamoDB type, return as-is return value diff --git a/lambdas/mns_publisher/tests/test_lambda_handler.py b/lambdas/mns_publisher/tests/test_lambda_handler.py index e0d62c6fc4..3180d5c1ab 100644 --- a/lambdas/mns_publisher/tests/test_lambda_handler.py +++ b/lambdas/mns_publisher/tests/test_lambda_handler.py @@ -18,9 +18,7 @@ def setUpClass(cls): def test_extract_trace_ids_success_from_real_payload(self): """Test successful extraction using real SQS event structure.""" - record = self.sample_sqs_event # Assuming the file contains a single record - - message_id, immunisation_id = extract_trace_ids(record) + message_id, immunisation_id = extract_trace_ids(self.sample_sqs_event) self.assertEqual(message_id, "98ed30eb-829f-41df-8a73-57fef70cf161") self.assertEqual(immunisation_id, "d058014c-b0fd-4471-8db9-3316175eb825") @@ -80,29 +78,34 @@ def setUp(self): "type": "imms-vaccinations-2", } - @patch("lambda_handler.logger") + self.successful_mns_response = {"status_code": 201, "message": "Published"} + + @patch("lambda_handler.MnsService.publish_notification") @patch("lambda_handler.create_mns_notification") - def test_lambda_handler_single_record_success_real_payload(self, mock_create_notification, mock_logger): + @patch("lambda_handler.logger") + def test_lambda_handler_single_record_success_real_payload( + self, mock_logger, mock_create_notification, mock_mns_publish + ): """Test successful processing using real SQS event payload.""" mock_create_notification.return_value = self.sample_notification + mock_mns_publish.return_value 
= self.successful_mns_response event = {"Records": [self.sample_sqs_record]} result = lambda_handler(event, Mock()) self.assertEqual(result, {"batchItemFailures": []}) mock_create_notification.assert_called_once_with(self.sample_sqs_record) - - # Verify logging - self.assertEqual(mock_logger.info.call_count, 3) + mock_mns_publish.assert_called_once_with(self.sample_notification) mock_logger.exception.assert_not_called() - @patch("lambda_handler.logger") + @patch("lambda_handler.MnsService.publish_notification") @patch("lambda_handler.create_mns_notification") - def test_lambda_handler_multiple_records_all_success(self, mock_create_notification, mock_logger): + @patch("lambda_handler.logger") + def test_lambda_handler_multiple_records_all_success(self, mock_logger, mock_create_notification, mock_mns_publish): """Test successful processing of multiple SQS records.""" mock_create_notification.return_value = self.sample_notification + mock_mns_publish.return_value = self.successful_mns_response - # Create second record with different messageId record_2 = self.sample_sqs_record.copy() record_2["messageId"] = "different-message-id" @@ -111,11 +114,13 @@ def test_lambda_handler_multiple_records_all_success(self, mock_create_notificat self.assertEqual(result, {"batchItemFailures": []}) self.assertEqual(mock_create_notification.call_count, 2) + self.assertEqual(mock_mns_publish.call_count, 2) mock_logger.exception.assert_not_called() - @patch("lambda_handler.logger") + @patch("lambda_handler.MnsService.publish_notification") @patch("lambda_handler.create_mns_notification") - def test_lambda_handler_single_record_failure(self, mock_create_notification, mock_logger): + @patch("lambda_handler.logger") + def test_lambda_handler_single_record_failure(self, mock_logger, mock_create_notification, mock_mns_publish): """Test handling of a single record failure.""" mock_create_notification.side_effect = Exception("Processing error") @@ -126,12 +131,30 @@ def 
test_lambda_handler_single_record_failure(self, mock_create_notification, mo self.assertEqual(result, {"batchItemFailures": [{"itemIdentifier": expected_message_id}]}) mock_logger.exception.assert_called_once() mock_logger.warning.assert_called_once_with("Batch completed with 1 failures") + mock_mns_publish.assert_not_called() + @patch("lambda_handler.MnsService.publish_notification") + @patch("lambda_handler.create_mns_notification") @patch("lambda_handler.logger") + def test_lambda_handler_mns_publish_failure(self, mock_logger, mock_create_notification, mock_mns_publish): + """Test handling when MNS publish returns non-201 status.""" + mock_create_notification.return_value = self.sample_notification + mock_mns_publish.return_value = {"status_code": 400, "message": "Bad Request"} + + event = {"Records": [self.sample_sqs_record]} + result = lambda_handler(event, Mock()) + + expected_message_id = self.sample_sqs_record["messageId"] + self.assertEqual(result, {"batchItemFailures": [{"itemIdentifier": expected_message_id}]}) + mock_logger.exception.assert_called_once() + + @patch("lambda_handler.MnsService.publish_notification") @patch("lambda_handler.create_mns_notification") - def test_lambda_handler_partial_batch_failure(self, mock_create_notification, mock_logger): + @patch("lambda_handler.logger") + def test_lambda_handler_partial_batch_failure(self, mock_logger, mock_create_notification, mock_mns_publish): """Test partial batch failure where one record succeeds and one fails.""" - mock_create_notification.side_effect = [self.sample_notification, Exception("Processing error")] + mock_create_notification.return_value = self.sample_notification + mock_mns_publish.side_effect = [self.successful_mns_response, Exception("MNS API error")] record_2 = self.sample_sqs_record.copy() record_2["messageId"] = "msg-456" @@ -144,41 +167,31 @@ def test_lambda_handler_partial_batch_failure(self, mock_create_notification, mo self.assertEqual(mock_create_notification.call_count, 2) 
mock_logger.exception.assert_called_once() - @patch("lambda_handler.logger") + @patch("lambda_handler.MnsService.publish_notification") @patch("lambda_handler.create_mns_notification") - def test_lambda_handler_empty_records(self, mock_create_notification, mock_logger): + @patch("lambda_handler.logger") + def test_lambda_handler_empty_records(self, mock_logger, mock_create_notification, mock_mns_publish): """Test handling of empty Records list.""" event = {"Records": []} result = lambda_handler(event, Mock()) self.assertEqual(result, {"batchItemFailures": []}) mock_create_notification.assert_not_called() + mock_mns_publish.assert_not_called() mock_logger.info.assert_called_with("Successfully processed all 0 messages") - @patch("lambda_handler.logger") + @patch("lambda_handler.MnsService.publish_notification") @patch("lambda_handler.create_mns_notification") - def test_lambda_handler_notification_id_logged(self, mock_create_notification, mock_logger): - """Test that notification ID is properly extracted and logged.""" - mock_create_notification.return_value = self.sample_notification - - event = {"Records": [self.sample_sqs_record]} - lambda_handler(event, Mock()) - - # Check that logger.info was called with trace_id - info_calls = mock_logger.info.call_args_list - success_log_call = info_calls[1] - self.assertIn("trace_id", success_log_call[1]) - @patch("lambda_handler.logger") - @patch("lambda_handler.create_mns_notification") - def test_lambda_handler_logs_correct_trace_ids_on_failure(self, mock_create_notification, mock_logger): + def test_lambda_handler_logs_correct_trace_ids_on_failure( + self, mock_logger, mock_create_notification, mock_mns_publish + ): """Test that all trace IDs are logged when an error occurs.""" mock_create_notification.side_effect = Exception("Test error") event = {"Records": [self.sample_sqs_record]} lambda_handler(event, Mock()) - # Verify exception was called with trace_ids exception_call = mock_logger.exception.call_args 
self.assertEqual(exception_call[0][0], "Failed to process message") trace_ids = exception_call[1]["trace_ids"] @@ -186,3 +199,7 @@ def test_lambda_handler_logs_correct_trace_ids_on_failure(self, mock_create_noti self.assertEqual(trace_ids["message_id"], "98ed30eb-829f-41df-8a73-57fef70cf161") self.assertEqual(trace_ids["immunisation_id"], "d058014c-b0fd-4471-8db9-3316175eb825") self.assertEqual(trace_ids["error"], "Test error") + + +if __name__ == "__main__": + unittest.main() From de2b3725c9814e21544377734eb63e9640d8fbbc Mon Sep 17 00:00:00 2001 From: Akol125 Date: Fri, 20 Feb 2026 11:38:44 +0000 Subject: [PATCH 10/31] fix tf duplication --- infrastructure/instance/modules/mns_publisher/variables.tf | 5 ----- 1 file changed, 5 deletions(-) diff --git a/infrastructure/instance/modules/mns_publisher/variables.tf b/infrastructure/instance/modules/mns_publisher/variables.tf index a1dc708d32..af34cf20db 100644 --- a/infrastructure/instance/modules/mns_publisher/variables.tf +++ b/infrastructure/instance/modules/mns_publisher/variables.tf @@ -72,11 +72,6 @@ variable "system_alarm_sns_topic_arn" { description = "The ARN of the SNS Topic used for raising alerts to Slack for CW alarms." 
} -variable "resource_scope" { - type = string - description = "A deployment scoping strategy to use either the environment scope or sub-environment scope if it has one" -} - variable "resource_scope" { type = string description = < Date: Fri, 20 Feb 2026 12:21:01 +0000 Subject: [PATCH 11/31] bump test --- .../tests/test_create_notification.py | 202 ++++++++++++++---- 1 file changed, 162 insertions(+), 40 deletions(-) diff --git a/lambdas/mns_publisher/tests/test_create_notification.py b/lambdas/mns_publisher/tests/test_create_notification.py index 03ed816776..cc93fdf5f8 100644 --- a/lambdas/mns_publisher/tests/test_create_notification.py +++ b/lambdas/mns_publisher/tests/test_create_notification.py @@ -1,16 +1,22 @@ +import copy import json import unittest +from pathlib import Path from unittest.mock import MagicMock, patch from constants import IMMUNISATION_TYPE, SPEC_VERSION -from create_notification import calculate_age_at_vaccination, create_mns_notification +from create_notification import ( + calculate_age_at_vaccination, + create_mns_notification, + get_practitioner_details_from_pds, +) class TestCalculateAgeAtVaccination(unittest.TestCase): """Tests for age calculation at vaccination time.""" def test_age_calculation_yyyymmdd_format(self): - """Test age calculation with YYYYMMDD format (actual format from payload).""" + """Test age calculation with YYYYMMDD format.""" birth_date = "20040609" vaccination_date = "20260212" @@ -54,48 +60,131 @@ def test_age_calculation_infant(self): self.assertEqual(age, 0) + def test_age_calculation_leap_year_birthday(self): + """Test age calculation with leap year birthday.""" + birth_date = "20000229" + vaccination_date = "20240228" + + age = calculate_age_at_vaccination(birth_date, vaccination_date) + + self.assertEqual(age, 23) + + def test_age_calculation_same_day_different_year(self): + """Test age calculation for same day in different year.""" + birth_date = "20000101" + vaccination_date = "20250101" + + age = 
calculate_age_at_vaccination(birth_date, vaccination_date) + + self.assertEqual(age, 25) + + +class TestGetPractitionerDetailsFromPds(unittest.TestCase): + """Tests for get_practitioner_details_from_pds function.""" + + @patch("create_notification.pds_get_patient_details") + @patch("create_notification.logger") + def test_get_practitioner_success(self, mock_logger, mock_pds_get): + """Test successful retrieval of GP ODS code.""" + mock_pds_get.return_value = {"generalPractitioner": {"value": "Y12345"}} + + result = get_practitioner_details_from_pds("9481152782") + + self.assertEqual(result, "Y12345") + mock_pds_get.assert_called_once_with("9481152782") + mock_logger.warning.assert_not_called() + + @patch("create_notification.pds_get_patient_details") + @patch("create_notification.logger") + def test_get_practitioner_no_gp_details(self, mock_logger, mock_pds_get): + """Test when generalPractitioner is missing.""" + mock_pds_get.return_value = {"name": "John Doe"} + + result = get_practitioner_details_from_pds("9481152782") + + self.assertIsNone(result) + mock_logger.warning.assert_called_once_with("No patient details found for NHS number") + + @patch("create_notification.pds_get_patient_details") + @patch("create_notification.logger") + def test_get_practitioner_gp_is_none(self, mock_logger, mock_pds_get): + """Test when generalPractitioner is None.""" + mock_pds_get.return_value = {"generalPractitioner": None} + + result = get_practitioner_details_from_pds("9481152782") + + self.assertIsNone(result) + mock_logger.warning.assert_called_once() + + @patch("create_notification.pds_get_patient_details") + @patch("create_notification.logger") + def test_get_practitioner_no_value_field(self, mock_logger, mock_pds_get): + """Test when value field is missing from generalPractitioner.""" + mock_pds_get.return_value = {"generalPractitioner": {"system": "https://fhir.nhs.uk"}} + + result = get_practitioner_details_from_pds("9481152782") + + self.assertIsNone(result) + 
mock_logger.warning.assert_called_with("GP ODS code not found in practitioner details") + + @patch("create_notification.pds_get_patient_details") + @patch("create_notification.logger") + def test_get_practitioner_empty_value(self, mock_logger, mock_pds_get): + """Test when value is empty string.""" + mock_pds_get.return_value = {"generalPractitioner": {"value": ""}} + + result = get_practitioner_details_from_pds("9481152782") + + self.assertIsNone(result) + mock_logger.warning.assert_called_with("GP ODS code not found in practitioner details") + + @patch("create_notification.pds_get_patient_details") + @patch("create_notification.logger") + def test_get_practitioner_pds_exception(self, mock_logger, mock_pds_get): + """Test when PDS API raises exception.""" + mock_pds_get.side_effect = Exception("PDS API error") + + with self.assertRaises(Exception) as context: + get_practitioner_details_from_pds("9481152782") + + self.assertEqual(str(context.exception), "PDS API error") + mock_logger.exception.assert_called_once() + + @patch("create_notification.pds_get_patient_details") + @patch("create_notification.logger") + def test_get_practitioner_patient_details_none(self, mock_logger, mock_pds_get): + """Test when pds_get_patient_details returns None.""" + mock_pds_get.return_value = None + + with self.assertRaises(AttributeError): + get_practitioner_details_from_pds("9481152782") + class TestCreateMnsNotification(unittest.TestCase): """Tests for MNS notification creation.""" + @classmethod + def setUpClass(cls): + """Load the sample SQS event once for all tests.""" + sample_event_path = Path(__file__).parent.parent / "tests" / "sqs_event.json" + with open(sample_event_path, "r") as f: + raw_event = json.load(f) + + # Convert body from dict to JSON string (as it would be in real SQS) + if isinstance(raw_event.get("body"), dict): + raw_event["body"] = json.dumps(raw_event["body"]) + cls.sample_sqs_event = raw_event + def setUp(self): """Set up test fixtures.""" - 
self.sample_sqs_event = { - "messageId": "98ed30eb-829f-41df-8a73-57fef70cf161", - "body": json.dumps( - { - "eventID": "b1ba2a48eae68bf43a8cb49b400788c6", - "eventName": "INSERT", - "dynamodb": { - "NewImage": { - "ImmsID": {"S": "d058014c-b0fd-4471-8db9-3316175eb825"}, - "VaccineType": {"S": "hib"}, - "SupplierSystem": {"S": "TPP"}, - "DateTimeStamp": {"S": "2026-02-12T17:45:37+00:00"}, - "Imms": { - "M": { - "NHS_NUMBER": {"S": "9481152782"}, - "PERSON_DOB": {"S": "20040609"}, - "DATE_AND_TIME": {"S": "20260212T174437"}, - "VACCINE_TYPE": {"S": "hib"}, - "SITE_CODE": {"S": "B0C4P"}, - } - }, - "Operation": {"S": "CREATE"}, - } - }, - } - ), - } - self.expected_gp_ods_code = "Y12345" self.expected_immunisation_url = "https://int.api.service.nhs.uk/immunisation-fhir-api" @patch("create_notification.get_practitioner_details_from_pds") @patch("create_notification.get_service_url") @patch("create_notification.uuid.uuid4") - def test_create_mns_notification_success(self, mock_uuid, mock_get_service_url, mock_get_gp): - """Test successful MNS notification creation.""" + def test_create_mns_notification_success_with_real_payload(self, mock_uuid, mock_get_service_url, mock_get_gp): + """Test successful MNS notification creation using real SQS event.""" mock_uuid.return_value = MagicMock(hex="236a1d4a-5d69-4fa9-9c7f-e72bf505aa5b") mock_get_service_url.return_value = self.expected_immunisation_url mock_get_gp.return_value = self.expected_gp_ods_code @@ -113,8 +202,8 @@ def test_create_mns_notification_success(self, mock_uuid, mock_get_service_url, @patch("create_notification.get_practitioner_details_from_pds") @patch("create_notification.get_service_url") - def test_create_mns_notification_dataref_format(self, mock_get_service_url, mock_get_gp): - """Test dataref URL format is correct.""" + def test_create_mns_notification_dataref_format_real_payload(self, mock_get_service_url, mock_get_gp): + """Test dataref URL format is correct with real payload.""" 
mock_get_service_url.return_value = self.expected_immunisation_url mock_get_gp.return_value = self.expected_gp_ods_code @@ -125,8 +214,8 @@ def test_create_mns_notification_dataref_format(self, mock_get_service_url, mock @patch("create_notification.get_practitioner_details_from_pds") @patch("create_notification.get_service_url") - def test_create_mns_notification_filtering_fields(self, mock_get_service_url, mock_get_gp): - """Test all filtering fields are populated correctly.""" + def test_create_mns_notification_filtering_fields_real_payload(self, mock_get_service_url, mock_get_gp): + """Test all filtering fields are populated correctly with real payload.""" mock_get_service_url.return_value = self.expected_immunisation_url mock_get_gp.return_value = self.expected_gp_ods_code @@ -142,8 +231,8 @@ def test_create_mns_notification_filtering_fields(self, mock_get_service_url, mo @patch("create_notification.get_practitioner_details_from_pds") @patch("create_notification.get_service_url") - def test_create_mns_notification_age_calculation(self, mock_get_service_url, mock_get_gp): - """Test patient age is calculated correctly.""" + def test_create_mns_notification_age_calculation_real_payload(self, mock_get_service_url, mock_get_gp): + """Test patient age is calculated correctly with real payload.""" mock_get_service_url.return_value = self.expected_immunisation_url mock_get_gp.return_value = self.expected_gp_ods_code @@ -153,8 +242,8 @@ def test_create_mns_notification_age_calculation(self, mock_get_service_url, moc @patch("create_notification.get_practitioner_details_from_pds") @patch("create_notification.get_service_url") - def test_create_mns_notification_calls_get_practitioner(self, mock_get_service_url, mock_get_gp): - """Test get_practitioner_details_from_pds is called with correct NHS number.""" + def test_create_mns_notification_calls_get_practitioner_real_payload(self, mock_get_service_url, mock_get_gp): + """Test get_practitioner_details_from_pds is called 
with correct NHS number from real payload.""" mock_get_service_url.return_value = self.expected_immunisation_url mock_get_gp.return_value = self.expected_gp_ods_code @@ -219,6 +308,39 @@ def test_create_mns_notification_required_fields_present(self, mock_get_service_ for field in required_fields: self.assertIn(field, result, f"Required field '{field}' missing") + @patch("create_notification.get_practitioner_details_from_pds") + @patch("create_notification.get_service_url") + def test_create_mns_notification_missing_imms_data_field(self, mock_get_service_url, mock_get_gp): + """Test handling when a required field is missing from imms_data.""" + mock_get_service_url.return_value = self.expected_immunisation_url + mock_get_gp.return_value = self.expected_gp_ods_code + + incomplete_event = { + "messageId": "test-id", + "body": json.dumps({"dynamodb": {"NewImage": {"ImmsID": {"S": "test-id"}}}}), + } + + with self.assertRaises((KeyError, TypeError)): + create_mns_notification(incomplete_event) + + +@patch("create_notification.get_practitioner_details_from_pds") +@patch("create_notification.get_service_url") +def test_create_mns_notification_with_update_action(self, mock_get_service_url, mock_get_gp): + """Test notification creation with UPDATE action using real payload structure.""" + mock_get_service_url.return_value = self.expected_immunisation_url + mock_get_gp.return_value = self.expected_gp_ods_code + + update_event = copy.deepcopy(self.sample_sqs_event) + + update_event["body"]["dynamodb"]["NewImage"]["Operation"]["S"] = "UPDATE" + + result = create_mns_notification(update_event) + + self.assertEqual(result["filtering"]["action"], "UPDATE") + mock_get_service_url.assert_called() + mock_get_gp.assert_called() + if __name__ == "__main__": unittest.main() From 6290ba577aaa368314f1472e6afb652d2670633e Mon Sep 17 00:00:00 2001 From: Akol125 Date: Mon, 23 Feb 2026 11:43:06 +0000 Subject: [PATCH 12/31] add terraform and revert id_sync --- 
.../mns_publisher/mns_publisher_lambda.tf | 3 ++ .../modules/mns_publisher/variables.tf | 4 +-- .../src/exceptions/id_sync_exception.py | 6 ++++ lambdas/id_sync/src/ieds_db_operations.py | 10 +++--- lambdas/id_sync/src/pds_details.py | 31 +++++++++++++++++++ .../id_sync/tests/test_ieds_db_operations.py | 4 +-- .../src/common/api_clients/mns_service.py | 16 ++++++---- .../api_clients/test_mns_service.py | 4 +-- .../api_clients/test_pds_details.py | 15 --------- 9 files changed, 61 insertions(+), 32 deletions(-) create mode 100644 lambdas/id_sync/src/exceptions/id_sync_exception.py diff --git a/infrastructure/instance/modules/mns_publisher/mns_publisher_lambda.tf b/infrastructure/instance/modules/mns_publisher/mns_publisher_lambda.tf index 7d589fbeb5..3c6a5fadad 100644 --- a/infrastructure/instance/modules/mns_publisher/mns_publisher_lambda.tf +++ b/infrastructure/instance/modules/mns_publisher/mns_publisher_lambda.tf @@ -215,6 +215,9 @@ resource "aws_lambda_event_source_mapping" "mns_outbound_event_sqs_to_lambda" { function_name = aws_lambda_function.mns_publisher_lambda.arn batch_size = 10 enabled = true + + # Enables partial batch responses using `batchItemFailures` + function_response_types = ["ReportBatchItemFailures"] } resource "aws_cloudwatch_log_metric_filter" "mns_publisher_error_logs" { diff --git a/infrastructure/instance/modules/mns_publisher/variables.tf b/infrastructure/instance/modules/mns_publisher/variables.tf index af34cf20db..e659df38f1 100644 --- a/infrastructure/instance/modules/mns_publisher/variables.tf +++ b/infrastructure/instance/modules/mns_publisher/variables.tf @@ -76,8 +76,8 @@ variable "resource_scope" { type = string description = < d except Exception as e: logger.exception("Error updating patient ID") - raise PdsSyncException( + raise IdSyncException( message="Error updating patient ID", ) from e @@ -65,11 +65,11 @@ def get_items_from_patient_id(id: str) -> list: patient_pk = f"Patient#{id}" try: return 
paginate_items_for_patient_pk(patient_pk) - except PdsSyncException: + except IdSyncException: raise except Exception: logger.exception("Error querying items for patient PK") - raise PdsSyncException( + raise IdSyncException( message="Error querying items for patient PK", ) @@ -94,7 +94,7 @@ def paginate_items_for_patient_pk(patient_pk: str) -> list: if "Items" not in response: # Unexpected DynamoDB response shape - surface as PdsSyncException logger.exception("Unexpected DynamoDB response: missing 'Items'") - raise PdsSyncException( + raise IdSyncException( message="No Items in DynamoDB response", ) diff --git a/lambdas/id_sync/src/pds_details.py b/lambdas/id_sync/src/pds_details.py index 27492ceb7d..62ef6c247d 100644 --- a/lambdas/id_sync/src/pds_details.py +++ b/lambdas/id_sync/src/pds_details.py @@ -2,6 +2,37 @@ Operations related to PDS (Patient Demographic Service) """ +import tempfile + +from common.api_clients.authentication import AppRestrictedAuth, Service +from common.api_clients.pds_service import PdsService +from common.cache import Cache +from common.clients import get_secrets_manager_client, logger +from exceptions.id_sync_exception import IdSyncException +from os_vars import get_pds_env + +pds_env = get_pds_env() +safe_tmp_dir = tempfile.mkdtemp(dir="/tmp") + + +# Get Patient details from external service PDS using NHS number from MNS notification +def pds_get_patient_details(nhs_number: str) -> dict: + try: + cache = Cache(directory=safe_tmp_dir) + authenticator = AppRestrictedAuth( + service=Service.PDS, + secret_manager_client=get_secrets_manager_client(), + environment=pds_env, + cache=cache, + ) + pds_service = PdsService(authenticator, pds_env) + patient = pds_service.get_patient_details(nhs_number) + return patient + except Exception as e: + msg = "Error retrieving patient details from PDS" + logger.exception(msg) + raise IdSyncException(message=msg) from e + def get_nhs_number_from_pds_resource(pds_resource: dict) -> str: """Simple helper to 
get the NHS Number from a PDS Resource. No handling as this is a mandatory field in the PDS diff --git a/lambdas/id_sync/tests/test_ieds_db_operations.py b/lambdas/id_sync/tests/test_ieds_db_operations.py index 39c29c8f3f..dd5fac54ac 100644 --- a/lambdas/id_sync/tests/test_ieds_db_operations.py +++ b/lambdas/id_sync/tests/test_ieds_db_operations.py @@ -2,7 +2,7 @@ from unittest.mock import MagicMock, patch import ieds_db_operations -from common.api_clients.errors import PdsSyncException +from exceptions.id_sync_exception import IdSyncException from ieds_db_operations import extract_patient_resource_from_item @@ -419,7 +419,7 @@ def test_ieds_update_patient_id_update_exception(self): ieds_db_operations.ieds_update_patient_id(old_id, new_id, mock_items) exception = context.exception - self.assertIsInstance(exception, PdsSyncException) + self.assertIsInstance(exception, IdSyncException) self.assertEqual(exception.message, "Error updating patient ID") # Verify transact was attempted diff --git a/lambdas/shared/src/common/api_clients/mns_service.py b/lambdas/shared/src/common/api_clients/mns_service.py index 0c2f3569ca..7b27147bf1 100644 --- a/lambdas/shared/src/common/api_clients/mns_service.py +++ b/lambdas/shared/src/common/api_clients/mns_service.py @@ -12,7 +12,7 @@ SQS_ARN = os.getenv("SQS_ARN") apigee_env = os.getenv("APIGEE_ENVIRONMENT", "int") -MNS_URL = ( +MNS_BASE_URL = ( "https://api.service.nhs.uk/multicast-notification-service" if apigee_env == "prod" else "https://int.api.service.nhs.uk/multicast-notification-service" @@ -45,7 +45,7 @@ def __init__(self, authenticator: AppRestrictedAuth): def subscribe_notification(self) -> dict | None: response = requests.request( "POST", - f"{MNS_URL}/subscriptions", + f"{MNS_BASE_URL}/subscriptions", headers=self.request_headers, timeout=15, data=json.dumps(self.subscription_payload), @@ -57,9 +57,9 @@ def subscribe_notification(self) -> dict | None: def get_subscription(self) -> dict | None: response = 
request_with_retry_backoff( - "GET", f"{MNS_URL}/subscriptions", headers=self.request_headers, timeout=10 + "GET", f"{MNS_BASE_URL}/subscriptions", headers=self.request_headers, timeout=10 ) - logging.info(f"GET {MNS_URL}/subscriptions") + logging.info(f"GET {MNS_BASE_URL}/subscriptions") logging.debug(f"Headers: {self.request_headers}") if response.status_code == 200: @@ -95,7 +95,7 @@ def check_subscription(self) -> dict: def delete_subscription(self, subscription_id: str) -> str: """Delete the subscription by ID.""" - url = f"{MNS_URL}/subscriptions/{subscription_id}" + url = f"{MNS_BASE_URL}/subscriptions/{subscription_id}" response = request_with_retry_backoff("DELETE", url, headers=self.request_headers, timeout=10) if response.status_code == 204: logging.info(f"Deleted subscription {subscription_id}") @@ -121,7 +121,11 @@ def check_delete_subscription(self): def publish_notification(self, notification_payload) -> dict | None: self.request_headers["Content-Type"] = "application/cloudevents+json" response = requests.request( - "POST", f"{MNS_URL}/events", headers=self.request_headers, timeout=15, data=json.dumps(notification_payload) + "POST", + f"{MNS_BASE_URL}/events", + headers=self.request_headers, + timeout=15, + data=json.dumps(notification_payload), ) if response.status_code in (200, 201): return response.json() diff --git a/lambdas/shared/tests/test_common/api_clients/test_mns_service.py b/lambdas/shared/tests/test_common/api_clients/test_mns_service.py index 4a253420d4..c8c167608d 100644 --- a/lambdas/shared/tests/test_common/api_clients/test_mns_service.py +++ b/lambdas/shared/tests/test_common/api_clients/test_mns_service.py @@ -12,7 +12,7 @@ UnhandledResponseError, raise_error_response, ) -from common.api_clients.mns_service import MNS_URL, MnsService +from common.api_clients.mns_service import MNS_BASE_URL, MnsService SQS_ARN = "arn:aws:sqs:eu-west-2:123456789012:my-queue" @@ -148,7 +148,7 @@ def test_delete_subscription_success(self, mock_delete): 
result = service.delete_subscription("sub-id-123") self.assertTrue(result) mock_delete.assert_called_with( - method="DELETE", url=f"{MNS_URL}/subscriptions/sub-id-123", headers=service.request_headers, timeout=10 + method="DELETE", url=f"{MNS_BASE_URL}/subscriptions/sub-id-123", headers=service.request_headers, timeout=10 ) @patch("common.api_clients.mns_service.requests.request") diff --git a/lambdas/shared/tests/test_common/api_clients/test_pds_details.py b/lambdas/shared/tests/test_common/api_clients/test_pds_details.py index dbbe483cf2..f833c10d07 100644 --- a/lambdas/shared/tests/test_common/api_clients/test_pds_details.py +++ b/lambdas/shared/tests/test_common/api_clients/test_pds_details.py @@ -207,18 +207,3 @@ def test_pds_get_patient_details(self): # Assert - function should extract the value from first identifier self.assertEqual(result, mock_pds_response) self.mock_pds_service_instance.get_patient_details.assert_called_once_with(test_nhs_number) - - # def test_get_nhs_number_from_pds_resource(self): - # """Test that the NHS Number is retrieved from a full PDS patient resource.""" - # mock_pds_resource = { - # "identifier": [ - # { - # "system": "https://fhir.nhs.uk/Id/nhs-number", - # "value": "123456789012", - # } - # ] - # } - - # result = get_nhs_number_from_pds_resource(mock_pds_resource) - - # self.assertEqual(result, "123456789012") From 44d469c73462733142ff4072aaf1ed06402702eb Mon Sep 17 00:00:00 2001 From: Akol125 Date: Mon, 23 Feb 2026 13:28:03 +0000 Subject: [PATCH 13/31] fix mns, test, pds --- .../mns_publisher/src/create_notification.py | 10 +++-- lambdas/mns_publisher/src/lambda_handler.py | 8 +++- lambdas/mns_subscription/src/subscribe_mns.py | 2 +- .../mns_subscription/src/unsubscribe_mns.py | 2 +- .../src/common/api_clients/mns_service.py | 28 +++++++------ .../src/common/api_clients}/mns_setup.py | 0 .../api_clients/test_mns_service.py | 40 ++++++++++++++----- .../api_clients}/test_mns_setup.py | 8 ++-- 8 files changed, 64 insertions(+), 
34 deletions(-) rename lambdas/{mns_subscription/src => shared/src/common/api_clients}/mns_setup.py (100%) rename lambdas/{mns_subscription/tests => shared/tests/test_common/api_clients}/test_mns_setup.py (79%) diff --git a/lambdas/mns_publisher/src/create_notification.py b/lambdas/mns_publisher/src/create_notification.py index 96008e3337..46bc3cadf5 100644 --- a/lambdas/mns_publisher/src/create_notification.py +++ b/lambdas/mns_publisher/src/create_notification.py @@ -68,12 +68,14 @@ def calculate_age_at_vaccination(birth_date: str, vaccination_date: str) -> int: def get_practitioner_details_from_pds(nhs_number: str) -> str | None: try: patient_details = pds_get_patient_details(nhs_number) - patient_gp = patient_details.get("generalPractitioner") - if not patient_gp: - logger.warning("No patient details found for NHS number") + + general_practitioners = patient_details.get("generalPractitioner", []) + if not general_practitioners or len(general_practitioners) == 0: + logger.warning("No GP details found for patient") return None - gp_ods_code = patient_gp.get("value") + patient_gp = general_practitioners[0] + gp_ods_code = patient_gp.get("identifier", {}).get("value") if not gp_ods_code: logger.warning("GP ODS code not found in practitioner details") return None diff --git a/lambdas/mns_publisher/src/lambda_handler.py b/lambdas/mns_publisher/src/lambda_handler.py index 6576fa82d3..f0b0568b96 100644 --- a/lambdas/mns_publisher/src/lambda_handler.py +++ b/lambdas/mns_publisher/src/lambda_handler.py @@ -1,12 +1,15 @@ import json +import os from typing import Tuple from aws_lambda_typing import context, events -from common.api_clients.mns_service import MnsService +from common.api_clients.mns_setup import get_mns_service from common.clients import logger from create_notification import create_mns_notification +apigee_env = os.getenv("APIGEE_ENVIRONMENT", "int") + def lambda_handler(event: events.SQSEvent, _: context.Context) -> dict[str, list]: event_records = 
event.get("Records", []) @@ -21,7 +24,8 @@ def lambda_handler(event: events.SQSEvent, _: context.Context) -> dict[str, list notification_id = mns_notification_payload.get("id", None) # generated UUID for MNS logger.info("Processing message", trace_id=notification_id) - mns_pub_response = MnsService.publish_notification(mns_notification_payload) + mns_service = get_mns_service(mns=apigee_env) + mns_pub_response = mns_service.publish_notification(mns_notification_payload) if mns_pub_response["status_code"] != 201: raise RuntimeError("MNS publish failed") diff --git a/lambdas/mns_subscription/src/subscribe_mns.py b/lambdas/mns_subscription/src/subscribe_mns.py index 111871df0e..7fe0cd7c3c 100644 --- a/lambdas/mns_subscription/src/subscribe_mns.py +++ b/lambdas/mns_subscription/src/subscribe_mns.py @@ -1,7 +1,7 @@ import logging import os -from mns_setup import get_mns_service +from common.api_clients.mns_setup import get_mns_service apigee_env = os.getenv("APIGEE_ENVIRONMENT", "int") diff --git a/lambdas/mns_subscription/src/unsubscribe_mns.py b/lambdas/mns_subscription/src/unsubscribe_mns.py index 1022cea2ee..10780503ae 100644 --- a/lambdas/mns_subscription/src/unsubscribe_mns.py +++ b/lambdas/mns_subscription/src/unsubscribe_mns.py @@ -1,7 +1,7 @@ import logging import os -from mns_setup import get_mns_service +from common.api_clients.mns_setup import get_mns_service apigee_env = os.getenv("APIGEE_ENVIRONMENT", "int") diff --git a/lambdas/shared/src/common/api_clients/mns_service.py b/lambdas/shared/src/common/api_clients/mns_service.py index 7b27147bf1..9aeb21f479 100644 --- a/lambdas/shared/src/common/api_clients/mns_service.py +++ b/lambdas/shared/src/common/api_clients/mns_service.py @@ -23,11 +23,6 @@ class MnsService: def __init__(self, authenticator: AppRestrictedAuth): self.authenticator = authenticator self.access_token = self.authenticator.get_access_token() - self.request_headers = { - "Content-Type": "application/fhir+json", - "Authorization": f"Bearer 
{self.access_token}", - "X-Correlation-ID": str(uuid.uuid4()), - } self.subscription_payload = { "resourceType": "Subscription", "status": "requested", @@ -42,11 +37,19 @@ def __init__(self, authenticator: AppRestrictedAuth): logging.info(f"Using SQS ARN for subscription: {SQS_ARN}") + def _build_headers(self, content_type: str = "application/fhir+json") -> dict: + """Build request headers with authentication and correlation ID.""" + return { + "Content-Type": content_type, + "Authorization": f"Bearer {self.access_token}", + "X-Correlation-ID": str(uuid.uuid4()), + } + def subscribe_notification(self) -> dict | None: response = requests.request( "POST", f"{MNS_BASE_URL}/subscriptions", - headers=self.request_headers, + headers=self._build_headers(), timeout=15, data=json.dumps(self.subscription_payload), ) @@ -56,11 +59,11 @@ def subscribe_notification(self) -> dict | None: raise_error_response(response) def get_subscription(self) -> dict | None: - response = request_with_retry_backoff( - "GET", f"{MNS_BASE_URL}/subscriptions", headers=self.request_headers, timeout=10 - ) + """Retrieve existing subscription for this SQS ARN.""" + headers = self._build_headers() + response = request_with_retry_backoff("GET", f"{MNS_BASE_URL}/subscriptions", headers, timeout=10) logging.info(f"GET {MNS_BASE_URL}/subscriptions") - logging.debug(f"Headers: {self.request_headers}") + logging.debug(f"Headers: {headers}") if response.status_code == 200: bundle = response.json() @@ -96,7 +99,7 @@ def check_subscription(self) -> dict: def delete_subscription(self, subscription_id: str) -> str: """Delete the subscription by ID.""" url = f"{MNS_BASE_URL}/subscriptions/{subscription_id}" - response = request_with_retry_backoff("DELETE", url, headers=self.request_headers, timeout=10) + response = request_with_retry_backoff("DELETE", url, headers=self._build_headers(), timeout=10) if response.status_code == 204: logging.info(f"Deleted subscription {subscription_id}") return "Subscription 
Successfully Deleted..." @@ -119,11 +122,10 @@ def check_delete_subscription(self): return f"Error deleting subscription: {str(e)}" def publish_notification(self, notification_payload) -> dict | None: - self.request_headers["Content-Type"] = "application/cloudevents+json" response = requests.request( "POST", f"{MNS_BASE_URL}/events", - headers=self.request_headers, + headers=self._build_headers(content_type="application/cloudevents+json"), timeout=15, data=json.dumps(notification_payload), ) diff --git a/lambdas/mns_subscription/src/mns_setup.py b/lambdas/shared/src/common/api_clients/mns_setup.py similarity index 100% rename from lambdas/mns_subscription/src/mns_setup.py rename to lambdas/shared/src/common/api_clients/mns_setup.py diff --git a/lambdas/shared/tests/test_common/api_clients/test_mns_service.py b/lambdas/shared/tests/test_common/api_clients/test_mns_service.py index c8c167608d..3cc9daab9e 100644 --- a/lambdas/shared/tests/test_common/api_clients/test_mns_service.py +++ b/lambdas/shared/tests/test_common/api_clients/test_mns_service.py @@ -12,7 +12,7 @@ UnhandledResponseError, raise_error_response, ) -from common.api_clients.mns_service import MNS_BASE_URL, MnsService +from common.api_clients.mns_service import MnsService SQS_ARN = "arn:aws:sqs:eu-west-2:123456789012:my-queue" @@ -138,18 +138,34 @@ def test_check_subscription_creates_if_not_found(self, mock_request): self.assertEqual(result, {"subscriptionId": "abc123"}) self.assertEqual(mock_request.call_count, 2) - @patch("common.api_clients.mns_service.requests.request") - def test_delete_subscription_success(self, mock_delete): + @patch("common.api_clients.mns_service.request_with_retry_backoff") + def test_delete_subscription_success(self, mock_retry_request): + """Test successful subscription deletion.""" mock_response = MagicMock() mock_response.status_code = 204 - mock_delete.return_value = mock_response + mock_retry_request.return_value = mock_response service = MnsService(self.authenticator) 
result = service.delete_subscription("sub-id-123") - self.assertTrue(result) - mock_delete.assert_called_with( - method="DELETE", url=f"{MNS_BASE_URL}/subscriptions/sub-id-123", headers=service.request_headers, timeout=10 - ) + + self.assertEqual(result, "Subscription Successfully Deleted...") + + # Verify the request was made correctly + mock_retry_request.assert_called_once() + + # Get call arguments + args, kwargs = mock_retry_request.call_args + + # Verify method and URL + self.assertEqual(args[0], "DELETE") + self.assertIn("/subscriptions/sub-id-123", args[1]) + + # Verify headers exist + self.assertIn("headers", kwargs) + self.assertIn("Authorization", kwargs["headers"]) + + # Verify timeout + self.assertEqual(kwargs["timeout"], 10) @patch("common.api_clients.mns_service.requests.request") def test_delete_subscription_401(self, mock_delete): @@ -296,7 +312,13 @@ def test_publish_notification_success(self, mock_request): result = service.publish_notification(notification_payload) self.assertEqual(result["status"], "published") - self.assertEqual(service.request_headers["Content-Type"], "application/cloudevents+json") + + # Verify the request was made correctly + mock_request.assert_called_once() + call_args = mock_request.call_args + + headers = call_args[1]["headers"] + self.assertEqual(headers["Content-Type"], "application/cloudevents+json") mock_request.assert_called_once() @patch("common.api_clients.mns_service.requests.request") diff --git a/lambdas/mns_subscription/tests/test_mns_setup.py b/lambdas/shared/tests/test_common/api_clients/test_mns_setup.py similarity index 79% rename from lambdas/mns_subscription/tests/test_mns_setup.py rename to lambdas/shared/tests/test_common/api_clients/test_mns_setup.py index 53aa67941a..06fe1959cf 100644 --- a/lambdas/mns_subscription/tests/test_mns_setup.py +++ b/lambdas/shared/tests/test_common/api_clients/test_mns_setup.py @@ -1,13 +1,13 @@ import unittest from unittest.mock import MagicMock, patch -from mns_setup 
import get_mns_service +from common.api_clients.mns_setup import get_mns_service class TestGetMnsService(unittest.TestCase): - @patch("mns_setup.boto3.client") - @patch("mns_setup.AppRestrictedAuth") - @patch("mns_setup.MnsService") + @patch("common.api_clients.mns_setup.boto3.client") + @patch("common.api_clients.mns_setup.AppRestrictedAuth") + @patch("common.api_clients.mns_setup.MnsService") def test_get_mns_service(self, mock_mns_service, mock_app_auth, mock_boto_client): # Arrange mock_auth_instance = MagicMock() From 565a765c333d94e6c1473ddf4880ad77caba6f2d Mon Sep 17 00:00:00 2001 From: Akol125 Date: Mon, 23 Feb 2026 14:51:16 +0000 Subject: [PATCH 14/31] add gp identifier --- .../mns_publisher/mns_publisher_lambda.tf | 6 +- lambdas/id_sync/src/id_sync.py | 12 +-- lambdas/id_sync/tests/test_id_sync.py | 18 ++-- lambdas/mns_publisher/src/constants.py | 2 +- .../mns_publisher/src/create_notification.py | 17 +++- .../tests/test_create_notification.py | 88 ++++++++++++++++++- .../tests/test_lambda_handler.py | 84 ++++++++++++------ 7 files changed, 177 insertions(+), 50 deletions(-) diff --git a/infrastructure/instance/modules/mns_publisher/mns_publisher_lambda.tf b/infrastructure/instance/modules/mns_publisher/mns_publisher_lambda.tf index 3c6a5fadad..149322d1f0 100644 --- a/infrastructure/instance/modules/mns_publisher/mns_publisher_lambda.tf +++ b/infrastructure/instance/modules/mns_publisher/mns_publisher_lambda.tf @@ -191,9 +191,9 @@ resource "aws_lambda_function" "mns_publisher_lambda" { environment { variables = { - SPLUNK_FIREHOSE_NAME = var.splunk_firehose_stream_name - "IMMUNIZATION_ENV" = var.resource_scope, - "IMMUNIZATION_BASE_PATH" = var.sub_environment + SPLUNK_FIREHOSE_NAME = var.splunk_firehose_stream_name + IMMUNIZATION_ENV = var.resource_scope, + IMMUNIZATION_BASE_PATH = var.sub_environment } } diff --git a/lambdas/id_sync/src/id_sync.py b/lambdas/id_sync/src/id_sync.py index 51a1276f00..803417ff1c 100644 --- a/lambdas/id_sync/src/id_sync.py 
+++ b/lambdas/id_sync/src/id_sync.py @@ -1,16 +1,16 @@ """ - Parses the incoming AWS event into `AwsLambdaEvent` and iterate its `records`. - Delegate each record to `process_record` and collect `nhs_number` from each result. -- If any record has status == "error" raise `PdsSyncException` with aggregated nhs_numbers. -- Any unexpected error is wrapped into `PdsSyncException(message="Error processing id_sync event")`. +- If any record has status == "error" raise `IdSyncException` with aggregated nhs_numbers. +- Any unexpected error is wrapped into `IdSyncException(message="Error processing id_sync event")`. """ from typing import Any, Dict -from common.api_clients.errors import PdsSyncException from common.aws_lambda_event import AwsLambdaEvent from common.clients import STREAM_NAME, logger from common.log_decorator import logging_decorator +from exceptions.id_sync_exception import IdSyncException from record_processor import process_record @@ -34,7 +34,7 @@ def handler(event_data: Dict[str, Any], _context) -> Dict[str, Any]: error_count += 1 if error_count > 0: - raise PdsSyncException( + raise IdSyncException( message=f"Processed {len(records)} records with {error_count} errors", ) @@ -43,10 +43,10 @@ def handler(event_data: Dict[str, Any], _context) -> Dict[str, Any]: logger.info("id_sync handler completed: %s", response) return response - except PdsSyncException as e: + except IdSyncException as e: logger.exception(f"id_sync error: {e.message}") raise except Exception: msg = "Error processing id_sync event" logger.exception(msg) - raise PdsSyncException(message=msg) + raise IdSyncException(message=msg) diff --git a/lambdas/id_sync/tests/test_id_sync.py b/lambdas/id_sync/tests/test_id_sync.py index 673ad3f42b..619ffd9818 100644 --- a/lambdas/id_sync/tests/test_id_sync.py +++ b/lambdas/id_sync/tests/test_id_sync.py @@ -3,7 +3,7 @@ with patch("common.log_decorator.logging_decorator") as mock_decorator: mock_decorator.return_value = lambda f: f # Pass-through 
decorator - from common.api_clients.errors import PdsSyncException + from exceptions.id_sync_exception import IdSyncException from id_sync import handler @@ -93,7 +93,7 @@ def test_handler_error_single_record(self): } # Call handler - with self.assertRaises(PdsSyncException) as exception_context: + with self.assertRaises(IdSyncException) as exception_context: handler(self.single_sqs_event, None) exception = exception_context.exception @@ -117,7 +117,7 @@ def test_handler_mixed_success_error(self): ] # Call handler - with self.assertRaises(PdsSyncException) as exception_context: + with self.assertRaises(IdSyncException) as exception_context: handler(self.multi_sqs_event, None) error = exception_context.exception @@ -139,7 +139,7 @@ def test_handler_all_records_fail(self): ] # Call handler - with self.assertRaises(PdsSyncException) as exception_context: + with self.assertRaises(IdSyncException) as exception_context: handler(self.multi_sqs_event, None) exception = exception_context.exception # Assertions @@ -187,7 +187,7 @@ def test_handler_aws_lambda_event_exception(self): self.mock_aws_lambda_event.side_effect = Exception("AwsLambdaEvent creation failed") # Call handler - with self.assertRaises(PdsSyncException) as exception_context: + with self.assertRaises(IdSyncException) as exception_context: handler(self.single_sqs_event, None) result = exception_context.exception @@ -208,7 +208,7 @@ def test_handler_process_record_exception(self): self.mock_process_record.side_effect = Exception("Process record failed") # Call handler - with self.assertRaises(PdsSyncException) as exception_context: + with self.assertRaises(IdSyncException) as exception_context: handler(self.single_sqs_event, None) exception = exception_context.exception # Assertions @@ -233,12 +233,12 @@ def test_handler_process_record_missing_nhs_number(self): } # Call handler and expect exception - with self.assertRaises(PdsSyncException) as exception_context: + with self.assertRaises(IdSyncException) as 
exception_context: handler(self.single_sqs_event, None) exception = exception_context.exception - self.assertIsInstance(exception, PdsSyncException) + self.assertIsInstance(exception, IdSyncException) self.assertEqual(exception.message, "Processed 1 records with 1 errors") self.mock_logger.exception.assert_called_once_with(f"id_sync error: {exception.message}") @@ -275,7 +275,7 @@ def test_handler_error_count_tracking(self): ] # Call handler - with self.assertRaises(PdsSyncException) as exception_context: + with self.assertRaises(IdSyncException) as exception_context: handler(self.multi_sqs_event, None) exception = exception_context.exception # Assertions - should track 2 errors out of 4 records diff --git a/lambdas/mns_publisher/src/constants.py b/lambdas/mns_publisher/src/constants.py index 61d1e45550..82f76e5739 100644 --- a/lambdas/mns_publisher/src/constants.py +++ b/lambdas/mns_publisher/src/constants.py @@ -2,7 +2,7 @@ # Static constants for the MNS notification creation process SPEC_VERSION = "1.0" -IMMUNISATION_TYPE = "imms-vaccinations-2" +IMMUNISATION_TYPE = "imms-vaccinations-1" # Fields from the incoming SQS message that forms part of the base schema and filtering attributes for MNS notifications diff --git a/lambdas/mns_publisher/src/create_notification.py b/lambdas/mns_publisher/src/create_notification.py index 46bc3cadf5..469eb0ea6f 100644 --- a/lambdas/mns_publisher/src/create_notification.py +++ b/lambdas/mns_publisher/src/create_notification.py @@ -75,11 +75,26 @@ def get_practitioner_details_from_pds(nhs_number: str) -> str | None: return None patient_gp = general_practitioners[0] - gp_ods_code = patient_gp.get("identifier", {}).get("value") + patient_gp_identifier = patient_gp.get("identifier", {}) + + gp_ods_code = patient_gp_identifier.get("value") if not gp_ods_code: logger.warning("GP ODS code not found in practitioner details") return None + # Check if registration is current + period = patient_gp_identifier.get("period", {}) + 
gp_period_end_date = period.get("end", None) + + if gp_period_end_date: + # Parse end date (format: YYYY-MM-DD) + end_date = datetime.strptime(gp_period_end_date, "%Y-%m-%d").date() + today = datetime.now().date() + + if end_date < today: + logger.warning("GP registration has ended") + return None + return gp_ods_code except Exception as error: logger.exception("Failed to get practitioner details from pds", error) diff --git a/lambdas/mns_publisher/tests/test_create_notification.py b/lambdas/mns_publisher/tests/test_create_notification.py index cc93fdf5f8..85890ea2e0 100644 --- a/lambdas/mns_publisher/tests/test_create_notification.py +++ b/lambdas/mns_publisher/tests/test_create_notification.py @@ -86,7 +86,7 @@ class TestGetPractitionerDetailsFromPds(unittest.TestCase): @patch("create_notification.logger") def test_get_practitioner_success(self, mock_logger, mock_pds_get): """Test successful retrieval of GP ODS code.""" - mock_pds_get.return_value = {"generalPractitioner": {"value": "Y12345"}} + mock_pds_get.return_value = {"generalPractitioner": [{"identifier": {"value": "Y12345"}}]} result = get_practitioner_details_from_pds("9481152782") @@ -103,7 +103,7 @@ def test_get_practitioner_no_gp_details(self, mock_logger, mock_pds_get): result = get_practitioner_details_from_pds("9481152782") self.assertIsNone(result) - mock_logger.warning.assert_called_once_with("No patient details found for NHS number") + mock_logger.warning.assert_called_once_with("No GP details found for patient") @patch("create_notification.pds_get_patient_details") @patch("create_notification.logger") @@ -120,7 +120,7 @@ def test_get_practitioner_gp_is_none(self, mock_logger, mock_pds_get): @patch("create_notification.logger") def test_get_practitioner_no_value_field(self, mock_logger, mock_pds_get): """Test when value field is missing from generalPractitioner.""" - mock_pds_get.return_value = {"generalPractitioner": {"system": "https://fhir.nhs.uk"}} + mock_pds_get.return_value = 
{"generalPractitioner": [{"identifier": {}}]} result = get_practitioner_details_from_pds("9481152782") @@ -131,7 +131,7 @@ def test_get_practitioner_no_value_field(self, mock_logger, mock_pds_get): @patch("create_notification.logger") def test_get_practitioner_empty_value(self, mock_logger, mock_pds_get): """Test when value is empty string.""" - mock_pds_get.return_value = {"generalPractitioner": {"value": ""}} + mock_pds_get.return_value = {"generalPractitioner": [{"identifier": {"value": ""}}]} result = get_practitioner_details_from_pds("9481152782") @@ -342,5 +342,85 @@ def test_create_mns_notification_with_update_action(self, mock_get_service_url, mock_get_gp.assert_called() +@patch("create_notification.pds_get_patient_details") +@patch("create_notification.logger") +def test_get_practitioner_success_no_end_date(self, mock_logger, mock_pds_get): + """Test successful retrieval when no end date (current registration).""" + mock_pds_get.return_value = { + "generalPractitioner": [{"identifier": {"value": "Y12345", "period": {"start": "2024-01-01"}}}] + } + + result = get_practitioner_details_from_pds("9481152782") + + self.assertEqual(result, "Y12345") + mock_logger.warning.assert_not_called() + + +@patch("create_notification.pds_get_patient_details") +@patch("create_notification.logger") +def test_get_practitioner_success_future_end_date(self, mock_logger, mock_pds_get): + """Test successful retrieval when end date is in the future.""" + mock_pds_get.return_value = { + "generalPractitioner": [ + {"identifier": {"value": "Y12345", "period": {"start": "2024-01-01", "end": "2030-12-31"}}} + ] + } + + result = get_practitioner_details_from_pds("9481152782") + + self.assertEqual(result, "Y12345") + mock_logger.warning.assert_not_called() + + +@patch("create_notification.pds_get_patient_details") +@patch("create_notification.logger") +def test_get_practitioner_expired_registration(self, mock_logger, mock_pds_get): + """Test when GP registration has ended (expired).""" + 
mock_pds_get.return_value = { + "generalPractitioner": [ + {"identifier": {"value": "Y12345", "period": {"start": "2020-01-01", "end": "2023-12-31"}}} + ] + } + + result = get_practitioner_details_from_pds("9481152782") + + self.assertIsNone(result) + mock_logger.warning.assert_called_with( + "GP registration has ended", + extra={"nhs_number": "9481152782", "gp_ods_code": "Y12345", "end_date": "2023-12-31"}, + ) + + +@patch("create_notification.pds_get_patient_details") +@patch("create_notification.logger") +def test_get_practitioner_invalid_end_date_format(self, mock_logger, mock_pds_get): + """Test when end date has invalid format - should still return GP.""" + mock_pds_get.return_value = { + "generalPractitioner": [ + {"identifier": {"value": "Y12345", "period": {"start": "2024-01-01", "end": "invalid-date"}}} + ] + } + + result = get_practitioner_details_from_pds("9481152782") + + # Should still return GP even with invalid date + self.assertEqual(result, "Y12345") + mock_logger.warning.assert_called_with( + "Invalid end date format in GP registration", extra={"nhs_number": "9481152782", "end_date": "invalid-date"} + ) + + +@patch("create_notification.pds_get_patient_details") +@patch("create_notification.logger") +def test_get_practitioner_no_period_field(self, mock_logger, mock_pds_get): + """Test when period field is missing entirely.""" + mock_pds_get.return_value = {"generalPractitioner": [{"identifier": {"value": "Y12345"}}]} + + result = get_practitioner_details_from_pds("9481152782") + + self.assertEqual(result, "Y12345") + mock_logger.warning.assert_not_called() + + if __name__ == "__main__": unittest.main() diff --git a/lambdas/mns_publisher/tests/test_lambda_handler.py b/lambdas/mns_publisher/tests/test_lambda_handler.py index 3180d5c1ab..c59e613e45 100644 --- a/lambdas/mns_publisher/tests/test_lambda_handler.py +++ b/lambdas/mns_publisher/tests/test_lambda_handler.py @@ -14,7 +14,13 @@ def setUpClass(cls): """Load the sample SQS event once for all 
tests.""" sample_event_path = Path(__file__).parent.parent / "tests/sqs_event.json" with open(sample_event_path, "r") as f: - cls.sample_sqs_event = json.load(f) + raw_event = json.load(f) + + # Convert body to JSON string if it's a dict + if isinstance(raw_event.get("body"), dict): + raw_event["body"] = json.dumps(raw_event["body"]) + + cls.sample_sqs_event = raw_event def test_extract_trace_ids_success_from_real_payload(self): """Test successful extraction using real SQS event structure.""" @@ -68,7 +74,13 @@ def setUpClass(cls): """Load the sample SQS event once for all tests.""" sample_event_path = Path(__file__).parent.parent / "tests/sqs_event.json" with open(sample_event_path, "r") as f: - cls.sample_sqs_record = json.load(f) + raw_event = json.load(f) + + # Convert body to JSON string if it's a dict + if isinstance(raw_event.get("body"), dict): + raw_event["body"] = json.dumps(raw_event["body"]) + + cls.sample_sqs_record = raw_event def setUp(self): """Set up test fixtures.""" @@ -80,31 +92,38 @@ def setUp(self): self.successful_mns_response = {"status_code": 201, "message": "Published"} - @patch("lambda_handler.MnsService.publish_notification") + @patch("lambda_handler.get_mns_service") @patch("lambda_handler.create_mns_notification") @patch("lambda_handler.logger") def test_lambda_handler_single_record_success_real_payload( - self, mock_logger, mock_create_notification, mock_mns_publish + self, mock_logger, mock_create_notification, mock_get_mns ): """Test successful processing using real SQS event payload.""" + # Mock MNS service instance + mock_mns_service = Mock() + mock_mns_service.publish_notification.return_value = self.successful_mns_response + mock_get_mns.return_value = mock_mns_service + mock_create_notification.return_value = self.sample_notification - mock_mns_publish.return_value = self.successful_mns_response event = {"Records": [self.sample_sqs_record]} result = lambda_handler(event, Mock()) self.assertEqual(result, {"batchItemFailures": 
[]}) mock_create_notification.assert_called_once_with(self.sample_sqs_record) - mock_mns_publish.assert_called_once_with(self.sample_notification) + mock_mns_service.publish_notification.assert_called_once_with(self.sample_notification) mock_logger.exception.assert_not_called() - @patch("lambda_handler.MnsService.publish_notification") + @patch("lambda_handler.get_mns_service") @patch("lambda_handler.create_mns_notification") @patch("lambda_handler.logger") - def test_lambda_handler_multiple_records_all_success(self, mock_logger, mock_create_notification, mock_mns_publish): + def test_lambda_handler_multiple_records_all_success(self, mock_logger, mock_create_notification, mock_get_mns): """Test successful processing of multiple SQS records.""" + mock_mns_service = Mock() + mock_mns_service.publish_notification.return_value = self.successful_mns_response + mock_get_mns.return_value = mock_mns_service + mock_create_notification.return_value = self.sample_notification - mock_mns_publish.return_value = self.successful_mns_response record_2 = self.sample_sqs_record.copy() record_2["messageId"] = "different-message-id" @@ -114,14 +133,17 @@ def test_lambda_handler_multiple_records_all_success(self, mock_logger, mock_cre self.assertEqual(result, {"batchItemFailures": []}) self.assertEqual(mock_create_notification.call_count, 2) - self.assertEqual(mock_mns_publish.call_count, 2) + self.assertEqual(mock_mns_service.publish_notification.call_count, 2) mock_logger.exception.assert_not_called() - @patch("lambda_handler.MnsService.publish_notification") + @patch("lambda_handler.get_mns_service") @patch("lambda_handler.create_mns_notification") @patch("lambda_handler.logger") - def test_lambda_handler_single_record_failure(self, mock_logger, mock_create_notification, mock_mns_publish): + def test_lambda_handler_single_record_failure(self, mock_logger, mock_create_notification, mock_get_mns): """Test handling of a single record failure.""" + mock_mns_service = Mock() + 
mock_get_mns.return_value = mock_mns_service + mock_create_notification.side_effect = Exception("Processing error") event = {"Records": [self.sample_sqs_record]} @@ -131,15 +153,18 @@ def test_lambda_handler_single_record_failure(self, mock_logger, mock_create_not self.assertEqual(result, {"batchItemFailures": [{"itemIdentifier": expected_message_id}]}) mock_logger.exception.assert_called_once() mock_logger.warning.assert_called_once_with("Batch completed with 1 failures") - mock_mns_publish.assert_not_called() + mock_mns_service.publish_notification.assert_not_called() - @patch("lambda_handler.MnsService.publish_notification") + @patch("lambda_handler.get_mns_service") @patch("lambda_handler.create_mns_notification") @patch("lambda_handler.logger") - def test_lambda_handler_mns_publish_failure(self, mock_logger, mock_create_notification, mock_mns_publish): + def test_lambda_handler_mns_publish_failure(self, mock_logger, mock_create_notification, mock_get_mns): """Test handling when MNS publish returns non-201 status.""" + mock_mns_service = Mock() + mock_mns_service.publish_notification.return_value = {"status_code": 400, "message": "Bad Request"} + mock_get_mns.return_value = mock_mns_service + mock_create_notification.return_value = self.sample_notification - mock_mns_publish.return_value = {"status_code": 400, "message": "Bad Request"} event = {"Records": [self.sample_sqs_record]} result = lambda_handler(event, Mock()) @@ -148,13 +173,16 @@ def test_lambda_handler_mns_publish_failure(self, mock_logger, mock_create_notif self.assertEqual(result, {"batchItemFailures": [{"itemIdentifier": expected_message_id}]}) mock_logger.exception.assert_called_once() - @patch("lambda_handler.MnsService.publish_notification") + @patch("lambda_handler.get_mns_service") @patch("lambda_handler.create_mns_notification") @patch("lambda_handler.logger") - def test_lambda_handler_partial_batch_failure(self, mock_logger, mock_create_notification, mock_mns_publish): + def 
test_lambda_handler_partial_batch_failure(self, mock_logger, mock_create_notification, mock_get_mns): """Test partial batch failure where one record succeeds and one fails.""" + mock_mns_service = Mock() + mock_mns_service.publish_notification.side_effect = [self.successful_mns_response, Exception("MNS API error")] + mock_get_mns.return_value = mock_mns_service + mock_create_notification.return_value = self.sample_notification - mock_mns_publish.side_effect = [self.successful_mns_response, Exception("MNS API error")] record_2 = self.sample_sqs_record.copy() record_2["messageId"] = "msg-456" @@ -167,26 +195,30 @@ def test_lambda_handler_partial_batch_failure(self, mock_logger, mock_create_not self.assertEqual(mock_create_notification.call_count, 2) mock_logger.exception.assert_called_once() - @patch("lambda_handler.MnsService.publish_notification") + @patch("lambda_handler.get_mns_service") @patch("lambda_handler.create_mns_notification") @patch("lambda_handler.logger") - def test_lambda_handler_empty_records(self, mock_logger, mock_create_notification, mock_mns_publish): + def test_lambda_handler_empty_records(self, mock_logger, mock_create_notification, mock_get_mns): """Test handling of empty Records list.""" + mock_mns_service = Mock() + mock_get_mns.return_value = mock_mns_service + event = {"Records": []} result = lambda_handler(event, Mock()) self.assertEqual(result, {"batchItemFailures": []}) mock_create_notification.assert_not_called() - mock_mns_publish.assert_not_called() + mock_mns_service.publish_notification.assert_not_called() mock_logger.info.assert_called_with("Successfully processed all 0 messages") - @patch("lambda_handler.MnsService.publish_notification") + @patch("lambda_handler.get_mns_service") @patch("lambda_handler.create_mns_notification") @patch("lambda_handler.logger") - def test_lambda_handler_logs_correct_trace_ids_on_failure( - self, mock_logger, mock_create_notification, mock_mns_publish - ): + def 
test_lambda_handler_logs_correct_trace_ids_on_failure(self, mock_logger, mock_create_notification, mock_get_mns): """Test that all trace IDs are logged when an error occurs.""" + mock_mns_service = Mock() + mock_get_mns.return_value = mock_mns_service + mock_create_notification.side_effect = Exception("Test error") event = {"Records": [self.sample_sqs_record]} From 9a5884665eb113e05952acf0ace6a07758b6d192 Mon Sep 17 00:00:00 2001 From: Akol125 Date: Mon, 23 Feb 2026 16:18:14 +0000 Subject: [PATCH 15/31] make payload dynamic --- .../mns_publisher/src/create_notification.py | 4 ++- lambdas/mns_publisher/src/lambda_handler.py | 16 ++++++++-- lambdas/mns_publisher/src/utils.py | 0 .../src/common/api_clients/mns_service.py | 32 +++++++++++++------ 4 files changed, 39 insertions(+), 13 deletions(-) delete mode 100644 lambdas/mns_publisher/src/utils.py diff --git a/lambdas/mns_publisher/src/create_notification.py b/lambdas/mns_publisher/src/create_notification.py index 469eb0ea6f..51eec06822 100644 --- a/lambdas/mns_publisher/src/create_notification.py +++ b/lambdas/mns_publisher/src/create_notification.py @@ -3,6 +3,8 @@ import uuid from datetime import datetime +from aws_lambda_typing.events.sqs import SQSMessage + from common.api_clients.get_pds_details import pds_get_patient_details from common.clients import logger from common.get_service_url import get_service_url @@ -14,7 +16,7 @@ PDS_BASE_URL = os.getenv("PDS_BASE_URL") -def create_mns_notification(sqs_event: dict) -> dict: +def create_mns_notification(sqs_event: SQSMessage) -> dict: """Create a notification payload for MNS.""" immunisation_url = get_service_url(IMMUNIZATION_ENV, IMMUNIZATION_BASE_PATH) diff --git a/lambdas/mns_publisher/src/lambda_handler.py b/lambdas/mns_publisher/src/lambda_handler.py index f0b0568b96..8e7cf71d56 100644 --- a/lambdas/mns_publisher/src/lambda_handler.py +++ b/lambdas/mns_publisher/src/lambda_handler.py @@ -3,6 +3,7 @@ from typing import Tuple from aws_lambda_typing import 
context, events +from aws_lambda_typing.events.sqs import SQSMessage from common.api_clients.mns_setup import get_mns_service from common.clients import logger @@ -22,12 +23,21 @@ def lambda_handler(event: events.SQSEvent, _: context.Context) -> dict[str, list try: mns_notification_payload = create_mns_notification(record) notification_id = mns_notification_payload.get("id", None) # generated UUID for MNS - logger.info("Processing message", trace_id=notification_id) + action_flag = mns_notification_payload.get("action") + logger.info( + "Processing message", + trace_ids={ + "notification_id": notification_id, + "message_id": message_id, + "immunisation_id": immunisation_id, + "action_flag": action_flag, + }, + ) mns_service = get_mns_service(mns=apigee_env) mns_pub_response = mns_service.publish_notification(mns_notification_payload) - if mns_pub_response["status_code"] != 201: + if mns_pub_response["status_code"] != 200: raise RuntimeError("MNS publish failed") logger.info( "Successfully created MNS notification", @@ -56,7 +66,7 @@ def lambda_handler(event: events.SQSEvent, _: context.Context) -> dict[str, list return {"batchItemFailures": batch_item_failures} -def extract_trace_ids(record: dict) -> Tuple[str, str | None]: +def extract_trace_ids(record: SQSMessage) -> Tuple[str, str | None]: """ Extract identifiers for tracing from SQS record. 
Returns: Tuple of (message_id, immunisation_id) diff --git a/lambdas/mns_publisher/src/utils.py b/lambdas/mns_publisher/src/utils.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/lambdas/shared/src/common/api_clients/mns_service.py b/lambdas/shared/src/common/api_clients/mns_service.py index 9aeb21f479..ba3d412b17 100644 --- a/lambdas/shared/src/common/api_clients/mns_service.py +++ b/lambdas/shared/src/common/api_clients/mns_service.py @@ -23,11 +23,25 @@ class MnsService: def __init__(self, authenticator: AppRestrictedAuth): self.authenticator = authenticator self.access_token = self.authenticator.get_access_token() - self.subscription_payload = { + logging.info(f"Using SQS ARN for subscription: {SQS_ARN}") + + def _build_subscription_payload(self, event_type: str, reason: str | None = None, status: str = "requested") -> dict: + """ + Builds subscription payload. + Args: + event_type: Event type to subscribe to (e.g., 'imms-vaccinations-2', 'nhs-number-change-2') + reason: Optional description of the subscription + status: Subscription status (default: 'requested') + Returns: Subscription payload dict + """ + if not reason: + reason = f"Subscribe SQS to {event_type} events" + + return { "resourceType": "Subscription", - "status": "requested", - "reason": "Subscribe SQS to NHS Number Change Events", - "criteria": "eventType=nhs-number-change-2", + "status": status, + "reason": reason, + "criteria": f"eventType={event_type}", "channel": { "type": "message", "endpoint": SQS_ARN, @@ -35,8 +49,6 @@ def __init__(self, authenticator: AppRestrictedAuth): }, } - logging.info(f"Using SQS ARN for subscription: {SQS_ARN}") - def _build_headers(self, content_type: str = "application/fhir+json") -> dict: """Build request headers with authentication and correlation ID.""" return { @@ -45,14 +57,16 @@ def _build_headers(self, content_type: str = "application/fhir+json") -> dict: "X-Correlation-ID": str(uuid.uuid4()), } - def subscribe_notification(self) -> 
dict | None: + def subscribe_notification(self, event_type: str = "nhs-number-change-2", reason: str | None = None) -> dict | None: + subscription_payload = self._build_subscription_payload(event_type, reason) response = requests.request( "POST", f"{MNS_BASE_URL}/subscriptions", headers=self._build_headers(), timeout=15, - data=json.dumps(self.subscription_payload), + data=json.dumps(subscription_payload), ) + if response.status_code in (200, 201): return response.json() else: @@ -129,7 +143,7 @@ def publish_notification(self, notification_payload) -> dict | None: timeout=15, data=json.dumps(notification_payload), ) - if response.status_code in (200, 201): + if response.status_code == 200: return response.json() else: raise_error_response(response) From 9cc38bd815340451d1aa784d57a2cc16b1eb2c44 Mon Sep 17 00:00:00 2001 From: Akol125 Date: Tue, 24 Feb 2026 01:23:24 +0000 Subject: [PATCH 16/31] add typedicts, remove recursion --- lambdas/mns_publisher/src/constants.py | 46 ++++++++++++---- .../mns_publisher/src/create_notification.py | 32 +++++------ lambdas/mns_publisher/src/sqs_dynamo_utils.py | 54 ++++++++++--------- .../tests/test_lambda_handler.py | 2 +- 4 files changed, 81 insertions(+), 53 deletions(-) diff --git a/lambdas/mns_publisher/src/constants.py b/lambdas/mns_publisher/src/constants.py index 82f76e5739..bc91ce3455 100644 --- a/lambdas/mns_publisher/src/constants.py +++ b/lambdas/mns_publisher/src/constants.py @@ -1,4 +1,4 @@ -from enum import Enum +from typing import TypedDict # Static constants for the MNS notification creation process SPEC_VERSION = "1.0" @@ -6,15 +6,41 @@ # Fields from the incoming SQS message that forms part of the base schema and filtering attributes for MNS notifications -class SQSEventFields(Enum): - DATE_AND_TIME_KEY = "DATE_AND_TIME" - BIRTH_DATE_KEY = "PERSON_DOB" - NHS_NUMBER_KEY = "NHS_NUMBER" - IMMUNISATION_ID_KEY = "ImmsID" - SOURCE_ORGANISATION_KEY = "SITE_CODE" - SOURCE_APPLICATION_KEY = "SupplierSystem" - VACCINE_TYPE = 
"VACCINE_TYPE" - ACTION = "Operation" +class FilteringData(TypedDict): + """MNS notification filtering attributes.""" + + generalpractitioner: str | None + sourceorganisation: str + sourceapplication: str + subjectage: str + immunisationtype: str + action: str + + +class MnsNotificationPayload(TypedDict): + """CloudEvents-compliant MNS notification payload.""" + + specversion: str + id: str + source: str + type: str + time: str + subject: str + dataref: str + filtering: FilteringData DYNAMO_DB_TYPE_DESCRIPTORS = ("S", "N", "BOOL", "M", "L") + + +class ImmsData(TypedDict): + """Extracted immunisation data from DynamoDB stream.""" + + imms_id: str + supplier_system: str + vaccine_type: str + operation: str + nhs_number: str + person_dob: str + date_and_time: str + site_code: str diff --git a/lambdas/mns_publisher/src/create_notification.py b/lambdas/mns_publisher/src/create_notification.py index 51eec06822..0e46d5073a 100644 --- a/lambdas/mns_publisher/src/create_notification.py +++ b/lambdas/mns_publisher/src/create_notification.py @@ -1,4 +1,3 @@ -import json import os import uuid from datetime import datetime @@ -8,43 +7,40 @@ from common.api_clients.get_pds_details import pds_get_patient_details from common.clients import logger from common.get_service_url import get_service_url -from constants import IMMUNISATION_TYPE, SPEC_VERSION, SQSEventFields -from sqs_dynamo_utils import find_imms_value_in_stream +from constants import IMMUNISATION_TYPE, SPEC_VERSION, MnsNotificationPayload +from sqs_dynamo_utils import extract_sqs_imms_data IMMUNIZATION_ENV = os.getenv("IMMUNIZATION_ENV") IMMUNIZATION_BASE_PATH = os.getenv("IMMUNIZATION_BASE_PATH") PDS_BASE_URL = os.getenv("PDS_BASE_URL") -def create_mns_notification(sqs_event: SQSMessage) -> dict: +def create_mns_notification(sqs_event: SQSMessage) -> MnsNotificationPayload: """Create a notification payload for MNS.""" - immunisation_url = get_service_url(IMMUNIZATION_ENV, IMMUNIZATION_BASE_PATH) - incoming_sqs_message = 
json.loads(sqs_event["body"]) - imms_data = {field: find_imms_value_in_stream(incoming_sqs_message, field.value) for field in SQSEventFields} + # Simple, direct extraction + imms_data = extract_sqs_imms_data(sqs_event) - patient_age = calculate_age_at_vaccination( - imms_data[SQSEventFields.BIRTH_DATE_KEY], imms_data[SQSEventFields.DATE_AND_TIME_KEY] - ) + patient_age = calculate_age_at_vaccination(imms_data["person_dob"], imms_data["date_and_time"]) - gp_ods_code = get_practitioner_details_from_pds(imms_data[SQSEventFields.NHS_NUMBER_KEY]) + gp_ods_code = get_practitioner_details_from_pds(imms_data["nhs_number"]) return { "specversion": SPEC_VERSION, "id": str(uuid.uuid4()), "source": immunisation_url, "type": IMMUNISATION_TYPE, - "time": imms_data[SQSEventFields.DATE_AND_TIME_KEY], - "subject": imms_data[SQSEventFields.NHS_NUMBER_KEY], - "dataref": f"{immunisation_url}/Immunization/{imms_data[SQSEventFields.IMMUNISATION_ID_KEY]}", + "time": imms_data["date_and_time"], + "subject": imms_data["nhs_number"], + "dataref": f"{immunisation_url}/Immunization/{imms_data['imms_id']}", "filtering": { "generalpractitioner": gp_ods_code, - "sourceorganisation": imms_data[SQSEventFields.SOURCE_ORGANISATION_KEY], - "sourceapplication": imms_data[SQSEventFields.SOURCE_APPLICATION_KEY], + "sourceorganisation": imms_data["site_code"], + "sourceapplication": imms_data["supplier_system"], "subjectage": str(patient_age), - "immunisationtype": imms_data[SQSEventFields.VACCINE_TYPE], - "action": imms_data[SQSEventFields.ACTION], + "immunisationtype": imms_data["vaccine_type"], + "action": imms_data["operation"], }, } diff --git a/lambdas/mns_publisher/src/sqs_dynamo_utils.py b/lambdas/mns_publisher/src/sqs_dynamo_utils.py index 5d1f5f0bd2..7ade166bc0 100644 --- a/lambdas/mns_publisher/src/sqs_dynamo_utils.py +++ b/lambdas/mns_publisher/src/sqs_dynamo_utils.py @@ -1,32 +1,38 @@ -from constants import DYNAMO_DB_TYPE_DESCRIPTORS +import json -""" -Recursion to fetch deeply nested values 
from DynamoDB stream events. -Time complexity: O(n) where n is the total number of keys in the nested structure. -For typical SQS payloads (~50-100 keys per iteration), this is negligible. -Cleaner than hardcoded path references like data['body']['dynamodb']['NewImage']['Imms']['M']['NHS_NUMBER']['S']. -""" +from constants import DYNAMO_DB_TYPE_DESCRIPTORS, ImmsData -def find_imms_value_in_stream(sqs_event_data: dict, target_key: str): +def extract_sqs_imms_data(sqs_record: dict) -> ImmsData: """ - Recursively search for a key and unwrap DynamoDB type descriptors. - Args: - sqs_event_data: Nested dict from SQS DynamoDB stream event - target_key: The key to find (e.g., 'NHS_NUMBER', 'ImmsID') - Returns: Unwrapped value if found, None otherwise + Extract immunisation data from SQS DynamoDB stream event. + Args: sqs_record: SQS record containing DynamoDB stream data + Returns: Dict with unwrapped values ready to use """ - if isinstance(sqs_event_data, dict): - for key, value in sqs_event_data.items(): - if key == target_key: - return _unwrap_dynamodb_value(value) - result = find_imms_value_in_stream(value, target_key) - if result is not None: - return result - return None - - -def _unwrap_dynamodb_value(value): + body = json.loads(sqs_record.get("body", "{}")) + new_image = body.get("dynamodb", {}).get("NewImage", {}) + + # Get top-level fields + imms_id = _unwrap_dynamodb_value(new_image.get("ImmsID", {})) + supplier_system = _unwrap_dynamodb_value(new_image.get("SupplierSystem", {})) + vaccine_type = _unwrap_dynamodb_value(new_image.get("VaccineType", {})) + operation = _unwrap_dynamodb_value(new_image.get("Operation", {})) + + imms_map = new_image.get("Imms", {}).get("M", {}) + + return { + "imms_id": imms_id, + "supplier_system": supplier_system, + "vaccine_type": vaccine_type, + "operation": operation, + "nhs_number": _unwrap_dynamodb_value(imms_map.get("NHS_NUMBER", {})), + "person_dob": _unwrap_dynamodb_value(imms_map.get("PERSON_DOB", {})), + "date_and_time": 
_unwrap_dynamodb_value(imms_map.get("DATE_AND_TIME", {})), + "site_code": _unwrap_dynamodb_value(imms_map.get("SITE_CODE", {})), + } + + +def _unwrap_dynamodb_value(value) -> str: """ Unwrap DynamoDB type descriptor to get the actual value. DynamoDB types: S (String), N (Number), BOOL, M (Map), L (List), NULL diff --git a/lambdas/mns_publisher/tests/test_lambda_handler.py b/lambdas/mns_publisher/tests/test_lambda_handler.py index c59e613e45..8ed3ebe17d 100644 --- a/lambdas/mns_publisher/tests/test_lambda_handler.py +++ b/lambdas/mns_publisher/tests/test_lambda_handler.py @@ -90,7 +90,7 @@ def setUp(self): "type": "imms-vaccinations-2", } - self.successful_mns_response = {"status_code": 201, "message": "Published"} + self.successful_mns_response = {"status_code": 200, "message": "Published"} @patch("lambda_handler.get_mns_service") @patch("lambda_handler.create_mns_notification") From ecdf38493e58b98a8d242e5fbd25516e03d233d8 Mon Sep 17 00:00:00 2001 From: Akol125 Date: Tue, 24 Feb 2026 13:14:15 +0000 Subject: [PATCH 17/31] refactor lambda and add env vars --- .../dev/internal-dev/variables.tfvars | 1 + .../dev/internal-qa/variables.tfvars | 1 + .../environments/dev/pr/variables.tfvars | 1 + .../environments/dev/ref/variables.tfvars | 1 + .../preprod/int-blue/variables.tfvars | 1 + .../preprod/int-green/variables.tfvars | 1 + .../environments/prod/blue/variables.tfvars | 1 + .../environments/prod/green/variables.tfvars | 1 + infrastructure/instance/mns_publisher.tf | 2 + .../mns_publisher/mns_publisher_lambda.tf | 2 + .../modules/mns_publisher/variables.tf | 8 + infrastructure/instance/variables.tf | 5 + .../mns_publisher/src/create_notification.py | 3 +- lambdas/mns_publisher/src/lambda_handler.py | 74 +----- lambdas/mns_publisher/src/process_records.py | 96 +++++++ lambdas/mns_publisher/src/sqs_dynamo_utils.py | 2 +- .../tests/test_lambda_handler.py | 249 ++++++++++-------- .../src/common/api_clients/mns_service.py | 3 +- .../src/common/api_clients/mns_setup.py | 1 
- 19 files changed, 273 insertions(+), 180 deletions(-) create mode 100644 lambdas/mns_publisher/src/process_records.py diff --git a/infrastructure/instance/environments/dev/internal-dev/variables.tfvars b/infrastructure/instance/environments/dev/internal-dev/variables.tfvars index 188bc51e18..2cd3ce1798 100644 --- a/infrastructure/instance/environments/dev/internal-dev/variables.tfvars +++ b/infrastructure/instance/environments/dev/internal-dev/variables.tfvars @@ -2,6 +2,7 @@ environment = "dev" immunisation_account_id = "345594581768" dspp_core_account_id = "603871901111" pds_environment = "int" +mns_environment = "int" error_alarm_notifications_enabled = true create_mesh_processor = false has_sub_environment_scope = true diff --git a/infrastructure/instance/environments/dev/internal-qa/variables.tfvars b/infrastructure/instance/environments/dev/internal-qa/variables.tfvars index d671f09c6d..35c017d36f 100644 --- a/infrastructure/instance/environments/dev/internal-qa/variables.tfvars +++ b/infrastructure/instance/environments/dev/internal-qa/variables.tfvars @@ -2,6 +2,7 @@ environment = "dev" immunisation_account_id = "345594581768" dspp_core_account_id = "603871901111" pds_environment = "int" +mns_environment = "int" error_alarm_notifications_enabled = false mns_publisher_feature_enabled = true create_mesh_processor = false diff --git a/infrastructure/instance/environments/dev/pr/variables.tfvars b/infrastructure/instance/environments/dev/pr/variables.tfvars index 7d17c90f95..e8489ab153 100644 --- a/infrastructure/instance/environments/dev/pr/variables.tfvars +++ b/infrastructure/instance/environments/dev/pr/variables.tfvars @@ -2,6 +2,7 @@ environment = "dev" immunisation_account_id = "345594581768" dspp_core_account_id = "603871901111" pds_environment = "int" +mns_environment = "int" error_alarm_notifications_enabled = false mns_publisher_feature_enabled = true # Switch this off once tested fully e2e in Lambda branch create_mesh_processor = false diff --git 
a/infrastructure/instance/environments/dev/ref/variables.tfvars b/infrastructure/instance/environments/dev/ref/variables.tfvars index 6b3124455a..53741cac7a 100644 --- a/infrastructure/instance/environments/dev/ref/variables.tfvars +++ b/infrastructure/instance/environments/dev/ref/variables.tfvars @@ -2,6 +2,7 @@ environment = "dev" immunisation_account_id = "345594581768" dspp_core_account_id = "603871901111" pds_environment = "ref" +mns_environment = "int" error_alarm_notifications_enabled = true create_mesh_processor = false has_sub_environment_scope = true diff --git a/infrastructure/instance/environments/preprod/int-blue/variables.tfvars b/infrastructure/instance/environments/preprod/int-blue/variables.tfvars index e31b7c5474..77c6db1a1e 100644 --- a/infrastructure/instance/environments/preprod/int-blue/variables.tfvars +++ b/infrastructure/instance/environments/preprod/int-blue/variables.tfvars @@ -2,6 +2,7 @@ environment = "preprod" immunisation_account_id = "084828561157" dspp_core_account_id = "603871901111" pds_environment = "int" +mns_environment = "int" error_alarm_notifications_enabled = true mns_publisher_feature_enabled = true diff --git a/infrastructure/instance/environments/preprod/int-green/variables.tfvars b/infrastructure/instance/environments/preprod/int-green/variables.tfvars index e31b7c5474..77c6db1a1e 100644 --- a/infrastructure/instance/environments/preprod/int-green/variables.tfvars +++ b/infrastructure/instance/environments/preprod/int-green/variables.tfvars @@ -2,6 +2,7 @@ environment = "preprod" immunisation_account_id = "084828561157" dspp_core_account_id = "603871901111" pds_environment = "int" +mns_environment = "int" error_alarm_notifications_enabled = true mns_publisher_feature_enabled = true diff --git a/infrastructure/instance/environments/prod/blue/variables.tfvars b/infrastructure/instance/environments/prod/blue/variables.tfvars index c8c41101e0..9ddd14d29b 100644 --- 
a/infrastructure/instance/environments/prod/blue/variables.tfvars +++ b/infrastructure/instance/environments/prod/blue/variables.tfvars @@ -3,6 +3,7 @@ immunisation_account_id = "664418956997" dspp_core_account_id = "232116723729" mns_account_id = "758334270304" pds_environment = "prod" +mns_environment = "prod" error_alarm_notifications_enabled = true mns_publisher_feature_enabled = true diff --git a/infrastructure/instance/environments/prod/green/variables.tfvars b/infrastructure/instance/environments/prod/green/variables.tfvars index c8c41101e0..9ddd14d29b 100644 --- a/infrastructure/instance/environments/prod/green/variables.tfvars +++ b/infrastructure/instance/environments/prod/green/variables.tfvars @@ -3,6 +3,7 @@ immunisation_account_id = "664418956997" dspp_core_account_id = "232116723729" mns_account_id = "758334270304" pds_environment = "prod" +mns_environment = "prod" error_alarm_notifications_enabled = true mns_publisher_feature_enabled = true diff --git a/infrastructure/instance/mns_publisher.tf b/infrastructure/instance/mns_publisher.tf index e150ec82f2..28d4e62067 100644 --- a/infrastructure/instance/mns_publisher.tf +++ b/infrastructure/instance/mns_publisher.tf @@ -11,6 +11,8 @@ module "mns_publisher" { sub_environment = strcontains(var.sub_environment, "pr-") ? 
"immunisation-fhir-api/FHIR/R4-${var.sub_environment}" : "immunisation-fhir-api/FHIR/R4" lambda_kms_encryption_key_arn = data.aws_kms_key.existing_lambda_encryption_key.arn mns_publisher_resource_name_prefix = "${local.resource_scope}-mns-outbound-events" + pds_environment = var.pds_environment + mns_environment = var.mns_environment private_subnet_ids = local.private_subnet_ids security_group_id = data.aws_security_group.existing_securitygroup.id diff --git a/infrastructure/instance/modules/mns_publisher/mns_publisher_lambda.tf b/infrastructure/instance/modules/mns_publisher/mns_publisher_lambda.tf index 149322d1f0..20a9136cc0 100644 --- a/infrastructure/instance/modules/mns_publisher/mns_publisher_lambda.tf +++ b/infrastructure/instance/modules/mns_publisher/mns_publisher_lambda.tf @@ -194,6 +194,8 @@ resource "aws_lambda_function" "mns_publisher_lambda" { SPLUNK_FIREHOSE_NAME = var.splunk_firehose_stream_name IMMUNIZATION_ENV = var.resource_scope, IMMUNIZATION_BASE_PATH = var.sub_environment + PDS_ENV = var.pds_environment + MNS_ENV = var.mns_environment } } diff --git a/infrastructure/instance/modules/mns_publisher/variables.tf b/infrastructure/instance/modules/mns_publisher/variables.tf index e659df38f1..383ecace71 100644 --- a/infrastructure/instance/modules/mns_publisher/variables.tf +++ b/infrastructure/instance/modules/mns_publisher/variables.tf @@ -85,3 +85,11 @@ variable "sub_environment" { type = string description = "Sub-environment name, e.g. internal-dev, internal-qa. 
The value is set in the Makefile" } + +variable "mns_environment" { + type = string +} + +variable "pds_environment" { + type = string +} diff --git a/infrastructure/instance/variables.tf b/infrastructure/instance/variables.tf index 3a2144b21a..2912fa14c5 100644 --- a/infrastructure/instance/variables.tf +++ b/infrastructure/instance/variables.tf @@ -74,6 +74,11 @@ variable "pds_environment" { default = "int" } +variable "mns_environment" { + type = string + default = "int" +} + variable "mesh_no_invocation_period_seconds" { description = "The maximum duration the MESH Processor Lambda can go without being invoked before the no-invocation alarm is triggered." type = number diff --git a/lambdas/mns_publisher/src/create_notification.py b/lambdas/mns_publisher/src/create_notification.py index 0e46d5073a..ff0ce760fd 100644 --- a/lambdas/mns_publisher/src/create_notification.py +++ b/lambdas/mns_publisher/src/create_notification.py @@ -12,7 +12,6 @@ IMMUNIZATION_ENV = os.getenv("IMMUNIZATION_ENV") IMMUNIZATION_BASE_PATH = os.getenv("IMMUNIZATION_BASE_PATH") -PDS_BASE_URL = os.getenv("PDS_BASE_URL") def create_mns_notification(sqs_event: SQSMessage) -> MnsNotificationPayload: @@ -48,7 +47,7 @@ def create_mns_notification(sqs_event: SQSMessage) -> MnsNotificationPayload: def calculate_age_at_vaccination(birth_date: str, vaccination_date: str) -> int: """ Calculate patient age in years at time of vaccination. 
- Expects dates in format: YYYYMMDD or YYYYMMDDTHHmmss + Expects dates in format: YYYYMMDD or YYYYMMDDThhmmsszz """ birth_date_str = birth_date[:8] if len(birth_date) >= 8 else birth_date vacc_date_str = vaccination_date[:8] if len(vaccination_date) >= 8 else vaccination_date diff --git a/lambdas/mns_publisher/src/lambda_handler.py b/lambdas/mns_publisher/src/lambda_handler.py index 8e7cf71d56..670c6f8677 100644 --- a/lambdas/mns_publisher/src/lambda_handler.py +++ b/lambdas/mns_publisher/src/lambda_handler.py @@ -1,62 +1,12 @@ -import json -import os -from typing import Tuple - from aws_lambda_typing import context, events -from aws_lambda_typing.events.sqs import SQSMessage -from common.api_clients.mns_setup import get_mns_service from common.clients import logger -from create_notification import create_mns_notification - -apigee_env = os.getenv("APIGEE_ENVIRONMENT", "int") +from process_records import process_records def lambda_handler(event: events.SQSEvent, _: context.Context) -> dict[str, list]: event_records = event.get("Records", []) - batch_item_failures = [] - - for record in event_records: - message_id, immunisation_id = extract_trace_ids(record) - notification_id = None - - try: - mns_notification_payload = create_mns_notification(record) - notification_id = mns_notification_payload.get("id", None) # generated UUID for MNS - action_flag = mns_notification_payload.get("action") - logger.info( - "Processing message", - trace_ids={ - "notification_id": notification_id, - "message_id": message_id, - "immunisation_id": immunisation_id, - "action_flag": action_flag, - }, - ) - - mns_service = get_mns_service(mns=apigee_env) - mns_pub_response = mns_service.publish_notification(mns_notification_payload) - - if mns_pub_response["status_code"] != 200: - raise RuntimeError("MNS publish failed") - logger.info( - "Successfully created MNS notification", - trace_ids={ - "mns_notification_id": notification_id, - }, - ) - - except Exception as e: - logger.exception( - 
"Failed to process message", - trace_ids={ - "message_id": message_id, - "immunisation_id": immunisation_id, - "mns_notification_id": notification_id, - "error": str(e), - }, - ) - batch_item_failures.append({"itemIdentifier": message_id}) + batch_item_failures = process_records(event_records) if batch_item_failures: logger.warning(f"Batch completed with {len(batch_item_failures)} failures") @@ -64,23 +14,3 @@ def lambda_handler(event: events.SQSEvent, _: context.Context) -> dict[str, list logger.info(f"Successfully processed all {len(event_records)} messages") return {"batchItemFailures": batch_item_failures} - - -def extract_trace_ids(record: SQSMessage) -> Tuple[str, str | None]: - """ - Extract identifiers for tracing from SQS record. - Returns: Tuple of (message_id, immunisation_id) - """ - sqs_message_id = record.get("messageId", "unknown") - immunisation_id = None - - try: - sqs_event_body = record.get("body", {}) - if isinstance(sqs_event_body, str): - sqs_event_body = json.loads(sqs_event_body) - - immunisation_id = sqs_event_body.get("dynamodb", {}).get("NewImage", {}).get("ImmsID", {}).get("S") - except Exception as e: - logger.warning(f"Could not extract immunisation_id: {immunisation_id}: {e}") - - return sqs_message_id, immunisation_id diff --git a/lambdas/mns_publisher/src/process_records.py b/lambdas/mns_publisher/src/process_records.py new file mode 100644 index 0000000000..f67301a938 --- /dev/null +++ b/lambdas/mns_publisher/src/process_records.py @@ -0,0 +1,96 @@ +import json +import os +from typing import Tuple + +from aws_lambda_typing import events +from aws_lambda_typing.events.sqs import SQSMessage + +from common.api_clients.mns_setup import get_mns_service +from common.clients import logger +from create_notification import create_mns_notification + +mns_env = os.getenv("MNS_ENV", "int") + + +def process_records(records: events.SQSEvent) -> list[dict]: + """ + Process multiple SQS records. 
+ Args: records: List of SQS records to process + Returns: List of failed item identifiers for partial batch failure + """ + batch_item_failures = [] + mns_service = get_mns_service(mns_env=mns_env) + + for record in records: + failed_batch_item = process_record(record, mns_service) + if failed_batch_item: + batch_item_failures.append(failed_batch_item) + + return batch_item_failures + + +def process_record(record: SQSMessage, mns_service) -> str | None: + """ + Process a single SQS record. + Args: + record: SQS record containing DynamoDB stream data + mns_service: MNS service instance for publishing + Returns: Failure dict with itemIdentifier if processing failed, None if successful + """ + message_id, immunisation_id = extract_trace_ids(record) + notification_id = None + + try: + # Create notification payload + mns_notification_payload = create_mns_notification(record) + notification_id = mns_notification_payload.get("id") + action_flag = mns_notification_payload.get("filtering", {}).get("action") + logger.info( + "Processing message", + trace_ids={ + "notification_id": notification_id, + "message_id": message_id, + "immunisation_id": immunisation_id, + "action_flag": action_flag, + }, + ) + + # Publish to MNS + mns_pub_response = mns_service.publish_notification(mns_notification_payload) + if mns_pub_response["status_code"] != 200: + raise RuntimeError("MNS publish failed") + logger.info("Successfully created MNS notification", trace_ids={"mns_notification_id": notification_id}) + + return None + + except Exception as e: + logger.exception( + "Failed to process message", + trace_ids={ + "message_id": message_id, + "immunisation_id": immunisation_id, + "mns_notification_id": notification_id, + "error": str(e), + }, + ) + return {"itemIdentifier": message_id} + + +def extract_trace_ids(record: SQSMessage) -> Tuple[str, str | None]: + """ + Extract identifiers for tracing from SQS record. 
+ Returns: Tuple of (message_id, immunisation_id) + """ + sqs_message_id = record.get("messageId", "unknown") + immunisation_id = None + + try: + sqs_event_body = record.get("body", {}) + if isinstance(sqs_event_body, str): + sqs_event_body = json.loads(sqs_event_body) + + immunisation_id = sqs_event_body.get("dynamodb", {}).get("NewImage", {}).get("ImmsID", {}).get("S") + except Exception as e: + logger.warning(f"Could not extract immunisation_id: {immunisation_id}: {e}") + + return sqs_message_id, immunisation_id diff --git a/lambdas/mns_publisher/src/sqs_dynamo_utils.py b/lambdas/mns_publisher/src/sqs_dynamo_utils.py index 7ade166bc0..1348591ddc 100644 --- a/lambdas/mns_publisher/src/sqs_dynamo_utils.py +++ b/lambdas/mns_publisher/src/sqs_dynamo_utils.py @@ -32,7 +32,7 @@ def extract_sqs_imms_data(sqs_record: dict) -> ImmsData: } -def _unwrap_dynamodb_value(value) -> str: +def _unwrap_dynamodb_value(value) -> None: """ Unwrap DynamoDB type descriptor to get the actual value. DynamoDB types: S (String), N (Number), BOOL, M (Map), L (List), NULL diff --git a/lambdas/mns_publisher/tests/test_lambda_handler.py b/lambdas/mns_publisher/tests/test_lambda_handler.py index 8ed3ebe17d..a01f2f62ef 100644 --- a/lambdas/mns_publisher/tests/test_lambda_handler.py +++ b/lambdas/mns_publisher/tests/test_lambda_handler.py @@ -3,7 +3,8 @@ from pathlib import Path from unittest.mock import Mock, patch -from lambda_handler import extract_trace_ids, lambda_handler +from lambda_handler import lambda_handler +from process_records import extract_trace_ids, process_record, process_records class TestExtractTraceIds(unittest.TestCase): @@ -66,8 +67,8 @@ def test_extract_trace_ids_missing_dynamodb_structure(self): self.assertIsNone(immunisation_id) -class TestLambdaHandler(unittest.TestCase): - """Tests for lambda_handler function.""" +class TestProcessRecord(unittest.TestCase): + """Tests for process_record function.""" @classmethod def setUpClass(cls): @@ -76,7 +77,6 @@ def 
setUpClass(cls): with open(sample_event_path, "r") as f: raw_event = json.load(f) - # Convert body to JSON string if it's a dict if isinstance(raw_event.get("body"), dict): raw_event["body"] = json.dumps(raw_event["body"]) @@ -88,149 +88,192 @@ def setUp(self): "id": "notif-789", "specversion": "1.0", "type": "imms-vaccinations-2", + "filtering": {"action": "CREATE"}, } + self.mock_mns_service = Mock() - self.successful_mns_response = {"status_code": 200, "message": "Published"} - - @patch("lambda_handler.get_mns_service") - @patch("lambda_handler.create_mns_notification") - @patch("lambda_handler.logger") - def test_lambda_handler_single_record_success_real_payload( - self, mock_logger, mock_create_notification, mock_get_mns - ): - """Test successful processing using real SQS event payload.""" - # Mock MNS service instance - mock_mns_service = Mock() - mock_mns_service.publish_notification.return_value = self.successful_mns_response - mock_get_mns.return_value = mock_mns_service - + @patch("process_records.create_mns_notification") + @patch("process_records.logger") + def test_process_record_success(self, mock_logger, mock_create_notification): + """Test successful processing of a single record.""" mock_create_notification.return_value = self.sample_notification + self.mock_mns_service.publish_notification.return_value = {"status_code": 200} - event = {"Records": [self.sample_sqs_record]} - result = lambda_handler(event, Mock()) + result = process_record(self.sample_sqs_record, self.mock_mns_service) - self.assertEqual(result, {"batchItemFailures": []}) + self.assertIsNone(result) mock_create_notification.assert_called_once_with(self.sample_sqs_record) - mock_mns_service.publish_notification.assert_called_once_with(self.sample_notification) + self.mock_mns_service.publish_notification.assert_called_once_with(self.sample_notification) mock_logger.exception.assert_not_called() - @patch("lambda_handler.get_mns_service") - 
@patch("lambda_handler.create_mns_notification") - @patch("lambda_handler.logger") - def test_lambda_handler_multiple_records_all_success(self, mock_logger, mock_create_notification, mock_get_mns): - """Test successful processing of multiple SQS records.""" - mock_mns_service = Mock() - mock_mns_service.publish_notification.return_value = self.successful_mns_response - mock_get_mns.return_value = mock_mns_service + @patch("process_records.create_mns_notification") + @patch("process_records.logger") + def test_process_record_create_notification_failure(self, mock_logger, mock_create_notification): + """Test handling when notification creation fails.""" + mock_create_notification.side_effect = Exception("Creation error") + + result = process_record(self.sample_sqs_record, self.mock_mns_service) + self.assertEqual(result, {"itemIdentifier": "98ed30eb-829f-41df-8a73-57fef70cf161"}) + mock_logger.exception.assert_called_once() + self.mock_mns_service.publish_notification.assert_not_called() + + @patch("process_records.create_mns_notification") + @patch("process_records.logger") + def test_process_record_publish_failure(self, mock_logger, mock_create_notification): + """Test handling when MNS publish fails.""" mock_create_notification.return_value = self.sample_notification + self.mock_mns_service.publish_notification.side_effect = Exception("Publish error") - record_2 = self.sample_sqs_record.copy() - record_2["messageId"] = "different-message-id" + result = process_record(self.sample_sqs_record, self.mock_mns_service) - event = {"Records": [self.sample_sqs_record, record_2]} - result = lambda_handler(event, Mock()) + self.assertEqual(result, {"itemIdentifier": "98ed30eb-829f-41df-8a73-57fef70cf161"}) + mock_logger.exception.assert_called_once() - self.assertEqual(result, {"batchItemFailures": []}) - self.assertEqual(mock_create_notification.call_count, 2) - self.assertEqual(mock_mns_service.publish_notification.call_count, 2) - mock_logger.exception.assert_not_called() 
+ @patch("process_records.create_mns_notification") + @patch("process_records.logger") + def test_process_record_logs_trace_ids(self, mock_logger, mock_create_notification): + """Test that trace IDs are logged correctly.""" + mock_create_notification.return_value = self.sample_notification - @patch("lambda_handler.get_mns_service") - @patch("lambda_handler.create_mns_notification") - @patch("lambda_handler.logger") - def test_lambda_handler_single_record_failure(self, mock_logger, mock_create_notification, mock_get_mns): - """Test handling of a single record failure.""" - mock_mns_service = Mock() - mock_get_mns.return_value = mock_mns_service + process_record(self.sample_sqs_record, self.mock_mns_service) - mock_create_notification.side_effect = Exception("Processing error") + # Check info log was called with trace IDs + info_calls = [call for call in mock_logger.info.call_args_list if "Processing message" in str(call)] + self.assertEqual(len(info_calls), 1) - event = {"Records": [self.sample_sqs_record]} - result = lambda_handler(event, Mock()) - expected_message_id = self.sample_sqs_record["messageId"] - self.assertEqual(result, {"batchItemFailures": [{"itemIdentifier": expected_message_id}]}) - mock_logger.exception.assert_called_once() - mock_logger.warning.assert_called_once_with("Batch completed with 1 failures") - mock_mns_service.publish_notification.assert_not_called() +class TestProcessRecords(unittest.TestCase): + """Tests for process_records function.""" - @patch("lambda_handler.get_mns_service") - @patch("lambda_handler.create_mns_notification") - @patch("lambda_handler.logger") - def test_lambda_handler_mns_publish_failure(self, mock_logger, mock_create_notification, mock_get_mns): - """Test handling when MNS publish returns non-201 status.""" + @classmethod + def setUpClass(cls): + """Load the sample SQS event once for all tests.""" + sample_event_path = Path(__file__).parent.parent / "tests/sqs_event.json" + with open(sample_event_path, "r") as f: 
+ raw_event = json.load(f) + + if isinstance(raw_event.get("body"), dict): + raw_event["body"] = json.dumps(raw_event["body"]) + + cls.sample_sqs_record = raw_event + + @patch("process_records.get_mns_service") + @patch("process_records.process_record") + def test_process_records_all_success(self, mock_process_record, mock_get_mns): + """Test processing multiple records with all successes.""" mock_mns_service = Mock() - mock_mns_service.publish_notification.return_value = {"status_code": 400, "message": "Bad Request"} mock_get_mns.return_value = mock_mns_service + mock_process_record.return_value = None # Success - mock_create_notification.return_value = self.sample_notification + record_2 = self.sample_sqs_record.copy() + record_2["messageId"] = "different-id" + records = [self.sample_sqs_record, record_2] - event = {"Records": [self.sample_sqs_record]} - result = lambda_handler(event, Mock()) + result = process_records(records) - expected_message_id = self.sample_sqs_record["messageId"] - self.assertEqual(result, {"batchItemFailures": [{"itemIdentifier": expected_message_id}]}) - mock_logger.exception.assert_called_once() + self.assertEqual(result, []) + self.assertEqual(mock_process_record.call_count, 2) + mock_get_mns.assert_called_once() - @patch("lambda_handler.get_mns_service") - @patch("lambda_handler.create_mns_notification") - @patch("lambda_handler.logger") - def test_lambda_handler_partial_batch_failure(self, mock_logger, mock_create_notification, mock_get_mns): - """Test partial batch failure where one record succeeds and one fails.""" + @patch("process_records.get_mns_service") + @patch("process_records.process_record") + def test_process_records_partial_failure(self, mock_process_record, mock_get_mns): + """Test processing with some failures.""" mock_mns_service = Mock() - mock_mns_service.publish_notification.side_effect = [self.successful_mns_response, Exception("MNS API error")] mock_get_mns.return_value = mock_mns_service - - 
mock_create_notification.return_value = self.sample_notification + mock_process_record.side_effect = [ + None, # Success + {"itemIdentifier": "msg-456"}, # Failure + ] record_2 = self.sample_sqs_record.copy() record_2["messageId"] = "msg-456" + records = [self.sample_sqs_record, record_2] - event = {"Records": [self.sample_sqs_record, record_2]} - result = lambda_handler(event, Mock()) + result = process_records(records) - self.assertEqual(len(result["batchItemFailures"]), 1) - self.assertEqual(result["batchItemFailures"][0]["itemIdentifier"], "msg-456") - self.assertEqual(mock_create_notification.call_count, 2) - mock_logger.exception.assert_called_once() + self.assertEqual(len(result), 1) + self.assertEqual(result[0]["itemIdentifier"], "msg-456") - @patch("lambda_handler.get_mns_service") - @patch("lambda_handler.create_mns_notification") - @patch("lambda_handler.logger") - def test_lambda_handler_empty_records(self, mock_logger, mock_create_notification, mock_get_mns): - """Test handling of empty Records list.""" + @patch("process_records.get_mns_service") + @patch("process_records.process_record") + def test_process_records_empty_list(self, mock_process_record, mock_get_mns): + """Test processing empty record list.""" mock_mns_service = Mock() mock_get_mns.return_value = mock_mns_service - event = {"Records": []} - result = lambda_handler(event, Mock()) + result = process_records([]) - self.assertEqual(result, {"batchItemFailures": []}) - mock_create_notification.assert_not_called() - mock_mns_service.publish_notification.assert_not_called() - mock_logger.info.assert_called_with("Successfully processed all 0 messages") + self.assertEqual(result, []) + mock_process_record.assert_not_called() - @patch("lambda_handler.get_mns_service") - @patch("lambda_handler.create_mns_notification") - @patch("lambda_handler.logger") - def test_lambda_handler_logs_correct_trace_ids_on_failure(self, mock_logger, mock_create_notification, mock_get_mns): - """Test that all trace 
IDs are logged when an error occurs.""" + @patch("process_records.get_mns_service") + @patch("process_records.process_record") + def test_process_records_mns_service_created_once(self, mock_process_record, mock_get_mns): + """Test that MNS service is created only once for batch.""" mock_mns_service = Mock() mock_get_mns.return_value = mock_mns_service + mock_process_record.return_value = None + + records = [self.sample_sqs_record, self.sample_sqs_record, self.sample_sqs_record] - mock_create_notification.side_effect = Exception("Test error") + process_records(records) + + mock_get_mns.assert_called_once() # Only created once + + +class TestLambdaHandler(unittest.TestCase): + """Tests for lambda_handler function.""" + + @classmethod + def setUpClass(cls): + """Load the sample SQS event once for all tests.""" + sample_event_path = Path(__file__).parent.parent / "tests/sqs_event.json" + with open(sample_event_path, "r") as f: + raw_event = json.load(f) + + if isinstance(raw_event.get("body"), dict): + raw_event["body"] = json.dumps(raw_event["body"]) + + cls.sample_sqs_record = raw_event + + @patch("lambda_handler.process_records") + @patch("lambda_handler.logger") + def test_lambda_handler_all_success(self, mock_logger, mock_process_records): + """Test lambda handler with all records succeeding.""" + mock_process_records.return_value = [] + + event = {"Records": [self.sample_sqs_record]} + result = lambda_handler(event, Mock()) + + self.assertEqual(result, {"batchItemFailures": []}) + mock_process_records.assert_called_once_with([self.sample_sqs_record]) + mock_logger.info.assert_called_with("Successfully processed all 1 messages") + + @patch("lambda_handler.process_records") + @patch("lambda_handler.logger") + def test_lambda_handler_with_failures(self, mock_logger, mock_process_records): + """Test lambda handler with some failures.""" + mock_process_records.return_value = [{"itemIdentifier": "msg-123"}] event = {"Records": [self.sample_sqs_record]} - 
lambda_handler(event, Mock()) + result = lambda_handler(event, Mock()) - exception_call = mock_logger.exception.call_args - self.assertEqual(exception_call[0][0], "Failed to process message") - trace_ids = exception_call[1]["trace_ids"] + self.assertEqual(result, {"batchItemFailures": [{"itemIdentifier": "msg-123"}]}) + mock_logger.warning.assert_called_with("Batch completed with 1 failures") - self.assertEqual(trace_ids["message_id"], "98ed30eb-829f-41df-8a73-57fef70cf161") - self.assertEqual(trace_ids["immunisation_id"], "d058014c-b0fd-4471-8db9-3316175eb825") - self.assertEqual(trace_ids["error"], "Test error") + @patch("lambda_handler.process_records") + @patch("lambda_handler.logger") + def test_lambda_handler_empty_records(self, mock_logger, mock_process_records): + """Test lambda handler with no records.""" + mock_process_records.return_value = [] + + event = {"Records": []} + result = lambda_handler(event, Mock()) + + self.assertEqual(result, {"batchItemFailures": []}) + mock_process_records.assert_called_once_with([]) if __name__ == "__main__": diff --git a/lambdas/shared/src/common/api_clients/mns_service.py b/lambdas/shared/src/common/api_clients/mns_service.py index ba3d412b17..d7ab234673 100644 --- a/lambdas/shared/src/common/api_clients/mns_service.py +++ b/lambdas/shared/src/common/api_clients/mns_service.py @@ -12,9 +12,10 @@ SQS_ARN = os.getenv("SQS_ARN") apigee_env = os.getenv("APIGEE_ENVIRONMENT", "int") +mns_env = os.getenv("MNS_ENV", "int") MNS_BASE_URL = ( "https://api.service.nhs.uk/multicast-notification-service" - if apigee_env == "prod" + if apigee_env or mns_env == "prod" else "https://int.api.service.nhs.uk/multicast-notification-service" ) diff --git a/lambdas/shared/src/common/api_clients/mns_setup.py b/lambdas/shared/src/common/api_clients/mns_setup.py index 2e3d6f3863..94d06f1198 100644 --- a/lambdas/shared/src/common/api_clients/mns_setup.py +++ b/lambdas/shared/src/common/api_clients/mns_setup.py @@ -14,7 +14,6 @@ def 
get_mns_service(mns_env: str = "int"): boto_config = Config(region_name="eu-west-2") cache = Cache(directory="/tmp") # NOSONAR(S5443) logging.info("Creating authenticator...") - # TODO: MNS and PDS need separate secrets authenticator = AppRestrictedAuth( service=Service.PDS, secret_manager_client=boto3.client("secretsmanager", config=boto_config), From afe7bc974014c37070eaddc5b263c2d08e8ea7a4 Mon Sep 17 00:00:00 2001 From: Akol125 Date: Tue, 24 Feb 2026 13:35:47 +0000 Subject: [PATCH 18/31] fix sonar and terraform issues --- lambdas/mns_publisher/src/process_records.py | 2 +- lambdas/mns_publisher/src/sqs_dynamo_utils.py | 3 ++- lambdas/shared/src/common/api_clients/mns_service.py | 3 ++- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/lambdas/mns_publisher/src/process_records.py b/lambdas/mns_publisher/src/process_records.py index f67301a938..31e7f56168 100644 --- a/lambdas/mns_publisher/src/process_records.py +++ b/lambdas/mns_publisher/src/process_records.py @@ -29,7 +29,7 @@ def process_records(records: events.SQSEvent) -> list[dict]: return batch_item_failures -def process_record(record: SQSMessage, mns_service) -> str | None: +def process_record(record: SQSMessage, mns_service) -> dict | None: """ Process a single SQS record. Args: diff --git a/lambdas/mns_publisher/src/sqs_dynamo_utils.py b/lambdas/mns_publisher/src/sqs_dynamo_utils.py index 1348591ddc..1e7fa2dfda 100644 --- a/lambdas/mns_publisher/src/sqs_dynamo_utils.py +++ b/lambdas/mns_publisher/src/sqs_dynamo_utils.py @@ -1,4 +1,5 @@ import json +from typing import Any from constants import DYNAMO_DB_TYPE_DESCRIPTORS, ImmsData @@ -32,7 +33,7 @@ def extract_sqs_imms_data(sqs_record: dict) -> ImmsData: } -def _unwrap_dynamodb_value(value) -> None: +def _unwrap_dynamodb_value(value) -> Any: """ Unwrap DynamoDB type descriptor to get the actual value. 
DynamoDB types: S (String), N (Number), BOOL, M (Map), L (List), NULL diff --git a/lambdas/shared/src/common/api_clients/mns_service.py b/lambdas/shared/src/common/api_clients/mns_service.py index d7ab234673..ac36d9217d 100644 --- a/lambdas/shared/src/common/api_clients/mns_service.py +++ b/lambdas/shared/src/common/api_clients/mns_service.py @@ -13,9 +13,10 @@ apigee_env = os.getenv("APIGEE_ENVIRONMENT", "int") mns_env = os.getenv("MNS_ENV", "int") +env = apigee_env or mns_env MNS_BASE_URL = ( "https://api.service.nhs.uk/multicast-notification-service" - if apigee_env or mns_env == "prod" + if env == "prod" else "https://int.api.service.nhs.uk/multicast-notification-service" ) From 0297ee52faddfea24e7f26aded1e22f3d73c32de Mon Sep 17 00:00:00 2001 From: Akol125 Date: Tue, 24 Feb 2026 14:29:56 +0000 Subject: [PATCH 19/31] remove pdsSync found in id_sync --- lambdas/id_sync/src/ieds_db_operations.py | 6 +++--- lambdas/id_sync/src/record_processor.py | 4 ++-- lambdas/id_sync/tests/test_record_processor.py | 4 ++-- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/lambdas/id_sync/src/ieds_db_operations.py b/lambdas/id_sync/src/ieds_db_operations.py index ef42f6d04b..ea308f92ca 100644 --- a/lambdas/id_sync/src/ieds_db_operations.py +++ b/lambdas/id_sync/src/ieds_db_operations.py @@ -60,7 +60,7 @@ def get_items_from_patient_id(id: str) -> list: """Public wrapper: build PatientPK and return all matching items. Delegates actual paging to the internal helper `_paginate_items_for_patient_pk`. - Raises PdsSyncException on error. + Raises IdSyncException on error. """ patient_pk = f"Patient#{id}" try: @@ -77,7 +77,7 @@ def get_items_from_patient_id(id: str) -> list: def paginate_items_for_patient_pk(patient_pk: str) -> list: """Internal helper that pages through the PatientGSI and returns all items. - Raises PdsSyncException when the DynamoDB response is malformed. + Raises IdSyncException when the DynamoDB response is malformed. 
""" all_items: list = [] last_evaluated_key = None @@ -92,7 +92,7 @@ def paginate_items_for_patient_pk(patient_pk: str) -> list: response = get_ieds_table().query(**query_args) if "Items" not in response: - # Unexpected DynamoDB response shape - surface as PdsSyncException + # Unexpected DynamoDB response shape - surface as IdSyncException logger.exception("Unexpected DynamoDB response: missing 'Items'") raise IdSyncException( message="No Items in DynamoDB response", diff --git a/lambdas/id_sync/src/record_processor.py b/lambdas/id_sync/src/record_processor.py index a0294d49d2..2034cba0b5 100644 --- a/lambdas/id_sync/src/record_processor.py +++ b/lambdas/id_sync/src/record_processor.py @@ -1,9 +1,9 @@ import json from typing import Any, Dict -from common.api_clients.errors import PdsSyncException from common.api_clients.get_pds_details import pds_get_patient_details from common.clients import logger +from exceptions.id_sync_exception import IdSyncException from ieds_db_operations import ( IDENTIFIER_KEY, extract_patient_resource_from_item, @@ -37,7 +37,7 @@ def process_record(event_record: Dict[str, Any]) -> Dict[str, Any]: def process_nhs_number(nhs_number: str) -> Dict[str, Any]: try: pds_patient_resource = pds_get_patient_details(nhs_number) - except PdsSyncException as e: + except IdSyncException as e: return make_status(str(e), status="error") if not pds_patient_resource: diff --git a/lambdas/id_sync/tests/test_record_processor.py b/lambdas/id_sync/tests/test_record_processor.py index 025abb8250..7af754e0ad 100644 --- a/lambdas/id_sync/tests/test_record_processor.py +++ b/lambdas/id_sync/tests/test_record_processor.py @@ -2,7 +2,7 @@ import unittest from unittest.mock import call, patch -from common.api_clients.errors import PdsSyncException +from exceptions.id_sync_exception import IdSyncException from record_processor import process_record @@ -194,7 +194,7 @@ def test_pds_details_exception_aborts_update(self): test_sqs_record = {"body": 
json.dumps({"subject": nhs_number})} # pds returns a different id to force update path self.mock_get_items_from_patient_id.return_value = [{"Resource": {}}] - self.mock_pds_get_patient_details.side_effect = PdsSyncException("Error retrieving patient details from PDS") + self.mock_pds_get_patient_details.side_effect = IdSyncException("Error retrieving patient details from PDS") result = process_record(test_sqs_record) self.assertEqual(result["status"], "error") From a96582a246dc91219521a7cdb910b49014deace6 Mon Sep 17 00:00:00 2001 From: Akol125 Date: Tue, 24 Feb 2026 17:10:51 +0000 Subject: [PATCH 20/31] resolve type hinting and remove try catch --- .../mns_publisher/src/create_notification.py | 48 ++++++------ lambdas/mns_publisher/src/lambda_handler.py | 9 +-- lambdas/mns_publisher/src/process_records.py | 76 +++++++++--------- lambdas/mns_publisher/src/sqs_dynamo_utils.py | 2 +- .../tests/test_create_notification.py | 6 +- .../tests/test_lambda_handler.py | 77 ++++++++----------- .../src/common/api_clients/mns_service.py | 1 - 7 files changed, 94 insertions(+), 125 deletions(-) diff --git a/lambdas/mns_publisher/src/create_notification.py b/lambdas/mns_publisher/src/create_notification.py index ff0ce760fd..a60733feb5 100644 --- a/lambdas/mns_publisher/src/create_notification.py +++ b/lambdas/mns_publisher/src/create_notification.py @@ -63,36 +63,32 @@ def calculate_age_at_vaccination(birth_date: str, vaccination_date: str) -> int: def get_practitioner_details_from_pds(nhs_number: str) -> str | None: - try: - patient_details = pds_get_patient_details(nhs_number) + patient_details = pds_get_patient_details(nhs_number) - general_practitioners = patient_details.get("generalPractitioner", []) - if not general_practitioners or len(general_practitioners) == 0: - logger.warning("No GP details found for patient") - return None + general_practitioners = patient_details.get("generalPractitioner", []) + if not general_practitioners or len(general_practitioners) == 0: + 
logger.warning("No GP details found for patient") + return None - patient_gp = general_practitioners[0] - patient_gp_identifier = patient_gp.get("identifier", {}) + patient_gp = general_practitioners[0] + patient_gp_identifier = patient_gp.get("identifier", {}) - gp_ods_code = patient_gp_identifier.get("value") - if not gp_ods_code: - logger.warning("GP ODS code not found in practitioner details") - return None + gp_ods_code = patient_gp_identifier.get("value") + if not gp_ods_code: + logger.warning("GP ODS code not found in practitioner details") + return None - # Check if registration is current - period = patient_gp_identifier.get("period", {}) - gp_period_end_date = period.get("end", None) + # Check if registration is current + period = patient_gp_identifier.get("period", {}) + gp_period_end_date = period.get("end", None) - if gp_period_end_date: - # Parse end date (format: YYYY-MM-DD) - end_date = datetime.strptime(gp_period_end_date, "%Y-%m-%d").date() - today = datetime.now().date() + if gp_period_end_date: + # Parse end date (format: YYYY-MM-DD) + end_date = datetime.strptime(gp_period_end_date, "%Y-%m-%d").date() + today = datetime.now().date() - if end_date < today: - logger.warning("GP registration has ended") - return None + if end_date < today: + logger.warning("No current GP registration found for patient") + return None - return gp_ods_code - except Exception as error: - logger.exception("Failed to get practitioner details from pds", error) - raise + return gp_ods_code diff --git a/lambdas/mns_publisher/src/lambda_handler.py b/lambdas/mns_publisher/src/lambda_handler.py index 670c6f8677..81e1bff27c 100644 --- a/lambdas/mns_publisher/src/lambda_handler.py +++ b/lambdas/mns_publisher/src/lambda_handler.py @@ -1,16 +1,9 @@ from aws_lambda_typing import context, events -from common.clients import logger from process_records import process_records def lambda_handler(event: events.SQSEvent, _: context.Context) -> dict[str, list]: event_records = 
event.get("Records", []) - batch_item_failures = process_records(event_records) - if batch_item_failures: - logger.warning(f"Batch completed with {len(batch_item_failures)} failures") - else: - logger.info(f"Successfully processed all {len(event_records)} messages") - - return {"batchItemFailures": batch_item_failures} + return process_records(event_records) diff --git a/lambdas/mns_publisher/src/process_records.py b/lambdas/mns_publisher/src/process_records.py index 31e7f56168..915a2ec43b 100644 --- a/lambdas/mns_publisher/src/process_records.py +++ b/lambdas/mns_publisher/src/process_records.py @@ -2,9 +2,9 @@ import os from typing import Tuple -from aws_lambda_typing import events from aws_lambda_typing.events.sqs import SQSMessage +from common.api_clients.mns_service import MnsService from common.api_clients.mns_setup import get_mns_service from common.clients import logger from create_notification import create_mns_notification @@ -12,7 +12,7 @@ mns_env = os.getenv("MNS_ENV", "int") -def process_records(records: events.SQSEvent) -> list[dict]: +def process_records(records: list[SQSMessage]) -> list[dict]: """ Process multiple SQS records. 
Args: records: List of SQS records to process @@ -22,14 +22,23 @@ def process_records(records: events.SQSEvent) -> list[dict]: mns_service = get_mns_service(mns_env=mns_env) for record in records: - failed_batch_item = process_record(record, mns_service) - if failed_batch_item: - batch_item_failures.append(failed_batch_item) + try: + failed_batch_item = process_record(record, mns_service) + if failed_batch_item: + batch_item_failures.append(failed_batch_item) + except Exception: + message_id = record.get("messageId", "unknown") + batch_item_failures.append({"itemIdentifier": message_id}) - return batch_item_failures + if batch_item_failures: + logger.warning(f"Batch completed with {len(batch_item_failures)} failures") + else: + logger.info(f"Successfully processed all {len(records)} messages") + return {"batchItemFailures": batch_item_failures} -def process_record(record: SQSMessage, mns_service) -> dict | None: + +def process_record(record: SQSMessage, mns_service: MnsService) -> dict | None: """ Process a single SQS record. 
Args: @@ -40,40 +49,25 @@ def process_record(record: SQSMessage, mns_service) -> dict | None: message_id, immunisation_id = extract_trace_ids(record) notification_id = None - try: - # Create notification payload - mns_notification_payload = create_mns_notification(record) - notification_id = mns_notification_payload.get("id") - action_flag = mns_notification_payload.get("filtering", {}).get("action") - logger.info( - "Processing message", - trace_ids={ - "notification_id": notification_id, - "message_id": message_id, - "immunisation_id": immunisation_id, - "action_flag": action_flag, - }, - ) - - # Publish to MNS - mns_pub_response = mns_service.publish_notification(mns_notification_payload) - if mns_pub_response["status_code"] != 200: - raise RuntimeError("MNS publish failed") - logger.info("Successfully created MNS notification", trace_ids={"mns_notification_id": notification_id}) - - return None - - except Exception as e: - logger.exception( - "Failed to process message", - trace_ids={ - "message_id": message_id, - "immunisation_id": immunisation_id, - "mns_notification_id": notification_id, - "error": str(e), - }, - ) - return {"itemIdentifier": message_id} + # Create notification payload + mns_notification_payload = create_mns_notification(record) + notification_id = mns_notification_payload.get("id") + action_flag = mns_notification_payload.get("filtering", {}).get("action") + logger.info( + "Processing message", + trace_ids={ + "notification_id": notification_id, + "message_id": message_id, + "immunisation_id": immunisation_id, + "action_flag": action_flag, + }, + ) + + # Publish to MNS + mns_service.publish_notification(mns_notification_payload) + logger.info("Successfully created MNS notification", trace_ids={"mns_notification_id": notification_id}) + + return None def extract_trace_ids(record: SQSMessage) -> Tuple[str, str | None]: diff --git a/lambdas/mns_publisher/src/sqs_dynamo_utils.py b/lambdas/mns_publisher/src/sqs_dynamo_utils.py index 
1e7fa2dfda..edb09e4e30 100644 --- a/lambdas/mns_publisher/src/sqs_dynamo_utils.py +++ b/lambdas/mns_publisher/src/sqs_dynamo_utils.py @@ -33,7 +33,7 @@ def extract_sqs_imms_data(sqs_record: dict) -> ImmsData: } -def _unwrap_dynamodb_value(value) -> Any: +def _unwrap_dynamodb_value(value: dict) -> Any: """ Unwrap DynamoDB type descriptor to get the actual value. DynamoDB types: S (String), N (Number), BOOL, M (Map), L (List), NULL diff --git a/lambdas/mns_publisher/tests/test_create_notification.py b/lambdas/mns_publisher/tests/test_create_notification.py index 85890ea2e0..90f9d90755 100644 --- a/lambdas/mns_publisher/tests/test_create_notification.py +++ b/lambdas/mns_publisher/tests/test_create_notification.py @@ -143,12 +143,10 @@ def test_get_practitioner_empty_value(self, mock_logger, mock_pds_get): def test_get_practitioner_pds_exception(self, mock_logger, mock_pds_get): """Test when PDS API raises exception.""" mock_pds_get.side_effect = Exception("PDS API error") - with self.assertRaises(Exception) as context: get_practitioner_details_from_pds("9481152782") - - self.assertEqual(str(context.exception), "PDS API error") - mock_logger.exception.assert_called_once() + self.assertEqual(str(context.exception), "PDS API error") + mock_logger.exception.assert_called_once() @patch("create_notification.pds_get_patient_details") @patch("create_notification.logger") diff --git a/lambdas/mns_publisher/tests/test_lambda_handler.py b/lambdas/mns_publisher/tests/test_lambda_handler.py index a01f2f62ef..153f23d949 100644 --- a/lambdas/mns_publisher/tests/test_lambda_handler.py +++ b/lambdas/mns_publisher/tests/test_lambda_handler.py @@ -87,7 +87,7 @@ def setUp(self): self.sample_notification = { "id": "notif-789", "specversion": "1.0", - "type": "imms-vaccinations-2", + "type": "imms-vaccinations-1", "filtering": {"action": "CREATE"}, } self.mock_mns_service = Mock() @@ -97,11 +97,11 @@ def setUp(self): def test_process_record_success(self, mock_logger, 
mock_create_notification): """Test successful processing of a single record.""" mock_create_notification.return_value = self.sample_notification - self.mock_mns_service.publish_notification.return_value = {"status_code": 200} + self.mock_mns_service.publish_notification.return_value = None - result = process_record(self.sample_sqs_record, self.mock_mns_service) + # Should not raise exception + process_record(self.sample_sqs_record, self.mock_mns_service) - self.assertIsNone(result) mock_create_notification.assert_called_once_with(self.sample_sqs_record) self.mock_mns_service.publish_notification.assert_called_once_with(self.sample_notification) mock_logger.exception.assert_not_called() @@ -112,10 +112,10 @@ def test_process_record_create_notification_failure(self, mock_logger, mock_crea """Test handling when notification creation fails.""" mock_create_notification.side_effect = Exception("Creation error") - result = process_record(self.sample_sqs_record, self.mock_mns_service) + # Should raise exception + with self.assertRaises(Exception): + process_record(self.sample_sqs_record, self.mock_mns_service) - self.assertEqual(result, {"itemIdentifier": "98ed30eb-829f-41df-8a73-57fef70cf161"}) - mock_logger.exception.assert_called_once() self.mock_mns_service.publish_notification.assert_not_called() @patch("process_records.create_mns_notification") @@ -125,22 +125,9 @@ def test_process_record_publish_failure(self, mock_logger, mock_create_notificat mock_create_notification.return_value = self.sample_notification self.mock_mns_service.publish_notification.side_effect = Exception("Publish error") - result = process_record(self.sample_sqs_record, self.mock_mns_service) - - self.assertEqual(result, {"itemIdentifier": "98ed30eb-829f-41df-8a73-57fef70cf161"}) - mock_logger.exception.assert_called_once() - - @patch("process_records.create_mns_notification") - @patch("process_records.logger") - def test_process_record_logs_trace_ids(self, mock_logger, mock_create_notification): 
- """Test that trace IDs are logged correctly.""" - mock_create_notification.return_value = self.sample_notification - - process_record(self.sample_sqs_record, self.mock_mns_service) - - # Check info log was called with trace IDs - info_calls = [call for call in mock_logger.info.call_args_list if "Processing message" in str(call)] - self.assertEqual(len(info_calls), 1) + # Should raise exception + with self.assertRaises(Exception): + process_record(self.sample_sqs_record, self.mock_mns_service) class TestProcessRecords(unittest.TestCase): @@ -158,13 +145,14 @@ def setUpClass(cls): cls.sample_sqs_record = raw_event + @patch("process_records.logger") @patch("process_records.get_mns_service") @patch("process_records.process_record") - def test_process_records_all_success(self, mock_process_record, mock_get_mns): + def test_process_records_all_success(self, mock_process_record, mock_get_mns, mock_logger): """Test processing multiple records with all successes.""" mock_mns_service = Mock() mock_get_mns.return_value = mock_mns_service - mock_process_record.return_value = None # Success + mock_process_record.return_value = None # No exception record_2 = self.sample_sqs_record.copy() record_2["messageId"] = "different-id" @@ -172,19 +160,21 @@ def test_process_records_all_success(self, mock_process_record, mock_get_mns): result = process_records(records) - self.assertEqual(result, []) + self.assertEqual(result, {"batchItemFailures": []}) self.assertEqual(mock_process_record.call_count, 2) mock_get_mns.assert_called_once() + mock_logger.info.assert_called_with("Successfully processed all 2 messages") + @patch("process_records.logger") @patch("process_records.get_mns_service") @patch("process_records.process_record") - def test_process_records_partial_failure(self, mock_process_record, mock_get_mns): + def test_process_records_partial_failure(self, mock_process_record, mock_get_mns, mock_logger): """Test processing with some failures.""" mock_mns_service = Mock() 
mock_get_mns.return_value = mock_mns_service mock_process_record.side_effect = [ None, # Success - {"itemIdentifier": "msg-456"}, # Failure + Exception("Processing error"), # Failure ] record_2 = self.sample_sqs_record.copy() @@ -193,24 +183,28 @@ def test_process_records_partial_failure(self, mock_process_record, mock_get_mns result = process_records(records) - self.assertEqual(len(result), 1) - self.assertEqual(result[0]["itemIdentifier"], "msg-456") + self.assertEqual(len(result["batchItemFailures"]), 1) + self.assertEqual(result["batchItemFailures"][0]["itemIdentifier"], "msg-456") + mock_logger.warning.assert_called_with("Batch completed with 1 failures") + @patch("process_records.logger") @patch("process_records.get_mns_service") @patch("process_records.process_record") - def test_process_records_empty_list(self, mock_process_record, mock_get_mns): + def test_process_records_empty_list(self, mock_process_record, mock_get_mns, mock_logger): """Test processing empty record list.""" mock_mns_service = Mock() mock_get_mns.return_value = mock_mns_service result = process_records([]) - self.assertEqual(result, []) + self.assertEqual(result, {"batchItemFailures": []}) mock_process_record.assert_not_called() + mock_logger.info.assert_called_with("Successfully processed all 0 messages") + @patch("process_records.logger") @patch("process_records.get_mns_service") @patch("process_records.process_record") - def test_process_records_mns_service_created_once(self, mock_process_record, mock_get_mns): + def test_process_records_mns_service_created_once(self, mock_process_record, mock_get_mns, mock_logger): """Test that MNS service is created only once for batch.""" mock_mns_service = Mock() mock_get_mns.return_value = mock_mns_service @@ -239,35 +233,30 @@ def setUpClass(cls): cls.sample_sqs_record = raw_event @patch("lambda_handler.process_records") - @patch("lambda_handler.logger") - def test_lambda_handler_all_success(self, mock_logger, mock_process_records): + def 
test_lambda_handler_all_success(self, mock_process_records): """Test lambda handler with all records succeeding.""" - mock_process_records.return_value = [] + mock_process_records.return_value = {"batchItemFailures": []} event = {"Records": [self.sample_sqs_record]} result = lambda_handler(event, Mock()) self.assertEqual(result, {"batchItemFailures": []}) mock_process_records.assert_called_once_with([self.sample_sqs_record]) - mock_logger.info.assert_called_with("Successfully processed all 1 messages") @patch("lambda_handler.process_records") - @patch("lambda_handler.logger") - def test_lambda_handler_with_failures(self, mock_logger, mock_process_records): + def test_lambda_handler_with_failures(self, mock_process_records): """Test lambda handler with some failures.""" - mock_process_records.return_value = [{"itemIdentifier": "msg-123"}] + mock_process_records.return_value = {"batchItemFailures": [{"itemIdentifier": "msg-123"}]} event = {"Records": [self.sample_sqs_record]} result = lambda_handler(event, Mock()) self.assertEqual(result, {"batchItemFailures": [{"itemIdentifier": "msg-123"}]}) - mock_logger.warning.assert_called_with("Batch completed with 1 failures") @patch("lambda_handler.process_records") - @patch("lambda_handler.logger") - def test_lambda_handler_empty_records(self, mock_logger, mock_process_records): + def test_lambda_handler_empty_records(self, mock_process_records): """Test lambda handler with no records.""" - mock_process_records.return_value = [] + mock_process_records.return_value = {"batchItemFailures": []} event = {"Records": []} result = lambda_handler(event, Mock()) diff --git a/lambdas/shared/src/common/api_clients/mns_service.py b/lambdas/shared/src/common/api_clients/mns_service.py index ac36d9217d..ed3a15e291 100644 --- a/lambdas/shared/src/common/api_clients/mns_service.py +++ b/lambdas/shared/src/common/api_clients/mns_service.py @@ -79,7 +79,6 @@ def get_subscription(self) -> dict | None: headers = self._build_headers() response 
= request_with_retry_backoff("GET", f"{MNS_BASE_URL}/subscriptions", headers, timeout=10) logging.info(f"GET {MNS_BASE_URL}/subscriptions") - logging.debug(f"Headers: {headers}") if response.status_code == 200: bundle = response.json() From a925afded72e2dd341fe88ab711c045748eea0ad Mon Sep 17 00:00:00 2001 From: Akol125 Date: Wed, 25 Feb 2026 09:37:09 +0000 Subject: [PATCH 21/31] add secrets for tf and resolve sonar issues --- infrastructure/instance/mns_publisher.tf | 2 +- .../mns_publisher/mns_publisher_lambda.tf | 26 ++++++++++++++++++- .../modules/mns_publisher/variables.tf | 4 +-- lambdas/mns_publisher/src/process_records.py | 6 ++--- 4 files changed, 30 insertions(+), 8 deletions(-) diff --git a/infrastructure/instance/mns_publisher.tf b/infrastructure/instance/mns_publisher.tf index 28d4e62067..b1ae89f096 100644 --- a/infrastructure/instance/mns_publisher.tf +++ b/infrastructure/instance/mns_publisher.tf @@ -8,7 +8,7 @@ module "mns_publisher" { immunisation_account_id = var.immunisation_account_id is_temp = local.is_temp resource_scope = local.resource_scope - sub_environment = strcontains(var.sub_environment, "pr-") ? "immunisation-fhir-api/FHIR/R4-${var.sub_environment}" : "immunisation-fhir-api/FHIR/R4" + imms-base-path = strcontains(var.sub_environment, "pr-") ? 
"immunisation-fhir-api/FHIR/R4-${var.sub_environment}" : "immunisation-fhir-api/FHIR/R4" lambda_kms_encryption_key_arn = data.aws_kms_key.existing_lambda_encryption_key.arn mns_publisher_resource_name_prefix = "${local.resource_scope}-mns-outbound-events" pds_environment = var.pds_environment diff --git a/infrastructure/instance/modules/mns_publisher/mns_publisher_lambda.tf b/infrastructure/instance/modules/mns_publisher/mns_publisher_lambda.tf index 20a9136cc0..e16fbda0bc 100644 --- a/infrastructure/instance/modules/mns_publisher/mns_publisher_lambda.tf +++ b/infrastructure/instance/modules/mns_publisher/mns_publisher_lambda.tf @@ -193,7 +193,7 @@ resource "aws_lambda_function" "mns_publisher_lambda" { variables = { SPLUNK_FIREHOSE_NAME = var.splunk_firehose_stream_name IMMUNIZATION_ENV = var.resource_scope, - IMMUNIZATION_BASE_PATH = var.sub_environment + IMMUNIZATION_BASE_PATH = var.imms_base_path PDS_ENV = var.pds_environment MNS_ENV = var.mns_environment } @@ -207,6 +207,30 @@ resource "aws_lambda_function" "mns_publisher_lambda" { ] } + +data "aws_iam_policy_document" "mns_publisher_secrets_policy_document" { + source_policy_documents = [ + templatefile("${local.policy_path}/secret_manager.json", { + "account_id" : data.aws_caller_identity.current.account_id, + "pds_environment" : var.pds_environment + }), + ] +} + +resource "aws_iam_policy" "mns_publisher_lambda_secrets_policy" { + name = "${local.mns_publisher_lambda_name}-secrets-policy" + description = "Allow Lambda to access Secrets Manager" + policy = data.aws_iam_policy_document.mns_publisher_secrets_policy_document.json +} + + +# Attach the secrets/dynamodb access policy to the Lambda role +resource "aws_iam_role_policy_attachment" "mns_publisher_lambda_secrets_policy_attachment" { + role = aws_iam_role.mns_publisher_lambda_exec_role.name + policy_arn = aws_iam_policy.mns_publish_lambda_secrets_policy.arn +} + + resource "aws_cloudwatch_log_group" "mns_publisher_lambda_log_group" { name = 
"/aws/lambda/${local.mns_publisher_lambda_name}" retention_in_days = 30 diff --git a/infrastructure/instance/modules/mns_publisher/variables.tf b/infrastructure/instance/modules/mns_publisher/variables.tf index 383ecace71..3d783fed09 100644 --- a/infrastructure/instance/modules/mns_publisher/variables.tf +++ b/infrastructure/instance/modules/mns_publisher/variables.tf @@ -81,9 +81,9 @@ variable "resource_scope" { EOT } -variable "sub_environment" { +variable "imms_base_path" { type = string - description = "Sub-environment name, e.g. internal-dev, internal-qa. The value is set in the Makefile" + description = "Base path for the Immunisation FHIR API. Used to construct environment-specific routes (e.g. PR preview paths or default R4 path)." } variable "mns_environment" { diff --git a/lambdas/mns_publisher/src/process_records.py b/lambdas/mns_publisher/src/process_records.py index 915a2ec43b..74e2577802 100644 --- a/lambdas/mns_publisher/src/process_records.py +++ b/lambdas/mns_publisher/src/process_records.py @@ -12,7 +12,7 @@ mns_env = os.getenv("MNS_ENV", "int") -def process_records(records: list[SQSMessage]) -> list[dict]: +def process_records(records: list[SQSMessage]) -> dict[str, list]: """ Process multiple SQS records. 
Args: records: List of SQS records to process @@ -23,9 +23,7 @@ def process_records(records: list[SQSMessage]) -> list[dict]: for record in records: try: - failed_batch_item = process_record(record, mns_service) - if failed_batch_item: - batch_item_failures.append(failed_batch_item) + process_record(record, mns_service) except Exception: message_id = record.get("messageId", "unknown") batch_item_failures.append({"itemIdentifier": message_id}) From 1ef11247fa3b87c57c568b05fa2a28a19172f78c Mon Sep 17 00:00:00 2001 From: Akol125 Date: Wed, 25 Feb 2026 10:34:57 +0000 Subject: [PATCH 22/31] secret configurations --- infrastructure/instance/mns_publisher.tf | 4 +++- .../modules/mns_publisher/mns_publisher_lambda.tf | 6 +++--- .../instance/modules/mns_publisher/variables.tf | 10 ++++++++++ 3 files changed, 16 insertions(+), 4 deletions(-) diff --git a/infrastructure/instance/mns_publisher.tf b/infrastructure/instance/mns_publisher.tf index b1ae89f096..c5a99dab6f 100644 --- a/infrastructure/instance/mns_publisher.tf +++ b/infrastructure/instance/mns_publisher.tf @@ -8,9 +8,11 @@ module "mns_publisher" { immunisation_account_id = var.immunisation_account_id is_temp = local.is_temp resource_scope = local.resource_scope - imms-base-path = strcontains(var.sub_environment, "pr-") ? "immunisation-fhir-api/FHIR/R4-${var.sub_environment}" : "immunisation-fhir-api/FHIR/R4" + imms_base_path = strcontains(var.sub_environment, "pr-") ? 
"immunisation-fhir-api/FHIR/R4-${var.sub_environment}" : "immunisation-fhir-api/FHIR/R4" lambda_kms_encryption_key_arn = data.aws_kms_key.existing_lambda_encryption_key.arn mns_publisher_resource_name_prefix = "${local.resource_scope}-mns-outbound-events" + secrets_manager_policy_path = "${local.policy_path}/secret_manager.json" + account_id = data.aws_caller_identity.current.account_id pds_environment = var.pds_environment mns_environment = var.mns_environment diff --git a/infrastructure/instance/modules/mns_publisher/mns_publisher_lambda.tf b/infrastructure/instance/modules/mns_publisher/mns_publisher_lambda.tf index e16fbda0bc..a99783f0b6 100644 --- a/infrastructure/instance/modules/mns_publisher/mns_publisher_lambda.tf +++ b/infrastructure/instance/modules/mns_publisher/mns_publisher_lambda.tf @@ -210,8 +210,8 @@ resource "aws_lambda_function" "mns_publisher_lambda" { data "aws_iam_policy_document" "mns_publisher_secrets_policy_document" { source_policy_documents = [ - templatefile("${local.policy_path}/secret_manager.json", { - "account_id" : data.aws_caller_identity.current.account_id, + templatefile("${var.secrets_manager_policy_path}", { + "account_id" : var.account_id, "pds_environment" : var.pds_environment }), ] @@ -227,7 +227,7 @@ resource "aws_iam_policy" "mns_publisher_lambda_secrets_policy" { # Attach the secrets/dynamodb access policy to the Lambda role resource "aws_iam_role_policy_attachment" "mns_publisher_lambda_secrets_policy_attachment" { role = aws_iam_role.mns_publisher_lambda_exec_role.name - policy_arn = aws_iam_policy.mns_publish_lambda_secrets_policy.arn + policy_arn = aws_iam_policy.mns_publisher_lambda_secrets_policy.arn } diff --git a/infrastructure/instance/modules/mns_publisher/variables.tf b/infrastructure/instance/modules/mns_publisher/variables.tf index 3d783fed09..fb691a7978 100644 --- a/infrastructure/instance/modules/mns_publisher/variables.tf +++ b/infrastructure/instance/modules/mns_publisher/variables.tf @@ -93,3 +93,13 @@ 
variable "mns_environment" { variable "pds_environment" { type = string } + +variable "account_id" { + type = string + description = "AWS account ID used for IAM policy templating (e.g., Secrets Manager ARNs)." +} + +variable "secrets_manager_policy_path" { + type = string + description = "Path to the IAM policy JSON template for Secrets Manager access (e.g., ./policies/secret_manager.json)." +} \ No newline at end of file From 96a4a13fbe0f98a082b3bc8437cbe2f698ff4c44 Mon Sep 17 00:00:00 2001 From: Akol125 Date: Wed, 25 Feb 2026 15:40:26 +0000 Subject: [PATCH 23/31] fix e2e as base isn't master --- .../tests/test_create_notification.py | 338 ++++++++---------- .../utilities/error_constants.py | 2 +- 2 files changed, 146 insertions(+), 194 deletions(-) diff --git a/lambdas/mns_publisher/tests/test_create_notification.py b/lambdas/mns_publisher/tests/test_create_notification.py index 90f9d90755..6fabf14682 100644 --- a/lambdas/mns_publisher/tests/test_create_notification.py +++ b/lambdas/mns_publisher/tests/test_create_notification.py @@ -12,169 +12,76 @@ ) +def _load_sample_sqs_event() -> dict: + """ + Loads the sample SQS event and normalises body to a JSON string (as SQS delivers it). 
+ Expects: lambdas/mns_publisher/tests/sqs_event.json + """ + sample_event_path = Path(__file__).parent / "sqs_event.json" + with open(sample_event_path, "r") as f: + raw_event = json.load(f) + + # Ensure body is a JSON string (SQS behaviour) + if isinstance(raw_event.get("body"), dict): + raw_event["body"] = json.dumps(raw_event["body"]) + + return raw_event + + class TestCalculateAgeAtVaccination(unittest.TestCase): """Tests for age calculation at vaccination time.""" def test_age_calculation_yyyymmdd_format(self): - """Test age calculation with YYYYMMDD format.""" birth_date = "20040609" vaccination_date = "20260212" - age = calculate_age_at_vaccination(birth_date, vaccination_date) - self.assertEqual(age, 21) def test_age_calculation_with_time(self): - """Test age calculation with YYYYMMDDTHHmmss format.""" birth_date = "20040609T120000" vaccination_date = "20260212T174437" - age = calculate_age_at_vaccination(birth_date, vaccination_date) - self.assertEqual(age, 21) def test_age_calculation_after_birthday(self): - """Test age when vaccination is after birthday.""" birth_date = "20040609" vaccination_date = "20260815" - age = calculate_age_at_vaccination(birth_date, vaccination_date) - self.assertEqual(age, 22) def test_age_calculation_on_birthday(self): - """Test age when vaccination is on birthday.""" birth_date = "20040609" vaccination_date = "20260609" - age = calculate_age_at_vaccination(birth_date, vaccination_date) - self.assertEqual(age, 22) def test_age_calculation_infant(self): - """Test age calculation for infant (less than 1 year old).""" birth_date = "20260609" vaccination_date = "20260915" - age = calculate_age_at_vaccination(birth_date, vaccination_date) - self.assertEqual(age, 0) def test_age_calculation_leap_year_birthday(self): - """Test age calculation with leap year birthday.""" birth_date = "20000229" vaccination_date = "20240228" - age = calculate_age_at_vaccination(birth_date, vaccination_date) - self.assertEqual(age, 23) def 
test_age_calculation_same_day_different_year(self): - """Test age calculation for same day in different year.""" birth_date = "20000101" vaccination_date = "20250101" - age = calculate_age_at_vaccination(birth_date, vaccination_date) - self.assertEqual(age, 25) -class TestGetPractitionerDetailsFromPds(unittest.TestCase): - """Tests for get_practitioner_details_from_pds function.""" - - @patch("create_notification.pds_get_patient_details") - @patch("create_notification.logger") - def test_get_practitioner_success(self, mock_logger, mock_pds_get): - """Test successful retrieval of GP ODS code.""" - mock_pds_get.return_value = {"generalPractitioner": [{"identifier": {"value": "Y12345"}}]} - - result = get_practitioner_details_from_pds("9481152782") - - self.assertEqual(result, "Y12345") - mock_pds_get.assert_called_once_with("9481152782") - mock_logger.warning.assert_not_called() - - @patch("create_notification.pds_get_patient_details") - @patch("create_notification.logger") - def test_get_practitioner_no_gp_details(self, mock_logger, mock_pds_get): - """Test when generalPractitioner is missing.""" - mock_pds_get.return_value = {"name": "John Doe"} - - result = get_practitioner_details_from_pds("9481152782") - - self.assertIsNone(result) - mock_logger.warning.assert_called_once_with("No GP details found for patient") - - @patch("create_notification.pds_get_patient_details") - @patch("create_notification.logger") - def test_get_practitioner_gp_is_none(self, mock_logger, mock_pds_get): - """Test when generalPractitioner is None.""" - mock_pds_get.return_value = {"generalPractitioner": None} - - result = get_practitioner_details_from_pds("9481152782") - - self.assertIsNone(result) - mock_logger.warning.assert_called_once() - - @patch("create_notification.pds_get_patient_details") - @patch("create_notification.logger") - def test_get_practitioner_no_value_field(self, mock_logger, mock_pds_get): - """Test when value field is missing from generalPractitioner.""" - 
mock_pds_get.return_value = {"generalPractitioner": [{"identifier": {}}]} - - result = get_practitioner_details_from_pds("9481152782") - - self.assertIsNone(result) - mock_logger.warning.assert_called_with("GP ODS code not found in practitioner details") - - @patch("create_notification.pds_get_patient_details") - @patch("create_notification.logger") - def test_get_practitioner_empty_value(self, mock_logger, mock_pds_get): - """Test when value is empty string.""" - mock_pds_get.return_value = {"generalPractitioner": [{"identifier": {"value": ""}}]} - - result = get_practitioner_details_from_pds("9481152782") - - self.assertIsNone(result) - mock_logger.warning.assert_called_with("GP ODS code not found in practitioner details") - - @patch("create_notification.pds_get_patient_details") - @patch("create_notification.logger") - def test_get_practitioner_pds_exception(self, mock_logger, mock_pds_get): - """Test when PDS API raises exception.""" - mock_pds_get.side_effect = Exception("PDS API error") - with self.assertRaises(Exception) as context: - get_practitioner_details_from_pds("9481152782") - self.assertEqual(str(context.exception), "PDS API error") - mock_logger.exception.assert_called_once() - - @patch("create_notification.pds_get_patient_details") - @patch("create_notification.logger") - def test_get_practitioner_patient_details_none(self, mock_logger, mock_pds_get): - """Test when pds_get_patient_details returns None.""" - mock_pds_get.return_value = None - - with self.assertRaises(AttributeError): - get_practitioner_details_from_pds("9481152782") - - class TestCreateMnsNotification(unittest.TestCase): """Tests for MNS notification creation.""" @classmethod def setUpClass(cls): - """Load the sample SQS event once for all tests.""" - sample_event_path = Path(__file__).parent.parent / "tests" / "sqs_event.json" - with open(sample_event_path, "r") as f: - raw_event = json.load(f) - - # Convert body from dict to JSON string (as it would be in real SQS) - if 
isinstance(raw_event.get("body"), dict): - raw_event["body"] = json.dumps(raw_event["body"]) - cls.sample_sqs_event = raw_event + cls.sample_sqs_event = _load_sample_sqs_event() def setUp(self): - """Set up test fixtures.""" self.expected_gp_ods_code = "Y12345" self.expected_immunisation_url = "https://int.api.service.nhs.uk/immunisation-fhir-api" @@ -182,7 +89,6 @@ def setUp(self): @patch("create_notification.get_service_url") @patch("create_notification.uuid.uuid4") def test_create_mns_notification_success_with_real_payload(self, mock_uuid, mock_get_service_url, mock_get_gp): - """Test successful MNS notification creation using real SQS event.""" mock_uuid.return_value = MagicMock(hex="236a1d4a-5d69-4fa9-9c7f-e72bf505aa5b") mock_get_service_url.return_value = self.expected_immunisation_url mock_get_gp.return_value = self.expected_gp_ods_code @@ -201,7 +107,6 @@ def test_create_mns_notification_success_with_real_payload(self, mock_uuid, mock @patch("create_notification.get_practitioner_details_from_pds") @patch("create_notification.get_service_url") def test_create_mns_notification_dataref_format_real_payload(self, mock_get_service_url, mock_get_gp): - """Test dataref URL format is correct with real payload.""" mock_get_service_url.return_value = self.expected_immunisation_url mock_get_gp.return_value = self.expected_gp_ods_code @@ -213,7 +118,6 @@ def test_create_mns_notification_dataref_format_real_payload(self, mock_get_serv @patch("create_notification.get_practitioner_details_from_pds") @patch("create_notification.get_service_url") def test_create_mns_notification_filtering_fields_real_payload(self, mock_get_service_url, mock_get_gp): - """Test all filtering fields are populated correctly with real payload.""" mock_get_service_url.return_value = self.expected_immunisation_url mock_get_gp.return_value = self.expected_gp_ods_code @@ -230,7 +134,6 @@ def test_create_mns_notification_filtering_fields_real_payload(self, mock_get_se 
@patch("create_notification.get_practitioner_details_from_pds") @patch("create_notification.get_service_url") def test_create_mns_notification_age_calculation_real_payload(self, mock_get_service_url, mock_get_gp): - """Test patient age is calculated correctly with real payload.""" mock_get_service_url.return_value = self.expected_immunisation_url mock_get_gp.return_value = self.expected_gp_ods_code @@ -241,7 +144,6 @@ def test_create_mns_notification_age_calculation_real_payload(self, mock_get_ser @patch("create_notification.get_practitioner_details_from_pds") @patch("create_notification.get_service_url") def test_create_mns_notification_calls_get_practitioner_real_payload(self, mock_get_service_url, mock_get_gp): - """Test get_practitioner_details_from_pds is called with correct NHS number from real payload.""" mock_get_service_url.return_value = self.expected_immunisation_url mock_get_gp.return_value = self.expected_gp_ods_code @@ -252,7 +154,6 @@ def test_create_mns_notification_calls_get_practitioner_real_payload(self, mock_ @patch("create_notification.get_practitioner_details_from_pds") @patch("create_notification.get_service_url") def test_create_mns_notification_uuid_generated(self, mock_get_service_url, mock_get_gp): - """Test unique ID is generated for each notification.""" mock_get_service_url.return_value = self.expected_immunisation_url mock_get_gp.return_value = self.expected_gp_ods_code @@ -264,8 +165,8 @@ def test_create_mns_notification_uuid_generated(self, mock_get_service_url, mock @patch("create_notification.get_practitioner_details_from_pds") @patch("create_notification.get_service_url") def test_create_mns_notification_invalid_json_body(self, mock_get_service_url, mock_get_gp): - """Test error handling when SQS body is invalid JSON.""" mock_get_service_url.return_value = self.expected_immunisation_url + mock_get_gp.return_value = self.expected_gp_ods_code invalid_event = {"messageId": "test-id", "body": "not valid json {"} @@ -275,7 +176,6 @@ 
def test_create_mns_notification_invalid_json_body(self, mock_get_service_url, m @patch("create_notification.get_practitioner_details_from_pds") @patch("create_notification.get_service_url") def test_create_mns_notification_pds_failure(self, mock_get_service_url, mock_get_gp): - """Test handling when get_practitioner_details_from_pds call fails.""" mock_get_service_url.return_value = self.expected_immunisation_url mock_get_gp.side_effect = Exception("PDS API unavailable") @@ -285,7 +185,6 @@ def test_create_mns_notification_pds_failure(self, mock_get_service_url, mock_ge @patch("create_notification.get_practitioner_details_from_pds") @patch("create_notification.get_service_url") def test_create_mns_notification_gp_not_found(self, mock_get_service_url, mock_get_gp): - """Test handling when GP ODS code is not found (returns None).""" mock_get_service_url.return_value = self.expected_immunisation_url mock_get_gp.return_value = None @@ -296,7 +195,6 @@ def test_create_mns_notification_gp_not_found(self, mock_get_service_url, mock_g @patch("create_notification.get_practitioner_details_from_pds") @patch("create_notification.get_service_url") def test_create_mns_notification_required_fields_present(self, mock_get_service_url, mock_get_gp): - """Test all required CloudEvents fields are present.""" mock_get_service_url.return_value = self.expected_immunisation_url mock_get_gp.return_value = self.expected_gp_ods_code @@ -309,7 +207,6 @@ def test_create_mns_notification_required_fields_present(self, mock_get_service_ @patch("create_notification.get_practitioner_details_from_pds") @patch("create_notification.get_service_url") def test_create_mns_notification_missing_imms_data_field(self, mock_get_service_url, mock_get_gp): - """Test handling when a required field is missing from imms_data.""" mock_get_service_url.return_value = self.expected_immunisation_url mock_get_gp.return_value = self.expected_gp_ods_code @@ -321,103 +218,158 @@ def 
test_create_mns_notification_missing_imms_data_field(self, mock_get_service_ with self.assertRaises((KeyError, TypeError)): create_mns_notification(incomplete_event) + @patch("create_notification.get_practitioner_details_from_pds") + @patch("create_notification.get_service_url") + def test_create_mns_notification_with_update_action(self, mock_get_service_url, mock_get_gp): + mock_get_service_url.return_value = self.expected_immunisation_url + mock_get_gp.return_value = self.expected_gp_ods_code + + update_event = copy.deepcopy(self.sample_sqs_event) + + # Body is a JSON string; parse -> modify -> dump back + body = json.loads(update_event["body"]) + body["dynamodb"]["NewImage"]["Operation"]["S"] = "UPDATE" + update_event["body"] = json.dumps(body) + + result = create_mns_notification(update_event) + + self.assertEqual(result["filtering"]["action"], "UPDATE") + mock_get_service_url.assert_called() + mock_get_gp.assert_called() + + +class TestGetPractitionerDetailsFromPds(unittest.TestCase): + """Tests for get_practitioner_details_from_pds function.""" + + @patch("create_notification.pds_get_patient_details") + @patch("create_notification.logger") + def test_get_practitioner_success(self, mock_logger, mock_pds_get): + """Test successful retrieval of GP ODS code.""" + mock_pds_get.return_value = {"generalPractitioner": [{"identifier": {"value": "Y12345"}}]} + + result = get_practitioner_details_from_pds("9481152782") + + self.assertEqual(result, "Y12345") + mock_pds_get.assert_called_once_with("9481152782") + mock_logger.warning.assert_not_called() -@patch("create_notification.get_practitioner_details_from_pds") -@patch("create_notification.get_service_url") -def test_create_mns_notification_with_update_action(self, mock_get_service_url, mock_get_gp): - """Test notification creation with UPDATE action using real payload structure.""" - mock_get_service_url.return_value = self.expected_immunisation_url - mock_get_gp.return_value = self.expected_gp_ods_code + 
@patch("create_notification.pds_get_patient_details") + @patch("create_notification.logger") + def test_get_practitioner_no_gp_details(self, mock_logger, mock_pds_get): + """Test when generalPractitioner is missing.""" + mock_pds_get.return_value = {"name": "John Doe"} + + result = get_practitioner_details_from_pds("9481152782") + + self.assertIsNone(result) + mock_logger.warning.assert_called_once_with("No GP details found for patient") + + @patch("create_notification.pds_get_patient_details") + @patch("create_notification.logger") + def test_get_practitioner_gp_is_none(self, mock_logger, mock_pds_get): + """Test when generalPractitioner is None.""" + mock_pds_get.return_value = {"generalPractitioner": None} + + result = get_practitioner_details_from_pds("9481152782") - update_event = copy.deepcopy(self.sample_sqs_event) + self.assertIsNone(result) + mock_logger.warning.assert_called_once() - update_event["body"]["dynamodb"]["NewImage"]["Operation"]["S"] = "UPDATE" + @patch("create_notification.pds_get_patient_details") + @patch("create_notification.logger") + def test_get_practitioner_no_value_field(self, mock_logger, mock_pds_get): + """Test when value field is missing from identifier.""" + mock_pds_get.return_value = {"generalPractitioner": [{"identifier": {}}]} - result = create_mns_notification(update_event) + result = get_practitioner_details_from_pds("9481152782") - self.assertEqual(result["filtering"]["action"], "UPDATE") - mock_get_service_url.assert_called() - mock_get_gp.assert_called() + self.assertIsNone(result) + mock_logger.warning.assert_called_with("GP ODS code not found in practitioner details") + @patch("create_notification.pds_get_patient_details") + @patch("create_notification.logger") + def test_get_practitioner_empty_value(self, mock_logger, mock_pds_get): + """Test when value is empty string.""" + mock_pds_get.return_value = {"generalPractitioner": [{"identifier": {"value": ""}}]} -@patch("create_notification.pds_get_patient_details") 
-@patch("create_notification.logger") -def test_get_practitioner_success_no_end_date(self, mock_logger, mock_pds_get): - """Test successful retrieval when no end date (current registration).""" - mock_pds_get.return_value = { - "generalPractitioner": [{"identifier": {"value": "Y12345", "period": {"start": "2024-01-01"}}}] - } + result = get_practitioner_details_from_pds("9481152782") - result = get_practitioner_details_from_pds("9481152782") + self.assertIsNone(result) + mock_logger.warning.assert_called_with("GP ODS code not found in practitioner details") - self.assertEqual(result, "Y12345") - mock_logger.warning.assert_not_called() + @patch("create_notification.pds_get_patient_details") + @patch("create_notification.logger") + def test_get_practitioner_no_end_date(self, mock_logger, mock_pds_get): + """Test successful retrieval when no end date (current registration).""" + mock_pds_get.return_value = { + "generalPractitioner": [{"identifier": {"value": "Y12345", "period": {"start": "2024-01-01"}}}] + } + result = get_practitioner_details_from_pds("9481152782") -@patch("create_notification.pds_get_patient_details") -@patch("create_notification.logger") -def test_get_practitioner_success_future_end_date(self, mock_logger, mock_pds_get): - """Test successful retrieval when end date is in the future.""" - mock_pds_get.return_value = { - "generalPractitioner": [ - {"identifier": {"value": "Y12345", "period": {"start": "2024-01-01", "end": "2030-12-31"}}} - ] - } + self.assertEqual(result, "Y12345") + mock_logger.warning.assert_not_called() - result = get_practitioner_details_from_pds("9481152782") + @patch("create_notification.pds_get_patient_details") + @patch("create_notification.logger") + def test_get_practitioner_future_end_date(self, mock_logger, mock_pds_get): + """Test successful retrieval when end date is in the future.""" + mock_pds_get.return_value = { + "generalPractitioner": [ + {"identifier": {"value": "Y12345", "period": {"start": "2024-01-01", "end": 
"2030-12-31"}}} + ] + } - self.assertEqual(result, "Y12345") - mock_logger.warning.assert_not_called() + result = get_practitioner_details_from_pds("9481152782") + self.assertEqual(result, "Y12345") + mock_logger.warning.assert_not_called() -@patch("create_notification.pds_get_patient_details") -@patch("create_notification.logger") -def test_get_practitioner_expired_registration(self, mock_logger, mock_pds_get): - """Test when GP registration has ended (expired).""" - mock_pds_get.return_value = { - "generalPractitioner": [ - {"identifier": {"value": "Y12345", "period": {"start": "2020-01-01", "end": "2023-12-31"}}} - ] - } + @patch("create_notification.pds_get_patient_details") + @patch("create_notification.logger") + def test_get_practitioner_expired_registration(self, mock_logger, mock_pds_get): + """Test when GP registration has ended (expired).""" + mock_pds_get.return_value = { + "generalPractitioner": [ + {"identifier": {"value": "Y12345", "period": {"start": "2020-01-01", "end": "2023-12-31"}}} + ] + } - result = get_practitioner_details_from_pds("9481152782") + result = get_practitioner_details_from_pds("9481152782") - self.assertIsNone(result) - mock_logger.warning.assert_called_with( - "GP registration has ended", - extra={"nhs_number": "9481152782", "gp_ods_code": "Y12345", "end_date": "2023-12-31"}, - ) + self.assertIsNone(result) + mock_logger.warning.assert_called_with("No current GP registration found for patient") + @patch("create_notification.pds_get_patient_details") + @patch("create_notification.logger") + def test_get_practitioner_no_period_field(self, mock_logger, mock_pds_get): + """Test when period field is missing entirely.""" + mock_pds_get.return_value = {"generalPractitioner": [{"identifier": {"value": "Y12345"}}]} -@patch("create_notification.pds_get_patient_details") -@patch("create_notification.logger") -def test_get_practitioner_invalid_end_date_format(self, mock_logger, mock_pds_get): - """Test when end date has invalid format - 
should still return GP.""" - mock_pds_get.return_value = { - "generalPractitioner": [ - {"identifier": {"value": "Y12345", "period": {"start": "2024-01-01", "end": "invalid-date"}}} - ] - } + result = get_practitioner_details_from_pds("9481152782") - result = get_practitioner_details_from_pds("9481152782") + self.assertEqual(result, "Y12345") + mock_logger.warning.assert_not_called() - # Should still return GP even with invalid date - self.assertEqual(result, "Y12345") - mock_logger.warning.assert_called_with( - "Invalid end date format in GP registration", extra={"nhs_number": "9481152782", "end_date": "invalid-date"} - ) + @patch("create_notification.pds_get_patient_details") + @patch("create_notification.logger") + def test_get_practitioner_pds_exception(self, mock_logger, mock_pds_get): + """Test when PDS API raises exception.""" + mock_pds_get.side_effect = Exception("PDS API error") + with self.assertRaises(Exception) as context: + get_practitioner_details_from_pds("9481152782") -@patch("create_notification.pds_get_patient_details") -@patch("create_notification.logger") -def test_get_practitioner_no_period_field(self, mock_logger, mock_pds_get): - """Test when period field is missing entirely.""" - mock_pds_get.return_value = {"generalPractitioner": [{"identifier": {"value": "Y12345"}}]} + self.assertEqual(str(context.exception), "PDS API error") - result = get_practitioner_details_from_pds("9481152782") + @patch("create_notification.pds_get_patient_details") + @patch("create_notification.logger") + def test_get_practitioner_patient_details_none(self, mock_logger, mock_pds_get): + """Test when pds_get_patient_details returns None.""" + mock_pds_get.return_value = None - self.assertEqual(result, "Y12345") - mock_logger.warning.assert_not_called() + with self.assertRaises(AttributeError): + get_practitioner_details_from_pds("9481152782") if __name__ == "__main__": diff --git a/tests/e2e_automation/utilities/error_constants.py 
b/tests/e2e_automation/utilities/error_constants.py index 5293d166ae..e416023706 100644 --- a/tests/e2e_automation/utilities/error_constants.py +++ b/tests/e2e_automation/utilities/error_constants.py @@ -31,7 +31,7 @@ }, "invalid_DiseaseType": { "code": "INVALID", - "diagnostics": "-immunization.target must be one or more of the following: ROTAVIRUS, RSV, SHINGLES, 6IN1, MMR, FLU, 3IN1, PERTUSSIS, MENB, HIB, MMRV, BCG, MENACWY, 4IN1, COVID, PNEUMOCOCCAL, HPV, HEPB", + "diagnostics": "-immunization.target must be one or more of the following: ROTAVIRUS, RSV, SHINGLES, 6IN1, MMR, HIBMENC, FLU, 3IN1, PERTUSSIS, MENB, HIB, MMRV, BCG, MENACWY, 4IN1, COVID, PNEUMOCOCCAL, HPV, HEPB", }, "invalid_DateFrom": { "code": "INVALID", From a8c3782b8ec81d22824e89a3b93c38d7099eb4d1 Mon Sep 17 00:00:00 2001 From: Akol125 Date: Wed, 25 Feb 2026 15:43:29 +0000 Subject: [PATCH 24/31] reflect new permission in e2e test --- .../mns_publisher/tests/test_create_notification.py | 2 -- lambdas/mns_publisher/tests/test_lambda_handler.py | 12 ++++-------- 2 files changed, 4 insertions(+), 10 deletions(-) diff --git a/lambdas/mns_publisher/tests/test_create_notification.py b/lambdas/mns_publisher/tests/test_create_notification.py index 6fabf14682..a77bcfe343 100644 --- a/lambdas/mns_publisher/tests/test_create_notification.py +++ b/lambdas/mns_publisher/tests/test_create_notification.py @@ -21,7 +21,6 @@ def _load_sample_sqs_event() -> dict: with open(sample_event_path, "r") as f: raw_event = json.load(f) - # Ensure body is a JSON string (SQS behaviour) if isinstance(raw_event.get("body"), dict): raw_event["body"] = json.dumps(raw_event["body"]) @@ -226,7 +225,6 @@ def test_create_mns_notification_with_update_action(self, mock_get_service_url, update_event = copy.deepcopy(self.sample_sqs_event) - # Body is a JSON string; parse -> modify -> dump back body = json.loads(update_event["body"]) body["dynamodb"]["NewImage"]["Operation"]["S"] = "UPDATE" update_event["body"] = json.dumps(body) diff 
--git a/lambdas/mns_publisher/tests/test_lambda_handler.py b/lambdas/mns_publisher/tests/test_lambda_handler.py index 153f23d949..92208e52c1 100644 --- a/lambdas/mns_publisher/tests/test_lambda_handler.py +++ b/lambdas/mns_publisher/tests/test_lambda_handler.py @@ -17,7 +17,6 @@ def setUpClass(cls): with open(sample_event_path, "r") as f: raw_event = json.load(f) - # Convert body to JSON string if it's a dict if isinstance(raw_event.get("body"), dict): raw_event["body"] = json.dumps(raw_event["body"]) @@ -99,7 +98,6 @@ def test_process_record_success(self, mock_logger, mock_create_notification): mock_create_notification.return_value = self.sample_notification self.mock_mns_service.publish_notification.return_value = None - # Should not raise exception process_record(self.sample_sqs_record, self.mock_mns_service) mock_create_notification.assert_called_once_with(self.sample_sqs_record) @@ -112,7 +110,6 @@ def test_process_record_create_notification_failure(self, mock_logger, mock_crea """Test handling when notification creation fails.""" mock_create_notification.side_effect = Exception("Creation error") - # Should raise exception with self.assertRaises(Exception): process_record(self.sample_sqs_record, self.mock_mns_service) @@ -125,7 +122,6 @@ def test_process_record_publish_failure(self, mock_logger, mock_create_notificat mock_create_notification.return_value = self.sample_notification self.mock_mns_service.publish_notification.side_effect = Exception("Publish error") - # Should raise exception with self.assertRaises(Exception): process_record(self.sample_sqs_record, self.mock_mns_service) @@ -152,7 +148,7 @@ def test_process_records_all_success(self, mock_process_record, mock_get_mns, mo """Test processing multiple records with all successes.""" mock_mns_service = Mock() mock_get_mns.return_value = mock_mns_service - mock_process_record.return_value = None # No exception + mock_process_record.return_value = None record_2 = self.sample_sqs_record.copy() 
record_2["messageId"] = "different-id" @@ -173,8 +169,8 @@ def test_process_records_partial_failure(self, mock_process_record, mock_get_mns mock_mns_service = Mock() mock_get_mns.return_value = mock_mns_service mock_process_record.side_effect = [ - None, # Success - Exception("Processing error"), # Failure + None, + Exception("Processing error"), ] record_2 = self.sample_sqs_record.copy() @@ -214,7 +210,7 @@ def test_process_records_mns_service_created_once(self, mock_process_record, moc process_records(records) - mock_get_mns.assert_called_once() # Only created once + mock_get_mns.assert_called_once() class TestLambdaHandler(unittest.TestCase): From 1c92884c12b8670a5bf403961b6206e59b5f171f Mon Sep 17 00:00:00 2001 From: Akol125 Date: Wed, 25 Feb 2026 16:48:04 +0000 Subject: [PATCH 25/31] add conditional for patient_details --- lambdas/mns_publisher/src/create_notification.py | 3 +++ lambdas/mns_publisher/src/process_records.py | 1 + 2 files changed, 4 insertions(+) diff --git a/lambdas/mns_publisher/src/create_notification.py b/lambdas/mns_publisher/src/create_notification.py index a60733feb5..3c2663d61e 100644 --- a/lambdas/mns_publisher/src/create_notification.py +++ b/lambdas/mns_publisher/src/create_notification.py @@ -64,6 +64,9 @@ def calculate_age_at_vaccination(birth_date: str, vaccination_date: str) -> int: def get_practitioner_details_from_pds(nhs_number: str) -> str | None: patient_details = pds_get_patient_details(nhs_number) + if not patient_details: + logger.info("Unable to retrieve patient details") + return None general_practitioners = patient_details.get("generalPractitioner", []) if not general_practitioners or len(general_practitioners) == 0: diff --git a/lambdas/mns_publisher/src/process_records.py b/lambdas/mns_publisher/src/process_records.py index 74e2577802..a70230be9b 100644 --- a/lambdas/mns_publisher/src/process_records.py +++ b/lambdas/mns_publisher/src/process_records.py @@ -27,6 +27,7 @@ def process_records(records: list[SQSMessage]) 
-> dict[str, list]: except Exception: message_id = record.get("messageId", "unknown") batch_item_failures.append({"itemIdentifier": message_id}) + logger.error(Exception) if batch_item_failures: logger.warning(f"Batch completed with {len(batch_item_failures)} failures") From 22fb4ed846c1058635130f9faa8590ebeacf3f41 Mon Sep 17 00:00:00 2001 From: Akol125 Date: Wed, 25 Feb 2026 16:53:50 +0000 Subject: [PATCH 26/31] remove test for attribute error --- lambdas/mns_publisher/tests/test_create_notification.py | 9 --------- 1 file changed, 9 deletions(-) diff --git a/lambdas/mns_publisher/tests/test_create_notification.py b/lambdas/mns_publisher/tests/test_create_notification.py index a77bcfe343..600f0a8f88 100644 --- a/lambdas/mns_publisher/tests/test_create_notification.py +++ b/lambdas/mns_publisher/tests/test_create_notification.py @@ -360,15 +360,6 @@ def test_get_practitioner_pds_exception(self, mock_logger, mock_pds_get): self.assertEqual(str(context.exception), "PDS API error") - @patch("create_notification.pds_get_patient_details") - @patch("create_notification.logger") - def test_get_practitioner_patient_details_none(self, mock_logger, mock_pds_get): - """Test when pds_get_patient_details returns None.""" - mock_pds_get.return_value = None - - with self.assertRaises(AttributeError): - get_practitioner_details_from_pds("9481152782") - if __name__ == "__main__": unittest.main() From f13b4239fa4c671127e5d7deb1fdbe83cfb7362e Mon Sep 17 00:00:00 2001 From: Akol125 Date: Thu, 26 Feb 2026 08:23:54 +0000 Subject: [PATCH 27/31] check logs to pinpoint pds bad request --- lambdas/mns_publisher/src/process_records.py | 3 +- lambdas/mns_publisher/src/sqs_dynamo_utils.py | 2 + .../tests/test_create_notification.py | 19 +--- .../tests/test_sqs_dynamo_utils.py | 87 +++++++++++++++++++ lambdas/mns_publisher/tests/test_utils.py | 17 ++++ 5 files changed, 109 insertions(+), 19 deletions(-) create mode 100644 lambdas/mns_publisher/tests/test_sqs_dynamo_utils.py create mode 100644 
lambdas/mns_publisher/tests/test_utils.py diff --git a/lambdas/mns_publisher/src/process_records.py b/lambdas/mns_publisher/src/process_records.py index a70230be9b..aa2d2b4230 100644 --- a/lambdas/mns_publisher/src/process_records.py +++ b/lambdas/mns_publisher/src/process_records.py @@ -27,7 +27,7 @@ def process_records(records: list[SQSMessage]) -> dict[str, list]: except Exception: message_id = record.get("messageId", "unknown") batch_item_failures.append({"itemIdentifier": message_id}) - logger.error(Exception) + logger.exception("Failed to process record", trace_id={"message_id": message_id}) if batch_item_failures: logger.warning(f"Batch completed with {len(batch_item_failures)} failures") @@ -62,7 +62,6 @@ def process_record(record: SQSMessage, mns_service: MnsService) -> dict | None: }, ) - # Publish to MNS mns_service.publish_notification(mns_notification_payload) logger.info("Successfully created MNS notification", trace_ids={"mns_notification_id": notification_id}) diff --git a/lambdas/mns_publisher/src/sqs_dynamo_utils.py b/lambdas/mns_publisher/src/sqs_dynamo_utils.py index edb09e4e30..1e6bc697d0 100644 --- a/lambdas/mns_publisher/src/sqs_dynamo_utils.py +++ b/lambdas/mns_publisher/src/sqs_dynamo_utils.py @@ -1,6 +1,7 @@ import json from typing import Any +from common.clients import logger from constants import DYNAMO_DB_TYPE_DESCRIPTORS, ImmsData @@ -18,6 +19,7 @@ def extract_sqs_imms_data(sqs_record: dict) -> ImmsData: supplier_system = _unwrap_dynamodb_value(new_image.get("SupplierSystem", {})) vaccine_type = _unwrap_dynamodb_value(new_image.get("VaccineType", {})) operation = _unwrap_dynamodb_value(new_image.get("Operation", {})) + logger.info("Top Level Fields retrieved successfully") imms_map = new_image.get("Imms", {}).get("M", {}) diff --git a/lambdas/mns_publisher/tests/test_create_notification.py b/lambdas/mns_publisher/tests/test_create_notification.py index 600f0a8f88..7d60fb9047 100644 --- 
a/lambdas/mns_publisher/tests/test_create_notification.py +++ b/lambdas/mns_publisher/tests/test_create_notification.py @@ -1,7 +1,6 @@ import copy import json import unittest -from pathlib import Path from unittest.mock import MagicMock, patch from constants import IMMUNISATION_TYPE, SPEC_VERSION @@ -10,21 +9,7 @@ create_mns_notification, get_practitioner_details_from_pds, ) - - -def _load_sample_sqs_event() -> dict: - """ - Loads the sample SQS event and normalises body to a JSON string (as SQS delivers it). - Expects: lambdas/mns_publisher/tests/sqs_event.json - """ - sample_event_path = Path(__file__).parent / "sqs_event.json" - with open(sample_event_path, "r") as f: - raw_event = json.load(f) - - if isinstance(raw_event.get("body"), dict): - raw_event["body"] = json.dumps(raw_event["body"]) - - return raw_event +from test_utils import load_sample_sqs_event class TestCalculateAgeAtVaccination(unittest.TestCase): @@ -78,7 +63,7 @@ class TestCreateMnsNotification(unittest.TestCase): @classmethod def setUpClass(cls): - cls.sample_sqs_event = _load_sample_sqs_event() + cls.sample_sqs_event = load_sample_sqs_event() def setUp(self): self.expected_gp_ods_code = "Y12345" diff --git a/lambdas/mns_publisher/tests/test_sqs_dynamo_utils.py b/lambdas/mns_publisher/tests/test_sqs_dynamo_utils.py new file mode 100644 index 0000000000..85c3090dea --- /dev/null +++ b/lambdas/mns_publisher/tests/test_sqs_dynamo_utils.py @@ -0,0 +1,87 @@ +import json +import unittest + +from sqs_dynamo_utils import _unwrap_dynamodb_value, extract_sqs_imms_data +from test_utils import load_sample_sqs_event + + +class TestExtractSqsImmsData(unittest.TestCase): + """ + Test SQS Event extraction utility + """ + + @classmethod + def setUpClass(cls): + cls.sample_sqs_event = load_sample_sqs_event() + + def test_extract_sqs_imms_data(self): + result = extract_sqs_imms_data(self.sample_sqs_event) + + self.assertEqual(result["imms_id"], "d058014c-b0fd-4471-8db9-3316175eb825") + 
self.assertEqual(result["supplier_system"], "TPP") + self.assertEqual(result["vaccine_type"], "hib") + self.assertEqual(result["operation"], "CREATE") + self.assertEqual(result["nhs_number"], "9481152782") + self.assertEqual(result["person_dob"], "20040609") + self.assertEqual(result["date_and_time"], "20260212T17443700") + self.assertEqual(result["site_code"], "B0C4P") + + def test_extract_imms_data_field_types(self): + """Test that extracted fields are the correct types.""" + result = extract_sqs_imms_data(self.sample_sqs_event) + + self.assertIsInstance(result["imms_id"], str) + self.assertIsInstance(result["supplier_system"], str) + self.assertIsInstance(result["vaccine_type"], str) + self.assertIsInstance(result["operation"], str) + self.assertIsInstance(result["nhs_number"], str) + self.assertIsInstance(result["person_dob"], str) + self.assertIsInstance(result["date_and_time"], str) + self.assertIsInstance(result["site_code"], str) + + def test_extract_imms_data_invalid_json_body(self): + """Test extraction when body is invalid JSON.""" + event = {"body": "invalid json {"} + + with self.assertRaises(json.JSONDecodeError): + extract_sqs_imms_data(event) + + +class TestUnwrapDynamodbValue(unittest.TestCase): + """Tests for _unwrap_dynamodb_value helper function.""" + + def test_unwrap_string_type(self): + """Test unwrapping DynamoDB String type.""" + value = {"S": "test-value"} + result = _unwrap_dynamodb_value(value) + self.assertEqual(result, "test-value") + + def test_unwrap_number_type(self): + """Test unwrapping DynamoDB Number type.""" + value = {"N": "123"} + result = _unwrap_dynamodb_value(value) + self.assertEqual(result, "123") + + def test_unwrap_boolean_type(self): + """Test unwrapping DynamoDB Boolean type.""" + value = {"BOOL": True} + result = _unwrap_dynamodb_value(value) + self.assertTrue(result) + + def test_unwrap_null_type(self): + """Test unwrapping DynamoDB NULL type.""" + value = {"NULL": True} + result = _unwrap_dynamodb_value(value) + 
self.assertIsNone(result) + + def test_unwrap_map_type(self): + """Test unwrapping DynamoDB Map type.""" + value = {"M": {"key": {"S": "value"}}} + result = _unwrap_dynamodb_value(value) + self.assertEqual(result, {"key": {"S": "value"}}) + + def test_unwrap_list_type(self): + """Test unwrapping DynamoDB List type.""" + value = {"L": [{"S": "item1"}, {"S": "item2"}]} + result = _unwrap_dynamodb_value(value) + self.assertEqual(result, [{"S": "item1"}, {"S": "item2"}]) diff --git a/lambdas/mns_publisher/tests/test_utils.py b/lambdas/mns_publisher/tests/test_utils.py new file mode 100644 index 0000000000..56da38819d --- /dev/null +++ b/lambdas/mns_publisher/tests/test_utils.py @@ -0,0 +1,17 @@ +import json +from pathlib import Path + + +def load_sample_sqs_event() -> dict: + """ + Loads the sample SQS event and normalises body to a JSON string (as SQS delivers it). + Expects: lambdas/mns_publisher/tests/sqs_event.json + """ + sample_event_path = Path(__file__).parent / "sqs_event.json" + with open(sample_event_path, "r") as f: + raw_event = json.load(f) + + if isinstance(raw_event.get("body"), dict): + raw_event["body"] = json.dumps(raw_event["body"]) + + return raw_event From 023942a21cf2fd04d8149865041f900c51a8259c Mon Sep 17 00:00:00 2001 From: Akol125 Date: Thu, 26 Feb 2026 15:18:28 +0000 Subject: [PATCH 28/31] remove dynamo utils and add tests --- lambdas/mns_publisher/poetry.lock | 20 ++--- lambdas/mns_publisher/pyproject.toml | 5 ++ .../mns_publisher/src/create_notification.py | 57 +++++++++--- lambdas/mns_publisher/src/process_records.py | 9 +- lambdas/mns_publisher/src/sqs_dynamo_utils.py | 56 ------------ .../tests/test_create_notification.py | 41 +++++++++ .../tests/test_lambda_handler.py | 30 ++----- .../tests/test_sqs_dynamo_utils.py | 87 ------------------- 8 files changed, 109 insertions(+), 196 deletions(-) delete mode 100644 lambdas/mns_publisher/src/sqs_dynamo_utils.py diff --git a/lambdas/mns_publisher/poetry.lock b/lambdas/mns_publisher/poetry.lock 
index 0ec5203c51..69634eb57c 100644 --- a/lambdas/mns_publisher/poetry.lock +++ b/lambdas/mns_publisher/poetry.lock @@ -69,7 +69,7 @@ version = "2026.1.4" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.7" -groups = ["main"] +groups = ["main", "dev"] files = [ {file = "certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c"}, {file = "certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120"}, @@ -179,7 +179,7 @@ version = "3.4.4" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7" -groups = ["main"] +groups = ["main", "dev"] files = [ {file = "charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e824f1492727fa856dd6eda4f7cee25f8518a12f3c4a56a74e8095695089cf6d"}, {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4bd5d4137d500351a30687c2d3971758aac9a19208fc110ccb9d7188fbe709e8"}, @@ -493,7 +493,7 @@ version = "3.11" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.8" -groups = ["main"] +groups = ["main", "dev"] files = [ {file = "idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea"}, {file = "idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902"}, @@ -744,7 +744,7 @@ version = "6.0.3" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" -groups = ["main"] +groups = ["main", "dev"] files = [ {file = "PyYAML-6.0.3-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:c2514fceb77bc5e7a2f7adfaa1feb2fb311607c9cb518dbc378688ec73d8292f"}, {file = 
"PyYAML-6.0.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c57bb8c96f6d1808c030b1687b9b5fb476abaa47f0db9c0101f5e9f394e97f4"}, @@ -827,7 +827,7 @@ version = "2.32.5" description = "Python HTTP for Humans." optional = false python-versions = ">=3.9" -groups = ["main"] +groups = ["main", "dev"] files = [ {file = "requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6"}, {file = "requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf"}, @@ -845,14 +845,14 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "responses" -version = "0.25.8" +version = "0.26.0" description = "A utility library for mocking out the `requests` Python library." optional = false python-versions = ">=3.8" -groups = ["main"] +groups = ["main", "dev"] files = [ - {file = "responses-0.25.8-py3-none-any.whl", hash = "sha256:0c710af92def29c8352ceadff0c3fe340ace27cf5af1bbe46fb71275bcd2831c"}, - {file = "responses-0.25.8.tar.gz", hash = "sha256:9374d047a575c8f781b94454db5cab590b6029505f488d12899ddb10a4af1cf4"}, + {file = "responses-0.26.0-py3-none-any.whl", hash = "sha256:03ec4409088cd5c66b71ecbbbd27fe2c58ddfad801c66203457b3e6a04868c37"}, + {file = "responses-0.26.0.tar.gz", hash = "sha256:c7f6923e6343ef3682816ba421c006626777893cb0d5e1434f674b649bac9eb4"}, ] [package.dependencies] @@ -911,7 +911,7 @@ version = "2.6.3" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.9" -groups = ["main"] +groups = ["main", "dev"] files = [ {file = "urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4"}, {file = "urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed"}, diff --git a/lambdas/mns_publisher/pyproject.toml b/lambdas/mns_publisher/pyproject.toml index aaff6cf5f1..2bafd9e372 100644 --- a/lambdas/mns_publisher/pyproject.toml +++ b/lambdas/mns_publisher/pyproject.toml @@ -23,3 +23,8 @@ cache = "^1.0.3" [build-system] requires = ["poetry-core >= 1.5.0"] build-backend = "poetry.core.masonry.api" + +[dependency-groups] +dev = [ + "responses (>=0.26.0,<0.27.0)" +] diff --git a/lambdas/mns_publisher/src/create_notification.py b/lambdas/mns_publisher/src/create_notification.py index 3c2663d61e..651818aebf 100644 --- a/lambdas/mns_publisher/src/create_notification.py +++ b/lambdas/mns_publisher/src/create_notification.py @@ -1,14 +1,15 @@ +import json import os import uuid from datetime import datetime +from typing import Any from aws_lambda_typing.events.sqs import SQSMessage from common.api_clients.get_pds_details import pds_get_patient_details from common.clients import logger from common.get_service_url import get_service_url -from constants import IMMUNISATION_TYPE, SPEC_VERSION, MnsNotificationPayload -from sqs_dynamo_utils import extract_sqs_imms_data +from constants import DYNAMO_DB_TYPE_DESCRIPTORS, IMMUNISATION_TYPE, SPEC_VERSION, MnsNotificationPayload IMMUNIZATION_ENV = os.getenv("IMMUNIZATION_ENV") IMMUNIZATION_BASE_PATH = os.getenv("IMMUNIZATION_BASE_PATH") @@ -18,28 +19,37 @@ def create_mns_notification(sqs_event: SQSMessage) -> MnsNotificationPayload: """Create a notification payload for MNS.""" immunisation_url = get_service_url(IMMUNIZATION_ENV, IMMUNIZATION_BASE_PATH) - # Simple, direct extraction - imms_data = extract_sqs_imms_data(sqs_event) + body = 
json.loads(sqs_event.get("body", "{}")) + new_image = body.get("dynamodb", {}).get("NewImage", {}) + imms_id = _unwrap_dynamodb_value(new_image.get("ImmsID", {})) + supplier_system = _unwrap_dynamodb_value(new_image.get("SupplierSystem", {})) + vaccine_type = _unwrap_dynamodb_value(new_image.get("VaccineType", {})) + operation = _unwrap_dynamodb_value(new_image.get("Operation", {})) - patient_age = calculate_age_at_vaccination(imms_data["person_dob"], imms_data["date_and_time"]) + imms_map = new_image.get("Imms", {}).get("M", {}) + nhs_number = _unwrap_dynamodb_value(imms_map.get("NHS_NUMBER", {})) + person_dob = _unwrap_dynamodb_value(imms_map.get("PERSON_DOB", {})) + date_and_time = _unwrap_dynamodb_value(imms_map.get("DATE_AND_TIME", {})) + site_code = _unwrap_dynamodb_value(imms_map.get("SITE_CODE", {})) - gp_ods_code = get_practitioner_details_from_pds(imms_data["nhs_number"]) + patient_age = calculate_age_at_vaccination(person_dob, date_and_time) + gp_ods_code = get_practitioner_details_from_pds(nhs_number) return { "specversion": SPEC_VERSION, "id": str(uuid.uuid4()), "source": immunisation_url, "type": IMMUNISATION_TYPE, - "time": imms_data["date_and_time"], - "subject": imms_data["nhs_number"], - "dataref": f"{immunisation_url}/Immunization/{imms_data['imms_id']}", + "time": date_and_time, + "subject": nhs_number, + "dataref": f"{immunisation_url}/Immunization/{imms_id}", "filtering": { "generalpractitioner": gp_ods_code, - "sourceorganisation": imms_data["site_code"], - "sourceapplication": imms_data["supplier_system"], + "sourceorganisation": site_code, + "sourceapplication": supplier_system, "subjectage": str(patient_age), - "immunisationtype": imms_data["vaccine_type"], - "action": imms_data["operation"], + "immunisationtype": vaccine_type, + "action": operation, }, } @@ -95,3 +105,24 @@ def get_practitioner_details_from_pds(nhs_number: str) -> str | None: return None return gp_ods_code + + +def _unwrap_dynamodb_value(value: dict) -> Any: + """ + 
Unwrap DynamoDB type descriptor to get the actual value. + DynamoDB types: S (String), N (Number), BOOL, M (Map), L (List), NULL + """ + if not isinstance(value, dict): + return value + + # DynamoDB type descriptors + if "NULL" in value: + return None + + # Check other DynamoDB types + for key in DYNAMO_DB_TYPE_DESCRIPTORS: + if key in value: + return value[key] + + # Not a DynamoDB type, return as-is + return value diff --git a/lambdas/mns_publisher/src/process_records.py b/lambdas/mns_publisher/src/process_records.py index aa2d2b4230..6047a23378 100644 --- a/lambdas/mns_publisher/src/process_records.py +++ b/lambdas/mns_publisher/src/process_records.py @@ -27,7 +27,7 @@ def process_records(records: list[SQSMessage]) -> dict[str, list]: except Exception: message_id = record.get("messageId", "unknown") batch_item_failures.append({"itemIdentifier": message_id}) - logger.exception("Failed to process record", trace_id={"message_id": message_id}) + logger.exception("Failed to process record", extra={"message_id": message_id}) if batch_item_failures: logger.warning(f"Batch completed with {len(batch_item_failures)} failures") @@ -37,7 +37,7 @@ def process_records(records: list[SQSMessage]) -> dict[str, list]: return {"batchItemFailures": batch_item_failures} -def process_record(record: SQSMessage, mns_service: MnsService) -> dict | None: +def process_record(record: SQSMessage, mns_service: MnsService) -> None: """ Process a single SQS record. 
Args: @@ -48,13 +48,12 @@ def process_record(record: SQSMessage, mns_service: MnsService) -> dict | None: message_id, immunisation_id = extract_trace_ids(record) notification_id = None - # Create notification payload mns_notification_payload = create_mns_notification(record) notification_id = mns_notification_payload.get("id") action_flag = mns_notification_payload.get("filtering", {}).get("action") logger.info( "Processing message", - trace_ids={ + extra={ "notification_id": notification_id, "message_id": message_id, "immunisation_id": immunisation_id, @@ -63,7 +62,7 @@ def process_record(record: SQSMessage, mns_service: MnsService) -> dict | None: ) mns_service.publish_notification(mns_notification_payload) - logger.info("Successfully created MNS notification", trace_ids={"mns_notification_id": notification_id}) + logger.info("Successfully created MNS notification", extra={"mns_notification_id": notification_id}) return None diff --git a/lambdas/mns_publisher/src/sqs_dynamo_utils.py b/lambdas/mns_publisher/src/sqs_dynamo_utils.py deleted file mode 100644 index 1e6bc697d0..0000000000 --- a/lambdas/mns_publisher/src/sqs_dynamo_utils.py +++ /dev/null @@ -1,56 +0,0 @@ -import json -from typing import Any - -from common.clients import logger -from constants import DYNAMO_DB_TYPE_DESCRIPTORS, ImmsData - - -def extract_sqs_imms_data(sqs_record: dict) -> ImmsData: - """ - Extract immunisation data from SQS DynamoDB stream event. 
- Args: sqs_record: SQS record containing DynamoDB stream data - Returns: Dict with unwrapped values ready to use - """ - body = json.loads(sqs_record.get("body", "{}")) - new_image = body.get("dynamodb", {}).get("NewImage", {}) - - # Get top-level fields - imms_id = _unwrap_dynamodb_value(new_image.get("ImmsID", {})) - supplier_system = _unwrap_dynamodb_value(new_image.get("SupplierSystem", {})) - vaccine_type = _unwrap_dynamodb_value(new_image.get("VaccineType", {})) - operation = _unwrap_dynamodb_value(new_image.get("Operation", {})) - logger.info("Top Level Fields retrieved successfully") - - imms_map = new_image.get("Imms", {}).get("M", {}) - - return { - "imms_id": imms_id, - "supplier_system": supplier_system, - "vaccine_type": vaccine_type, - "operation": operation, - "nhs_number": _unwrap_dynamodb_value(imms_map.get("NHS_NUMBER", {})), - "person_dob": _unwrap_dynamodb_value(imms_map.get("PERSON_DOB", {})), - "date_and_time": _unwrap_dynamodb_value(imms_map.get("DATE_AND_TIME", {})), - "site_code": _unwrap_dynamodb_value(imms_map.get("SITE_CODE", {})), - } - - -def _unwrap_dynamodb_value(value: dict) -> Any: - """ - Unwrap DynamoDB type descriptor to get the actual value. 
- DynamoDB types: S (String), N (Number), BOOL, M (Map), L (List), NULL - """ - if not isinstance(value, dict): - return value - - # DynamoDB type descriptors - if "NULL" in value: - return None - - # Check other DynamoDB types - for key in DYNAMO_DB_TYPE_DESCRIPTORS: - if key in value: - return value[key] - - # Not a DynamoDB type, return as-is - return value diff --git a/lambdas/mns_publisher/tests/test_create_notification.py b/lambdas/mns_publisher/tests/test_create_notification.py index 7d60fb9047..838acd1db0 100644 --- a/lambdas/mns_publisher/tests/test_create_notification.py +++ b/lambdas/mns_publisher/tests/test_create_notification.py @@ -5,6 +5,7 @@ from constants import IMMUNISATION_TYPE, SPEC_VERSION from create_notification import ( + _unwrap_dynamodb_value, calculate_age_at_vaccination, create_mns_notification, get_practitioner_details_from_pds, @@ -346,5 +347,45 @@ def test_get_practitioner_pds_exception(self, mock_logger, mock_pds_get): self.assertEqual(str(context.exception), "PDS API error") +class TestUnwrapDynamodbValue(unittest.TestCase): + """Tests for _unwrap_dynamodb_value helper function.""" + + def test_unwrap_string_type(self): + """Test unwrapping DynamoDB String type.""" + value = {"S": "test-value"} + result = _unwrap_dynamodb_value(value) + self.assertEqual(result, "test-value") + + def test_unwrap_number_type(self): + """Test unwrapping DynamoDB Number type.""" + value = {"N": "123"} + result = _unwrap_dynamodb_value(value) + self.assertEqual(result, "123") + + def test_unwrap_boolean_type(self): + """Test unwrapping DynamoDB Boolean type.""" + value = {"BOOL": True} + result = _unwrap_dynamodb_value(value) + self.assertTrue(result) + + def test_unwrap_null_type(self): + """Test unwrapping DynamoDB NULL type.""" + value = {"NULL": True} + result = _unwrap_dynamodb_value(value) + self.assertIsNone(result) + + def test_unwrap_map_type(self): + """Test unwrapping DynamoDB Map type.""" + value = {"M": {"key": {"S": "value"}}} + result = 
_unwrap_dynamodb_value(value) + self.assertEqual(result, {"key": {"S": "value"}}) + + def test_unwrap_list_type(self): + """Test unwrapping DynamoDB List type.""" + value = {"L": [{"S": "item1"}, {"S": "item2"}]} + result = _unwrap_dynamodb_value(value) + self.assertEqual(result, [{"S": "item1"}, {"S": "item2"}]) + + if __name__ == "__main__": unittest.main() diff --git a/lambdas/mns_publisher/tests/test_lambda_handler.py b/lambdas/mns_publisher/tests/test_lambda_handler.py index 92208e52c1..1b70662040 100644 --- a/lambdas/mns_publisher/tests/test_lambda_handler.py +++ b/lambdas/mns_publisher/tests/test_lambda_handler.py @@ -5,6 +5,7 @@ from lambda_handler import lambda_handler from process_records import extract_trace_ids, process_record, process_records +from test_utils import load_sample_sqs_event class TestExtractTraceIds(unittest.TestCase): @@ -13,14 +14,7 @@ class TestExtractTraceIds(unittest.TestCase): @classmethod def setUpClass(cls): """Load the sample SQS event once for all tests.""" - sample_event_path = Path(__file__).parent.parent / "tests/sqs_event.json" - with open(sample_event_path, "r") as f: - raw_event = json.load(f) - - if isinstance(raw_event.get("body"), dict): - raw_event["body"] = json.dumps(raw_event["body"]) - - cls.sample_sqs_event = raw_event + cls.sample_sqs_event = load_sample_sqs_event() def test_extract_trace_ids_success_from_real_payload(self): """Test successful extraction using real SQS event structure.""" @@ -72,14 +66,7 @@ class TestProcessRecord(unittest.TestCase): @classmethod def setUpClass(cls): """Load the sample SQS event once for all tests.""" - sample_event_path = Path(__file__).parent.parent / "tests/sqs_event.json" - with open(sample_event_path, "r") as f: - raw_event = json.load(f) - - if isinstance(raw_event.get("body"), dict): - raw_event["body"] = json.dumps(raw_event["body"]) - - cls.sample_sqs_record = raw_event + cls.sample_sqs_record = load_sample_sqs_event() def setUp(self): """Set up test fixtures.""" @@ 
-181,7 +168,7 @@ def test_process_records_partial_failure(self, mock_process_record, mock_get_mns self.assertEqual(len(result["batchItemFailures"]), 1) self.assertEqual(result["batchItemFailures"][0]["itemIdentifier"], "msg-456") - mock_logger.warning.assert_called_with("Batch completed with 1 failures") + mock_logger.exception.assert_called_once() @patch("process_records.logger") @patch("process_records.get_mns_service") @@ -219,14 +206,7 @@ class TestLambdaHandler(unittest.TestCase): @classmethod def setUpClass(cls): """Load the sample SQS event once for all tests.""" - sample_event_path = Path(__file__).parent.parent / "tests/sqs_event.json" - with open(sample_event_path, "r") as f: - raw_event = json.load(f) - - if isinstance(raw_event.get("body"), dict): - raw_event["body"] = json.dumps(raw_event["body"]) - - cls.sample_sqs_record = raw_event + cls.sample_sqs_record = load_sample_sqs_event() @patch("lambda_handler.process_records") def test_lambda_handler_all_success(self, mock_process_records): diff --git a/lambdas/mns_publisher/tests/test_sqs_dynamo_utils.py b/lambdas/mns_publisher/tests/test_sqs_dynamo_utils.py index 85c3090dea..e69de29bb2 100644 --- a/lambdas/mns_publisher/tests/test_sqs_dynamo_utils.py +++ b/lambdas/mns_publisher/tests/test_sqs_dynamo_utils.py @@ -1,87 +0,0 @@ -import json -import unittest - -from sqs_dynamo_utils import _unwrap_dynamodb_value, extract_sqs_imms_data -from test_utils import load_sample_sqs_event - - -class TestExtractSqsImmsData(unittest.TestCase): - """ - Test SQS Event extraction utility - """ - - @classmethod - def setUpClass(cls): - cls.sample_sqs_event = load_sample_sqs_event() - - def test_extract_sqs_imms_data(self): - result = extract_sqs_imms_data(self.sample_sqs_event) - - self.assertEqual(result["imms_id"], "d058014c-b0fd-4471-8db9-3316175eb825") - self.assertEqual(result["supplier_system"], "TPP") - self.assertEqual(result["vaccine_type"], "hib") - self.assertEqual(result["operation"], "CREATE") - 
self.assertEqual(result["nhs_number"], "9481152782") - self.assertEqual(result["person_dob"], "20040609") - self.assertEqual(result["date_and_time"], "20260212T17443700") - self.assertEqual(result["site_code"], "B0C4P") - - def test_extract_imms_data_field_types(self): - """Test that extracted fields are the correct types.""" - result = extract_sqs_imms_data(self.sample_sqs_event) - - self.assertIsInstance(result["imms_id"], str) - self.assertIsInstance(result["supplier_system"], str) - self.assertIsInstance(result["vaccine_type"], str) - self.assertIsInstance(result["operation"], str) - self.assertIsInstance(result["nhs_number"], str) - self.assertIsInstance(result["person_dob"], str) - self.assertIsInstance(result["date_and_time"], str) - self.assertIsInstance(result["site_code"], str) - - def test_extract_imms_data_invalid_json_body(self): - """Test extraction when body is invalid JSON.""" - event = {"body": "invalid json {"} - - with self.assertRaises(json.JSONDecodeError): - extract_sqs_imms_data(event) - - -class TestUnwrapDynamodbValue(unittest.TestCase): - """Tests for _unwrap_dynamodb_value helper function.""" - - def test_unwrap_string_type(self): - """Test unwrapping DynamoDB String type.""" - value = {"S": "test-value"} - result = _unwrap_dynamodb_value(value) - self.assertEqual(result, "test-value") - - def test_unwrap_number_type(self): - """Test unwrapping DynamoDB Number type.""" - value = {"N": "123"} - result = _unwrap_dynamodb_value(value) - self.assertEqual(result, "123") - - def test_unwrap_boolean_type(self): - """Test unwrapping DynamoDB Boolean type.""" - value = {"BOOL": True} - result = _unwrap_dynamodb_value(value) - self.assertTrue(result) - - def test_unwrap_null_type(self): - """Test unwrapping DynamoDB NULL type.""" - value = {"NULL": True} - result = _unwrap_dynamodb_value(value) - self.assertIsNone(result) - - def test_unwrap_map_type(self): - """Test unwrapping DynamoDB Map type.""" - value = {"M": {"key": {"S": "value"}}} - result 
= _unwrap_dynamodb_value(value) - self.assertEqual(result, {"key": {"S": "value"}}) - - def test_unwrap_list_type(self): - """Test unwrapping DynamoDB List type.""" - value = {"L": [{"S": "item1"}, {"S": "item2"}]} - result = _unwrap_dynamodb_value(value) - self.assertEqual(result, [{"S": "item1"}, {"S": "item2"}]) From 930513894af95b8f445b0c2de45d77ddf4088bde Mon Sep 17 00:00:00 2001 From: Akol125 Date: Fri, 27 Feb 2026 17:13:13 +0000 Subject: [PATCH 29/31] fail fast with no nhs_no and lambda int test --- .../mns_publisher/src/create_notification.py | 7 +- lambdas/mns_publisher/src/process_records.py | 1 + .../tests/{ => sample_data}/sqs_event.json | 0 .../tests/test_create_notification.py | 57 ++++--------- .../tests/test_lambda_handler.py | 79 ++++++++++++++++--- lambdas/mns_publisher/tests/test_utils.py | 17 +++- 6 files changed, 103 insertions(+), 58 deletions(-) rename lambdas/mns_publisher/tests/{ => sample_data}/sqs_event.json (100%) diff --git a/lambdas/mns_publisher/src/create_notification.py b/lambdas/mns_publisher/src/create_notification.py index 651818aebf..d8abd48f6c 100644 --- a/lambdas/mns_publisher/src/create_notification.py +++ b/lambdas/mns_publisher/src/create_notification.py @@ -28,6 +28,10 @@ def create_mns_notification(sqs_event: SQSMessage) -> MnsNotificationPayload: imms_map = new_image.get("Imms", {}).get("M", {}) nhs_number = _unwrap_dynamodb_value(imms_map.get("NHS_NUMBER", {})) + if not nhs_number: + logger.error("Missing required field: Nhs Number") + raise ValueError("NHS number is required to create MNS notification") + person_dob = _unwrap_dynamodb_value(imms_map.get("PERSON_DOB", {})) date_and_time = _unwrap_dynamodb_value(imms_map.get("DATE_AND_TIME", {})) site_code = _unwrap_dynamodb_value(imms_map.get("SITE_CODE", {})) @@ -115,14 +119,11 @@ def _unwrap_dynamodb_value(value: dict) -> Any: if not isinstance(value, dict): return value - # DynamoDB type descriptors if "NULL" in value: return None - # Check other DynamoDB types for 
key in DYNAMO_DB_TYPE_DESCRIPTORS: if key in value: return value[key] - # Not a DynamoDB type, return as-is return value diff --git a/lambdas/mns_publisher/src/process_records.py b/lambdas/mns_publisher/src/process_records.py index 6047a23378..72dd736435 100644 --- a/lambdas/mns_publisher/src/process_records.py +++ b/lambdas/mns_publisher/src/process_records.py @@ -50,6 +50,7 @@ def process_record(record: SQSMessage, mns_service: MnsService) -> None: mns_notification_payload = create_mns_notification(record) notification_id = mns_notification_payload.get("id") + action_flag = mns_notification_payload.get("filtering", {}).get("action") logger.info( "Processing message", diff --git a/lambdas/mns_publisher/tests/sqs_event.json b/lambdas/mns_publisher/tests/sample_data/sqs_event.json similarity index 100% rename from lambdas/mns_publisher/tests/sqs_event.json rename to lambdas/mns_publisher/tests/sample_data/sqs_event.json diff --git a/lambdas/mns_publisher/tests/test_create_notification.py b/lambdas/mns_publisher/tests/test_create_notification.py index 838acd1db0..89414ffc00 100644 --- a/lambdas/mns_publisher/tests/test_create_notification.py +++ b/lambdas/mns_publisher/tests/test_create_notification.py @@ -73,7 +73,7 @@ def setUp(self): @patch("create_notification.get_practitioner_details_from_pds") @patch("create_notification.get_service_url") @patch("create_notification.uuid.uuid4") - def test_create_mns_notification_success_with_real_payload(self, mock_uuid, mock_get_service_url, mock_get_gp): + def test_create_mns_notification_complete_payload(self, mock_uuid, mock_get_service_url, mock_get_gp): mock_uuid.return_value = MagicMock(hex="236a1d4a-5d69-4fa9-9c7f-e72bf505aa5b") mock_get_service_url.return_value = self.expected_immunisation_url mock_get_gp.return_value = self.expected_gp_ods_code @@ -84,47 +84,33 @@ def test_create_mns_notification_success_with_real_payload(self, mock_uuid, mock self.assertEqual(result["type"], IMMUNISATION_TYPE) 
self.assertEqual(result["source"], self.expected_immunisation_url) self.assertEqual(result["subject"], "9481152782") - self.assertIn("id", result) - self.assertIn("time", result) - self.assertIn("dataref", result) - self.assertIn("filtering", result) - - @patch("create_notification.get_practitioner_details_from_pds") - @patch("create_notification.get_service_url") - def test_create_mns_notification_dataref_format_real_payload(self, mock_get_service_url, mock_get_gp): - mock_get_service_url.return_value = self.expected_immunisation_url - mock_get_gp.return_value = self.expected_gp_ods_code - - result = create_mns_notification(self.sample_sqs_event) expected_dataref = f"{self.expected_immunisation_url}/Immunization/d058014c-b0fd-4471-8db9-3316175eb825" self.assertEqual(result["dataref"], expected_dataref) - @patch("create_notification.get_practitioner_details_from_pds") - @patch("create_notification.get_service_url") - def test_create_mns_notification_filtering_fields_real_payload(self, mock_get_service_url, mock_get_gp): - mock_get_service_url.return_value = self.expected_immunisation_url - mock_get_gp.return_value = self.expected_gp_ods_code - - result = create_mns_notification(self.sample_sqs_event) - filtering = result["filtering"] self.assertEqual(filtering["generalpractitioner"], self.expected_gp_ods_code) self.assertEqual(filtering["sourceorganisation"], "B0C4P") self.assertEqual(filtering["sourceapplication"], "TPP") self.assertEqual(filtering["immunisationtype"], "hib") self.assertEqual(filtering["action"], "CREATE") - self.assertIsInstance(filtering["subjectage"], str) + self.assertEqual(filtering["subjectage"], "21") + + self.assertIn("id", result) + self.assertIsInstance(result["id"], str) @patch("create_notification.get_practitioner_details_from_pds") @patch("create_notification.get_service_url") - def test_create_mns_notification_age_calculation_real_payload(self, mock_get_service_url, mock_get_gp): - mock_get_service_url.return_value = 
self.expected_immunisation_url - mock_get_gp.return_value = self.expected_gp_ods_code + def test_create_mns_notification_missing_nhs_number(self, mock_get_service_url, mock_get_gp): + sqs_event_data = copy.deepcopy(self.sample_sqs_event) - result = create_mns_notification(self.sample_sqs_event) + body = json.loads(sqs_event_data["body"]) + body["dynamodb"]["NewImage"]["Imms"]["M"]["NHS_NUMBER"]["S"] = "" + sqs_event_data["body"] = json.dumps(body) - self.assertEqual(result["filtering"]["subjectage"], "21") + with self.assertRaises(ValueError) as context: + create_mns_notification(sqs_event_data) + self.assertIn("NHS number is required", str(context.exception)) @patch("create_notification.get_practitioner_details_from_pds") @patch("create_notification.get_service_url") @@ -136,17 +122,6 @@ def test_create_mns_notification_calls_get_practitioner_real_payload(self, mock_ mock_get_gp.assert_called_once_with("9481152782") - @patch("create_notification.get_practitioner_details_from_pds") - @patch("create_notification.get_service_url") - def test_create_mns_notification_uuid_generated(self, mock_get_service_url, mock_get_gp): - mock_get_service_url.return_value = self.expected_immunisation_url - mock_get_gp.return_value = self.expected_gp_ods_code - - result1 = create_mns_notification(self.sample_sqs_event) - result2 = create_mns_notification(self.sample_sqs_event) - - self.assertNotEqual(result1["id"], result2["id"]) - @patch("create_notification.get_practitioner_details_from_pds") @patch("create_notification.get_service_url") def test_create_mns_notification_invalid_json_body(self, mock_get_service_url, mock_get_gp): @@ -200,7 +175,7 @@ def test_create_mns_notification_missing_imms_data_field(self, mock_get_service_ "body": json.dumps({"dynamodb": {"NewImage": {"ImmsID": {"S": "test-id"}}}}), } - with self.assertRaises((KeyError, TypeError)): + with self.assertRaises((KeyError, TypeError, ValueError)): create_mns_notification(incomplete_event) 
@patch("create_notification.get_practitioner_details_from_pds") @@ -385,7 +360,3 @@ def test_unwrap_list_type(self): value = {"L": [{"S": "item1"}, {"S": "item2"}]} result = _unwrap_dynamodb_value(value) self.assertEqual(result, [{"S": "item1"}, {"S": "item2"}]) - - -if __name__ == "__main__": - unittest.main() diff --git a/lambdas/mns_publisher/tests/test_lambda_handler.py b/lambdas/mns_publisher/tests/test_lambda_handler.py index 1b70662040..c085c2141c 100644 --- a/lambdas/mns_publisher/tests/test_lambda_handler.py +++ b/lambdas/mns_publisher/tests/test_lambda_handler.py @@ -1,11 +1,14 @@ import json import unittest -from pathlib import Path from unittest.mock import Mock, patch +import boto3 +import responses +from moto import mock_aws + from lambda_handler import lambda_handler from process_records import extract_trace_ids, process_record, process_records -from test_utils import load_sample_sqs_event +from test_utils import generate_private_key_b64, load_sample_sqs_event class TestExtractTraceIds(unittest.TestCase): @@ -119,14 +122,7 @@ class TestProcessRecords(unittest.TestCase): @classmethod def setUpClass(cls): """Load the sample SQS event once for all tests.""" - sample_event_path = Path(__file__).parent.parent / "tests/sqs_event.json" - with open(sample_event_path, "r") as f: - raw_event = json.load(f) - - if isinstance(raw_event.get("body"), dict): - raw_event["body"] = json.dumps(raw_event["body"]) - - cls.sample_sqs_record = raw_event + cls.sample_sqs_record = load_sample_sqs_event() @patch("process_records.logger") @patch("process_records.get_mns_service") @@ -168,7 +164,7 @@ def test_process_records_partial_failure(self, mock_process_record, mock_get_mns self.assertEqual(len(result["batchItemFailures"]), 1) self.assertEqual(result["batchItemFailures"][0]["itemIdentifier"], "msg-456") - mock_logger.exception.assert_called_once() + mock_logger.warning.assert_called_with("Batch completed with 1 failures") @patch("process_records.logger") 
@patch("process_records.get_mns_service") @@ -241,5 +237,66 @@ def test_lambda_handler_empty_records(self, mock_process_records): mock_process_records.assert_called_once_with([]) +@mock_aws +class TestLambdaHandlerIntegration(unittest.TestCase): + """ + Integration tests + """ + + def setUp(self): + """Set up mocked AWS services and test data.""" + self.sample_sqs_record = load_sample_sqs_event() + self.secrets_client = boto3.client("secretsmanager", region_name="eu-west-2") + self.secrets_client.create_secret( + Name="imms/pds/int/jwt-secrets", + SecretString=json.dumps( + {"api_key": "fake-pds-api-key", "kid": "fake-kid-123", "private_key_b64": generate_private_key_b64()} + ), + ) + + @responses.activate + @patch("common.api_clients.authentication.AppRestrictedAuth.get_access_token") + @patch("process_records.logger") + def test_successful_notification_creation_with_gp(self, mock_logger, mock_get_token): + # Mock OAuth token response issued from Apigee + mock_oauth_response = Mock() + mock_oauth_response.status_code = 200 + mock_oauth_response.json.return_value = {"access_token": "fake-token"} + mock_get_token.return_value = mock_oauth_response + + # Intercepts actual request call to PDS and returns mocked responses + responses.add( + responses.GET, + "https://int.api.service.nhs.uk/personal-demographics/FHIR/R4/Patient/9481152782", + json={"generalPractitioner": [{"identifier": {"value": "Y12345", "period": {"start": "2024-01-01"}}}]}, + status=200, + ) + + mns_response = responses.add( + responses.POST, + "https://int.api.service.nhs.uk/multicast-notification-service/events", + json={"id": "236a1d4a-5d69-4fa9-9c7f-e72bf505aa5b"}, + status=200, + ) + + sqs_event = {"Records": [self.sample_sqs_record]} + result = lambda_handler(sqs_event, Mock()) + + self.assertEqual(result, {"batchItemFailures": []}) + + self.assertEqual(mns_response.call_count, 1) + self.assertEqual(mns_response.calls[0].response.status_code, 200) + mns_payload = 
json.loads(mns_response.calls[0].request.body) + self.assertEqual(mns_payload["subject"], "9481152782") + self.assertEqual(mns_payload["filtering"]["generalpractitioner"], "Y12345") + self.assertEqual(mns_payload["filtering"]["sourceorganisation"], "B0C4P") + self.assertEqual(mns_payload["filtering"]["sourceapplication"], "TPP") + self.assertEqual(mns_payload["filtering"]["immunisationtype"], "hib") + self.assertEqual(mns_payload["filtering"]["action"], "CREATE") + self.assertEqual(mns_payload["filtering"]["subjectage"], "21") + + mock_logger.info.assert_any_call("Successfully processed all 1 messages") + + if __name__ == "__main__": unittest.main() diff --git a/lambdas/mns_publisher/tests/test_utils.py b/lambdas/mns_publisher/tests/test_utils.py index 56da38819d..4c6a71c15f 100644 --- a/lambdas/mns_publisher/tests/test_utils.py +++ b/lambdas/mns_publisher/tests/test_utils.py @@ -1,13 +1,28 @@ +import base64 import json from pathlib import Path +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric import rsa + + +def generate_private_key_b64() -> str: + # Generate a real RSA private key (PKCS8) and base64 encode the PEM + private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048) + pem_bytes = private_key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.PKCS8, + encryption_algorithm=serialization.NoEncryption(), + ) + return base64.b64encode(pem_bytes).decode("utf-8") + def load_sample_sqs_event() -> dict: """ Loads the sample SQS event and normalises body to a JSON string (as SQS delivers it). 
Expects: lambdas/mns_publisher/tests/sqs_event.json """ - sample_event_path = Path(__file__).parent / "sqs_event.json" + sample_event_path = Path(__file__).parent / "sample_data" / "sqs_event.json" with open(sample_event_path, "r") as f: raw_event = json.load(f) From 7ab3bb3b7d242953ba535bb99bc543cf5a8175f9 Mon Sep 17 00:00:00 2001 From: Akol125 Date: Tue, 3 Mar 2026 08:37:50 +0000 Subject: [PATCH 30/31] integration test --- .../tests/test_lambda_handler.py | 91 +++++++++++++++++-- 1 file changed, 85 insertions(+), 6 deletions(-) diff --git a/lambdas/mns_publisher/tests/test_lambda_handler.py b/lambdas/mns_publisher/tests/test_lambda_handler.py index c085c2141c..3274802de8 100644 --- a/lambdas/mns_publisher/tests/test_lambda_handler.py +++ b/lambdas/mns_publisher/tests/test_lambda_handler.py @@ -258,11 +258,12 @@ def setUp(self): @patch("common.api_clients.authentication.AppRestrictedAuth.get_access_token") @patch("process_records.logger") def test_successful_notification_creation_with_gp(self, mock_logger, mock_get_token): + """ + Test a Successful MNS Publish notification with calls to PDS for GP details, no batch failure + """ + # Mock OAuth token response issued from Apigee - mock_oauth_response = Mock() - mock_oauth_response.status_code = 200 - mock_oauth_response.json.return_value = {"access_token": "fake-token"} - mock_get_token.return_value = mock_oauth_response + mock_get_token.return_value = {"access_token": "fake-token"} # Intercepts actual request call to PDS and returns mocked responses responses.add( @@ -297,6 +298,84 @@ def test_successful_notification_creation_with_gp(self, mock_logger, mock_get_to mock_logger.info.assert_any_call("Successfully processed all 1 messages") + @responses.activate + @patch("common.api_clients.authentication.AppRestrictedAuth.get_access_token") + @patch("process_records.get_mns_service") + @patch("process_records.logger") + def test_pds_failure(self, mock_logger, mock_get_mns, mock_get_token): + """ + Test that a PDS 
client error results in a batch item failure and MNS is not called. + """ + + # Mock OAuth token response issued from Apigee + mock_get_token.return_value = "fake-token" + + # Intercepts actual request call to PDS and returns mocked responses + responses.add( + responses.GET, + "https://int.api.service.nhs.uk/personal-demographics/FHIR/R4/Patient/9481152782", + json={ + "resourceType": "OperationOutcome", + "issue": [{"severity": "error", "code": "processing", "diagnostics": "Patient not found"}], + }, + status=400, + ) + + mock_mns_service = Mock() + mock_mns_service.publish_notification.return_value = None + mock_get_mns.return_value = mock_mns_service + + sqs_event = {"Records": [self.sample_sqs_record]} + result = lambda_handler(sqs_event, Mock()) + + self.assertEqual(len(result["batchItemFailures"]), 1) + mock_mns_service.publish_notification.assert_not_called() + mock_logger.warning.assert_called_with("Batch completed with 1 failures") + + @responses.activate + @patch("common.api_clients.authentication.AppRestrictedAuth.get_access_token") + @patch("process_records.logger") + def test_non_successful_notification_creation_without_gp(self, mock_logger, mock_get_token): + """ + Test a Successful MNS Publish notification with calls to PDS for GP details, no batch failure + """ + + # Mock OAuth token response issued from Apigee + mock_get_token.return_value = {"access_token": "fake-token"} + + # Intercepts actual request call to PDS and returns mocked responses + responses.add( + responses.GET, + "https://int.api.service.nhs.uk/personal-demographics/FHIR/R4/Patient/9481152782", + json={ + "generalPractitioner": [ + {"identifier": {"value": "Y12345", "period": {"start": "2024-01-01", "end": "2025-12-31"}}} + ] + }, + status=200, + ) -if __name__ == "__main__": - unittest.main() + mns_response = responses.add( + responses.POST, + "https://int.api.service.nhs.uk/multicast-notification-service/events", + json={"id": "236a1d4a-5d69-4fa9-9c7f-e72bf505aa5b"}, + 
status=200, + ) + + sqs_event = {"Records": [self.sample_sqs_record]} + result = lambda_handler(sqs_event, Mock()) + + self.assertEqual(result, {"batchItemFailures": []}) + + self.assertEqual(mns_response.call_count, 1) + self.assertEqual(mns_response.calls[0].response.status_code, 200) + mns_payload = json.loads(mns_response.calls[0].request.body) + self.assertEqual(mns_payload["subject"], "9481152782") + self.assertEqual(mns_payload["filtering"]["generalpractitioner"], None) + self.assertEqual(mns_payload["filtering"]["sourceorganisation"], "B0C4P") + self.assertEqual(mns_payload["filtering"]["sourceapplication"], "TPP") + self.assertEqual(mns_payload["filtering"]["immunisationtype"], "hib") + self.assertEqual(mns_payload["filtering"]["action"], "CREATE") + self.assertEqual(mns_payload["filtering"]["subjectage"], "21") + + mock_logger.info.assert_any_call("Successfully processed all 1 messages") From 8e22a92a35bec612ade6edb4673d6a0dfcfe9d17 Mon Sep 17 00:00:00 2001 From: Akol125 Date: Tue, 3 Mar 2026 10:27:31 +0000 Subject: [PATCH 31/31] change subject age to integer in test --- lambdas/mns_publisher/src/constants.py | 15 +-------------- lambdas/mns_publisher/src/create_notification.py | 4 ++-- .../tests/test_create_notification.py | 6 +++--- .../mns_publisher/tests/test_lambda_handler.py | 10 +++++----- 4 files changed, 11 insertions(+), 24 deletions(-) diff --git a/lambdas/mns_publisher/src/constants.py b/lambdas/mns_publisher/src/constants.py index bc91ce3455..cbad37bd06 100644 --- a/lambdas/mns_publisher/src/constants.py +++ b/lambdas/mns_publisher/src/constants.py @@ -12,7 +12,7 @@ class FilteringData(TypedDict): generalpractitioner: str | None sourceorganisation: str sourceapplication: str - subjectage: str + subjectage: int immunisationtype: str action: str @@ -31,16 +31,3 @@ class MnsNotificationPayload(TypedDict): DYNAMO_DB_TYPE_DESCRIPTORS = ("S", "N", "BOOL", "M", "L") - - -class ImmsData(TypedDict): - """Extracted immunisation data from DynamoDB 
stream.""" - - imms_id: str - supplier_system: str - vaccine_type: str - operation: str - nhs_number: str - person_dob: str - date_and_time: str - site_code: str diff --git a/lambdas/mns_publisher/src/create_notification.py b/lambdas/mns_publisher/src/create_notification.py index d8abd48f6c..ac611ec9f3 100644 --- a/lambdas/mns_publisher/src/create_notification.py +++ b/lambdas/mns_publisher/src/create_notification.py @@ -51,8 +51,8 @@ def create_mns_notification(sqs_event: SQSMessage) -> MnsNotificationPayload: "generalpractitioner": gp_ods_code, "sourceorganisation": site_code, "sourceapplication": supplier_system, - "subjectage": str(patient_age), - "immunisationtype": vaccine_type, + "subjectage": patient_age, + "immunisationtype": vaccine_type.upper(), "action": operation, }, } diff --git a/lambdas/mns_publisher/tests/test_create_notification.py b/lambdas/mns_publisher/tests/test_create_notification.py index 89414ffc00..179b1ee28b 100644 --- a/lambdas/mns_publisher/tests/test_create_notification.py +++ b/lambdas/mns_publisher/tests/test_create_notification.py @@ -73,7 +73,7 @@ def setUp(self): @patch("create_notification.get_practitioner_details_from_pds") @patch("create_notification.get_service_url") @patch("create_notification.uuid.uuid4") - def test_create_mns_notification_complete_payload(self, mock_uuid, mock_get_service_url, mock_get_gp): + def test_success_create_mns_notification_complete_payload(self, mock_uuid, mock_get_service_url, mock_get_gp): mock_uuid.return_value = MagicMock(hex="236a1d4a-5d69-4fa9-9c7f-e72bf505aa5b") mock_get_service_url.return_value = self.expected_immunisation_url mock_get_gp.return_value = self.expected_gp_ods_code @@ -92,9 +92,9 @@ def test_create_mns_notification_complete_payload(self, mock_uuid, mock_get_serv self.assertEqual(filtering["generalpractitioner"], self.expected_gp_ods_code) self.assertEqual(filtering["sourceorganisation"], "B0C4P") self.assertEqual(filtering["sourceapplication"], "TPP") - 
self.assertEqual(filtering["immunisationtype"], "hib") + self.assertEqual(filtering["immunisationtype"], "HIB") self.assertEqual(filtering["action"], "CREATE") - self.assertEqual(filtering["subjectage"], "21") + self.assertEqual(filtering["subjectage"], 21) self.assertIn("id", result) self.assertIsInstance(result["id"], str) diff --git a/lambdas/mns_publisher/tests/test_lambda_handler.py b/lambdas/mns_publisher/tests/test_lambda_handler.py index 3274802de8..1602e01214 100644 --- a/lambdas/mns_publisher/tests/test_lambda_handler.py +++ b/lambdas/mns_publisher/tests/test_lambda_handler.py @@ -292,9 +292,9 @@ def test_successful_notification_creation_with_gp(self, mock_logger, mock_get_to self.assertEqual(mns_payload["filtering"]["generalpractitioner"], "Y12345") self.assertEqual(mns_payload["filtering"]["sourceorganisation"], "B0C4P") self.assertEqual(mns_payload["filtering"]["sourceapplication"], "TPP") - self.assertEqual(mns_payload["filtering"]["immunisationtype"], "hib") + self.assertEqual(mns_payload["filtering"]["immunisationtype"], "HIB") self.assertEqual(mns_payload["filtering"]["action"], "CREATE") - self.assertEqual(mns_payload["filtering"]["subjectage"], "21") + self.assertEqual(mns_payload["filtering"]["subjectage"], 21) mock_logger.info.assert_any_call("Successfully processed all 1 messages") @@ -335,7 +335,7 @@ def test_pds_failure(self, mock_logger, mock_get_mns, mock_get_token): @responses.activate @patch("common.api_clients.authentication.AppRestrictedAuth.get_access_token") @patch("process_records.logger") - def test_non_successful_notification_creation_without_gp(self, mock_logger, mock_get_token): + def test_successful_notification_creation_with_expired_gp(self, mock_logger, mock_get_token): """ Test a Successful MNS Publish notification with calls to PDS for GP details, no batch failure """ @@ -374,8 +374,8 @@ def test_non_successful_notification_creation_without_gp(self, mock_logger, mock 
self.assertEqual(mns_payload["filtering"]["generalpractitioner"], None) self.assertEqual(mns_payload["filtering"]["sourceorganisation"], "B0C4P") self.assertEqual(mns_payload["filtering"]["sourceapplication"], "TPP") - self.assertEqual(mns_payload["filtering"]["immunisationtype"], "hib") + self.assertEqual(mns_payload["filtering"]["immunisationtype"], "HIB") self.assertEqual(mns_payload["filtering"]["action"], "CREATE") - self.assertEqual(mns_payload["filtering"]["subjectage"], "21") + self.assertEqual(mns_payload["filtering"]["subjectage"], 21) mock_logger.info.assert_any_call("Successfully processed all 1 messages")