diff --git a/ack_backend/tests/test_ack_processor.py b/ack_backend/tests/test_ack_processor.py index b253251e6..6292f736d 100644 --- a/ack_backend/tests/test_ack_processor.py +++ b/ack_backend/tests/test_ack_processor.py @@ -7,48 +7,42 @@ from ack_processor import lambda_handler from update_ack_file import obtain_current_ack_content, create_ack_data, update_ack_file from tests.test_utils_for_ack_backend import ( - DESTINATION_BUCKET_NAME, - AWS_REGION, + REGION_NAME, ValidValues, - CREATED_AT_FORMATTED_STRING, + MOCK_CREATED_AT_FORMATTED_STRING, DiagnosticsDictionaries, + MOCK_ENVIRONMENT_DICT, + BucketNames, + GenericSetUp, + GenericTearDown, + MockFileDetails, ) from copy import deepcopy import uuid -s3_client = boto3_client("s3", region_name=AWS_REGION) -file_name = "COVID19_Vaccinations_v5_YGM41_20240909T13005902.csv" -ack_file_key = "forwardedFile/COVID19_Vaccinations_v5_YGM41_20240909T13005902_BusAck_20241115T13435500.csv" -test_ack_file_key = "forwardedFile/COVID19_Vaccinations_v5_YGM41_20240909T13005902_BusAck_20241115T13455555.csv" +s3_client = boto3_client("s3", region_name=REGION_NAME) local_id = "111^222" -os.environ["ACK_BUCKET_NAME"] = DESTINATION_BUCKET_NAME -invalid_action_flag_diagnostics = "Invalid ACTION_FLAG - ACTION_FLAG must be 'NEW', 'UPDATE' or 'DELETE'" -test_bucket_name = "immunisation-batch-internal-testlambda-data-destinations" +INVALID_ACTION_FLAG_DIAGNOSTICS = "Invalid ACTION_FLAG - ACTION_FLAG must be 'NEW', 'UPDATE' or 'DELETE'" +mock_file = MockFileDetails.flu_emis +ack_file_key = mock_file.ack_file_key + +@patch.dict(os.environ, MOCK_ENVIRONMENT_DICT) @mock_s3 @mock_sqs class TestAckProcessor(unittest.TestCase): - def setup_s3(self): - """Creates a mock S3 bucket contain a single file different to one created during tests - to ensure s3 bucket loads correctly""" - ack_bucket_name = "immunisation-batch-internal-testlambda-data-destinations" - os.environ["ACK_BUCKET_NAME"] = test_bucket_name - existing_content = ValidValues.test_ack_header - s3_client.create_bucket( - Bucket=ack_bucket_name, - CreateBucketConfiguration={"LocationConstraint": AWS_REGION}, - ) - s3_client.put_object(Bucket=test_bucket_name, Key="some_other_file", Body=existing_content) + def setUp(self) -> None: + GenericSetUp(s3_client) - def setup_existing_ack_file(self, bucket_name, file_key, file_content): - """Creates a mock S3 bucket and uploads an existing file with the given content.""" - s3_client.create_bucket( - Bucket=DESTINATION_BUCKET_NAME, CreateBucketConfiguration={"LocationConstraint": AWS_REGION} - ) - s3_client.put_object(Bucket=bucket_name, Key=file_key, Body=file_content) + def tearDown(self) -> None: + GenericTearDown(s3_client) + + def setup_existing_ack_file(self, file_key, file_content): + """Uploads an existing file with the given content.""" + s3_client.put_object(Bucket=BucketNames.DESTINATION, Key=file_key, Body=file_content) def create_event(self, test_data): """ @@ -62,12 +56,12 @@ def create_event(self, test_data): return {"Records": [{"body": json.dumps(rows)}]} row_template = { - "file_key": file_name, + "file_key": mock_file.file_key, "row_id": "123^1", "local_id": ValidValues.local_id, "operation_requested": "create", "imms_id": "", - "created_at_formatted_string": "20241115T13435500", + "created_at_formatted_string": mock_file.created_at_formatted_string, } def ack_row_order(self, row_input, expected_ack_file_content, actual_ack_file_content): @@ -95,7 +89,7 @@ def ack_row_order(self, row_input, expected_ack_file_content, actual_ack_file_co def 
create_expected_ack_content(self, row_input, actual_ack_file_content, expected_ack_file_content): """creates test ack rows from using a list containing multiple rows""" - for i, row in enumerate(row_input): + for row in row_input: diagnostics_dictionary = row.get("diagnostics", {}) diagnostics = ( diagnostics_dictionary.get("error_message", "") @@ -107,11 +101,11 @@ def create_expected_ack_content(self, row_input, actual_ack_file_content, expect if diagnostics: ack_row = ( f"{row_id}|Fatal Error|Fatal|Fatal Error|30002|Business|30002|Business Level " - f"Response Value - Processing Error|20241115T13435500||111^222|{imms_id}|{diagnostics}|False" + f"Response Value - Processing Error|{mock_file.created_at_formatted_string}||111^222|{imms_id}|{diagnostics}|False" ) else: ack_row = ( - f"{row_id}|OK|Information|OK|30001|Business|30001|Success|20241115T13435500|" + f"{row_id}|OK|Information|OK|30001|Business|30001|Success|{mock_file.created_at_formatted_string}|" f"|111^222|{imms_id}||True" ) @@ -125,7 +119,9 @@ def generate_file_names(self): Returns: dict: A dictionary containing `file_key_existing`, `ack_file_name`, and `row_template`.""" file_key_existing = f"COVID19_Vaccinations_v5_DPSREDUCED_{uuid.uuid4().hex}.csv" - ack_file_name = f"forwardedFile/{file_key_existing.replace('.csv', '_BusAck_20241115T13435500.csv')}" + ack_file_name = ( + f"forwardedFile/{file_key_existing.replace('.csv', f'_BusAck_{mock_file.created_at_formatted_string}.csv')}" + ) row_template = self.row_template.copy() row_template.update({"file_key": file_key_existing}) @@ -136,25 +132,9 @@ def generate_file_names(self): "row_template": row_template, } - def environment_setup(self, ack_file_name, existing_content): - """ - Generates a file with existing content in the mock s3 bucket and updates row_template. - test_case_description (str): Description of the test case. - existing_content (str): The initial content to upload to the file in S3. 
- """ - - try: - s3_client.delete_object(Bucket=DESTINATION_BUCKET_NAME, Key=ack_file_name) - except s3_client.exceptions.NoSuchKey: - pass - - s3_client.put_object(Bucket=DESTINATION_BUCKET_NAME, Key=ack_file_name, Body=existing_content) - @patch("logging_decorators.send_log_to_firehose") def test_lambda_handler_main(self, mock_send_log_to_firehose): """Test lambda handler with dynamic ack_file_name and consistent row_template.""" - test_bucket_name = "immunisation-batch-internal-testlambda-data-destinations" - self.setup_s3() existing_content = ValidValues.test_ack_header test_cases = [ @@ -212,25 +192,23 @@ def test_lambda_handler_main(self, mock_send_log_to_firehose): self.assertEqual(response["statusCode"], 200) self.assertEqual(response["body"], '"Lambda function executed successfully!"') - retrieved_object = s3_client.get_object(Bucket=test_bucket_name, Key=file_info["ack_file_name"]) + retrieved_object = s3_client.get_object( + Bucket=BucketNames.DESTINATION, Key=file_info["ack_file_name"] + ) actual_ack_file_content = retrieved_object["Body"].read().decode("utf-8") self.create_expected_ack_content(test_data["rows"], actual_ack_file_content, existing_content) mock_send_log_to_firehose.assert_called() - s3_client.delete_object(Bucket=test_bucket_name, Key=file_info["ack_file_name"]) + s3_client.delete_object(Bucket=BucketNames.DESTINATION, Key=file_info["ack_file_name"]) @patch("logging_decorators.send_log_to_firehose") def test_lambda_handler_existing(self, mock_send_log_to_firehose): """Test lambda handler with dynamic ack_file_name and consistent row_template with an already existing ack file with content.""" - os.environ["ACK_BUCKET_NAME"] = DESTINATION_BUCKET_NAME existing_content = ValidValues.existing_ack_file_content - s3_client.create_bucket( - Bucket=DESTINATION_BUCKET_NAME, CreateBucketConfiguration={"LocationConstraint": AWS_REGION} - ) test_cases = [ { @@ -263,7 +241,7 @@ def test_lambda_handler_existing(self, mock_send_log_to_firehose): with self.subTest(msg=case["description"]): # Generate unique file names and set up the S3 file file_info = self.generate_file_names() - self.environment_setup(file_info["ack_file_name"], existing_content) + self.setup_existing_ack_file(file_info["ack_file_name"], existing_content) test_data = {"rows": [{**file_info["row_template"], **row} for row in case["rows"]]} @@ -275,12 +253,12 @@ def test_lambda_handler_existing(self, mock_send_log_to_firehose): self.assertEqual(response["body"], '"Lambda function executed successfully!"') retrieved_object = s3_client.get_object( - Bucket=DESTINATION_BUCKET_NAME, Key=file_info["ack_file_name"] + Bucket=BucketNames.DESTINATION, Key=file_info["ack_file_name"] ) actual_ack_file_content = retrieved_object["Body"].read().decode("utf-8") self.assertIn( - "123^5|OK|Information|OK|30001|Business|30001|Success|20241115T13435500||999^TEST|||True", + f"123^5|OK|Information|OK|30001|Business|30001|Success|{mock_file.created_at_formatted_string}||999^TEST|||True", actual_ack_file_content, ) self.assertIn(ValidValues.test_ack_header, actual_ack_file_content) @@ -289,17 +267,14 @@ def test_lambda_handler_existing(self, mock_send_log_to_firehose): mock_send_log_to_firehose.assert_called() - s3_client.delete_object(Bucket=DESTINATION_BUCKET_NAME, Key=file_info["ack_file_name"]) + s3_client.delete_object(Bucket=BucketNames.DESTINATION, Key=file_info["ack_file_name"]) def test_update_ack_file(self): """Test creating ack file with and without diagnostics""" - self.setup_s3() test_cases = [ { "description": "Single 
successful row", - "file_key": "COVID19_Vaccinations_v5_YGM41_20240909T13005902.csv", - "created_at_formatted_string": "20241115T13435500", "input_row": [ValidValues.create_ack_data_successful_row], "expected_row": [ ValidValues.update_ack_file_successful_row_no_immsid, @@ -307,8 +282,6 @@ def test_update_ack_file(self): }, { "description": "With multiple rows - failure and success rows", - "file_key": "COVID19_Vaccinations_v5_YGM41_20240909T13005902.csv", - "created_at_formatted_string": "20241115T13435500", "input_row": [ ValidValues.create_ack_data_successful_row, {**ValidValues.create_ack_data_failure_row, "IMMS_ID": "TEST_IMMS_ID"}, @@ -326,8 +299,6 @@ def test_update_ack_file(self): }, { "description": "Multiple rows With different diagnostics", - "file_key": "COVID19_Vaccinations_v5_YGM41_20240909T13005902.csv", - "created_at_formatted_string": "20241115T13435500", "input_row": [ {**ValidValues.create_ack_data_failure_row, "OPERATION_OUTCOME": "Error 1"}, {**ValidValues.create_ack_data_failure_row, "OPERATION_OUTCOME": "Error 2"}, @@ -349,48 +320,42 @@ def test_update_ack_file(self): ack_data_rows_with_id = [] for row in deepcopy(case["input_row"]): ack_data_rows_with_id.append(row) - update_ack_file(case["file_key"], case["created_at_formatted_string"], ack_data_rows_with_id) - created_string = case["created_at_formatted_string"] - expected_file_key = ( - f"forwardedFile/{case['file_key'].replace('.csv', f'_BusAck_{created_string}.csv')}" - ) - - objects = s3_client.list_objects_v2(Bucket=test_bucket_name) - self.assertIn(expected_file_key, [obj["Key"] for obj in objects.get("Contents", [])]) - retrieved_object = s3_client.get_object(Bucket=test_bucket_name, Key=ack_file_key) + update_ack_file(mock_file.file_key, mock_file.created_at_formatted_string, ack_data_rows_with_id) + expected_ack_file_key = mock_file.ack_file_key + objects = s3_client.list_objects_v2(Bucket=BucketNames.DESTINATION) + self.assertIn(expected_ack_file_key, [obj["Key"] for obj in objects.get("Contents", [])]) + retrieved_object = s3_client.get_object(Bucket=BucketNames.DESTINATION, Key=ack_file_key) retrieved_body = retrieved_object["Body"].read().decode("utf-8") for expected_row in deepcopy(case["expected_row"]): self.assertIn(expected_row, retrieved_body) - s3_client.delete_object(Bucket=test_bucket_name, Key=ack_file_key) + s3_client.delete_object(Bucket=BucketNames.DESTINATION, Key=ack_file_key) def test_update_ack_file_existing(self): """Test appending new rows to an existing ack file.""" - os.environ["ACK_BUCKET_NAME"] = DESTINATION_BUCKET_NAME - # Mock existing content in the ack file existing_content = ValidValues.existing_ack_file_content file_key = "RSV_Vaccinations_v5_TEST_20240905T13005922.csv" - ack_file_key = f"forwardedFile/RSV_Vaccinations_v5_TEST_20240905T13005922_BusAck_20241115T13435500.csv" + ack_file_key = f"forwardedFile/RSV_Vaccinations_v5_TEST_20240905T13005922_BusAck_{mock_file.created_at_formatted_string}.csv" ack_data_rows = [ ValidValues.create_ack_data_successful_row, ValidValues.create_ack_data_failure_row, ] - self.setup_existing_ack_file(DESTINATION_BUCKET_NAME, ack_file_key, existing_content) - retrieved_object = s3_client.get_object(Bucket=DESTINATION_BUCKET_NAME, Key=ack_file_key) + self.setup_existing_ack_file(ack_file_key, existing_content) + retrieved_object = s3_client.get_object(Bucket=BucketNames.DESTINATION, Key=ack_file_key) retrieved_body = retrieved_object["Body"].read().decode("utf-8") with patch("update_ack_file.s3_client", s3_client): - update_ack_file(file_key, 
CREATED_AT_FORMATTED_STRING, ack_data_rows) - retrieved_object = s3_client.get_object(Bucket=DESTINATION_BUCKET_NAME, Key=ack_file_key) + update_ack_file(file_key, MOCK_CREATED_AT_FORMATTED_STRING, ack_data_rows) + retrieved_object = s3_client.get_object(Bucket=BucketNames.DESTINATION, Key=ack_file_key) retrieved_body = retrieved_object["Body"].read().decode("utf-8") self.assertIn( - "123^5|OK|Information|OK|30001|Business|30001|Success|20241115T13435500||999^TEST|||True", + f"123^5|OK|Information|OK|30001|Business|30001|Success|{mock_file.created_at_formatted_string}||999^TEST|||True", retrieved_body, ) @@ -398,10 +363,10 @@ def test_update_ack_file_existing(self): self.assertIn("123^1|OK|", retrieved_body) self.assertIn("123^1|Fatal Error|", retrieved_body) - objects = s3_client.list_objects_v2(Bucket=DESTINATION_BUCKET_NAME) + objects = s3_client.list_objects_v2(Bucket=BucketNames.DESTINATION) self.assertIn(ack_file_key, [obj["Key"] for obj in objects.get("Contents", [])]) - s3_client.delete_object(Bucket=DESTINATION_BUCKET_NAME, Key=ack_file_key) + s3_client.delete_object(Bucket=BucketNames.DESTINATION, Key=ack_file_key) def test_create_ack_data(self): """Test create_ack_data with success and failure cases.""" @@ -409,7 +374,7 @@ def test_create_ack_data(self): test_cases = [ { "description": "Success row", - "created_at_formatted_string": "20241115T13435500", + "created_at_formatted_string": mock_file.created_at_formatted_string, "local_id": "local123", "row_id": "row456", "successful_api_response": True, @@ -453,38 +418,31 @@ def test_create_ack_data(self): self.assertEqual(result, expected_result) - @mock_s3 def test_obtain_current_ack_content_file_no_existing(self): """Test obtain current ack content when there a file does not already exist.""" - os.environ["ACK_BUCKET_NAME"] = test_bucket_name - ack_bucket_name = "immunisation-batch-internal-testlambda-data-destinations" ACK_KEY = "forwardedFile/COVID19_Vaccinations_v5_YGM41_20240909T13005902_BusAck_20241115T13454555.csv" - self.setup_s3() with patch("update_ack_file.s3_client", s3_client): - result = obtain_current_ack_content(ack_bucket_name, ACK_KEY) - - self.assertEqual(result.getvalue(), ValidValues.test_ack_header) + result = obtain_current_ack_content(BucketNames.DESTINATION, ACK_KEY) + self.assertEqual(result.getvalue(), ValidValues.test_ack_header) - @mock_s3 def test_obtain_current_ack_content_file_exists(self): """Test that the existing ack file content is retrieved and new rows are added.""" - existing_content = ValidValues.existing_ack_file_content - self.setup_existing_ack_file(DESTINATION_BUCKET_NAME, ack_file_key, existing_content) + self.setup_existing_ack_file(ack_file_key, existing_content) with patch("update_ack_file.s3_client", s3_client): - result = obtain_current_ack_content(DESTINATION_BUCKET_NAME, ack_file_key) + result = obtain_current_ack_content(BucketNames.DESTINATION, ack_file_key) self.assertIn(existing_content, result.getvalue()) self.assertEqual(result.getvalue(), existing_content) - retrieved_object = s3_client.get_object(Bucket=DESTINATION_BUCKET_NAME, Key=ack_file_key) + retrieved_object = s3_client.get_object(Bucket=BucketNames.DESTINATION, Key=ack_file_key) retrieved_body = retrieved_object["Body"].read().decode("utf-8") self.assertEqual(retrieved_body, existing_content) - objects = s3_client.list_objects_v2(Bucket=DESTINATION_BUCKET_NAME) + objects = s3_client.list_objects_v2(Bucket=BucketNames.DESTINATION) self.assertIn(ack_file_key, [obj["Key"] for obj in objects.get("Contents", [])]) - 
s3_client.delete_object(Bucket=DESTINATION_BUCKET_NAME, Key=ack_file_key) + s3_client.delete_object(Bucket=BucketNames.DESTINATION, Key=ack_file_key) @patch("logging_decorators.send_log_to_firehose") @patch("update_ack_file.create_ack_data") @@ -522,11 +480,6 @@ def test_lambda_handler_error_scenarios( self.assertIn(scenario["expected_message"], error_log["diagnostics"]) mock_send_log_to_firehose.reset_mock() - def tearDown(self): - """'Clear all mock resources""" - # Clean up mock resources - os.environ.pop("ACK_BUCKET_NAME", None) - if __name__ == "__main__": unittest.main() diff --git a/ack_backend/tests/test_utils_for_ack_backend.py b/ack_backend/tests/test_utils_for_ack_backend.py index f89a736de..cad94a811 100644 --- a/ack_backend/tests/test_utils_for_ack_backend.py +++ b/ack_backend/tests/test_utils_for_ack_backend.py @@ -2,18 +2,24 @@ from datetime import datetime -SOURCE_BUCKET_NAME = "immunisation-batch-internal-test-data-sources" -DESTINATION_BUCKET_NAME = "immunisation-batch-internal-test-data-destinations" -CONFIG_BUCKET_NAME = "immunisation-batch-internal-dev-configs" STREAM_NAME = "imms-batch-internal-dev-processingdata-stream" -CREATED_AT_FORMATTED_STRING = "20241115T13435500" +MOCK_CREATED_AT_FORMATTED_STRING = "20211120T12000000" IMMS_ID = "Immunization#932796c8-fd20-4d31-a4d7-e9613de70ad6" -AWS_REGION = "eu-west-2" +REGION_NAME = "eu-west-2" STATIC_DATETIME = datetime(2021, 11, 20, 12, 0, 0) +class BucketNames: + """Bucket Names for testing""" + + DESTINATION = "immunisation-batch-internal-test-data-destinations" + + +MOCK_ENVIRONMENT_DICT = {"ACK_BUCKET_NAME": BucketNames.DESTINATION} + + class DiagnosticsDictionaries: """Example diagnostics dictionaries which may be received from the record forwarder""" @@ -163,7 +169,7 @@ class ValidValues: "RESPONSE_TYPE": "Business", "RESPONSE_CODE": "30001", "RESPONSE_DISPLAY": "Success", - "RECEIVED_TIME": CREATED_AT_FORMATTED_STRING, + "RECEIVED_TIME": MOCK_CREATED_AT_FORMATTED_STRING, "MAILBOX_FROM": "", "LOCAL_ID": local_id, "IMMS_ID": "", @@ -180,7 +186,7 @@ class ValidValues: "RESPONSE_TYPE": "Business", "RESPONSE_CODE": "30002", "RESPONSE_DISPLAY": "Business Level Response Value - Processing Error", - "RECEIVED_TIME": CREATED_AT_FORMATTED_STRING, + "RECEIVED_TIME": MOCK_CREATED_AT_FORMATTED_STRING, "MAILBOX_FROM": "", "LOCAL_ID": local_id, "IMMS_ID": "", @@ -189,29 +195,29 @@ class ValidValues: } update_ack_file_successful_row_no_immsid = ( - f"123^1|OK|Information|OK|30001|Business|30001|Success|{CREATED_AT_FORMATTED_STRING}||{local_id}|||True\n" + f"123^1|OK|Information|OK|30001|Business|30001|Success|{MOCK_CREATED_AT_FORMATTED_STRING}||{local_id}|||True\n" ) update_ack_file_failure_row_no_immsid = ( "123^1|Fatal Error|Fatal|Fatal Error|30002|Business|30002|" - f"Business Level Response Value - Processing Error|{CREATED_AT_FORMATTED_STRING}|" + f"Business Level Response Value - Processing Error|{MOCK_CREATED_AT_FORMATTED_STRING}|" f"|{local_id}||Error_value|False\n" ) update_ack_file_successful_row_immsid = ( "123^1|OK|Information|OK|30001|Business|30001|Success" - f"|{CREATED_AT_FORMATTED_STRING}||{local_id}|{imms_id}||True\n" + f"|{MOCK_CREATED_AT_FORMATTED_STRING}||{local_id}|{imms_id}||True\n" ) update_ack_file_failure_row_immsid = ( "123^1|Fatal Error|Fatal|Fatal Error|30002|Business|30002|Business Level Response Value - Processing Error" - f"|{CREATED_AT_FORMATTED_STRING}||{local_id}|{imms_id}|Error_value|False\n" + f"|{MOCK_CREATED_AT_FORMATTED_STRING}||{local_id}|{imms_id}|Error_value|False\n" ) 
existing_ack_file_content = ( "MESSAGE_HEADER_ID|HEADER_RESPONSE_CODE|ISSUE_SEVERITY|ISSUE_CODE|ISSUE_DETAILS_CODE|RESPONSE_TYPE|" "RESPONSE_CODE|RESPONSE_DISPLAY|RECEIVED_TIME|MAILBOX_FROM|LOCAL_ID|IMMS_ID|OPERATION_OUTCOME" - "|MESSAGE_DELIVERY\n123^5|OK|Information|OK|30001|Business|30001|Success|20241115T13435500||999^TEST|||True\n" + f"|MESSAGE_DELIVERY\n123^5|OK|Information|OK|30001|Business|30001|Success|{MOCK_CREATED_AT_FORMATTED_STRING}||999^TEST|||True\n" ) test_ack_header = ( @@ -239,3 +245,82 @@ class InvalidValues: "statusCode": 500, "diagnostics": "An unhandled error occurred during batch processing", } + + +class GenericSetUp: + """ + Performs generic setup of mock resources: + * If s3_client is provided, creates the destination bucket (the only bucket these ack tests + require) + * If firehose_client is provided, creates a firehose delivery stream + """ + + def __init__(self, s3_client=None, firehose_client=None): + + if s3_client: + for bucket_name in [BucketNames.DESTINATION]: + s3_client.create_bucket( + Bucket=bucket_name, CreateBucketConfiguration={"LocationConstraint": REGION_NAME} + ) + + # if firehose_client: + # firehose_client.create_delivery_stream( + # DeliveryStreamName=Firehose.STREAM_NAME, + # DeliveryStreamType="DirectPut", + # S3DestinationConfiguration={ + # "RoleARN": "arn:aws:iam::123456789012:role/mock-role", + # "BucketARN": "arn:aws:s3:::" + BucketNames.MOCK_FIREHOSE, + # "Prefix": "firehose-backup/", + # }, + # ) + + +class GenericTearDown: + """Performs generic tear down of mock resources""" + + def __init__(self, s3_client=None, firehose_client=None, kinesis_client=None): + + if s3_client: + for bucket_name in [BucketNames.DESTINATION]: + for obj in s3_client.list_objects_v2(Bucket=bucket_name).get("Contents", []): + s3_client.delete_object(Bucket=bucket_name, Key=obj["Key"]) + s3_client.delete_bucket(Bucket=bucket_name) + + # if firehose_client: + # firehose_client.delete_delivery_stream(DeliveryStreamName=Firehose.STREAM_NAME) + + +class FileDetails: + """ + Class to create and hold values for a mock file, based on the vaccine type, supplier and ods code. + NOTE: Supplier and ODS code are hardcoded rather than mapped, for testing purposes. + NOTE: The permissions_list and permissions_config are examples of full permissions for the supplier for the + vaccine type. 
+ """ + + def __init__(self, vaccine_type: str, supplier: str, ods_code: str): + self.name = f"{vaccine_type.upper()}/ {supplier.upper()} file" + self.created_at_formatted_string = MOCK_CREATED_AT_FORMATTED_STRING + self.file_key = f"{vaccine_type}_Vaccinations_v5_{ods_code}_20210730T12000000.csv" + self.ack_file_key = ( + f"forwardedFile/{vaccine_type}_Vaccinations_v5_{ods_code}_20210730T12000000_BusAck_20211120T12000000.csv" + ) + self.vaccine_type = vaccine_type + self.ods_code = ods_code + self.supplier = supplier + self.message_id = f"{vaccine_type.lower()}_{supplier.lower()}_test_id" + + self.base_event = { + "file_key": self.file_key, + "supplier": self.supplier, + "vaccine_type": self.vaccine_type, + "created_at_formatted_string": self.created_at_formatted_string, + } + + +class MockFileDetails: + """Class containing mock file details for use in tests""" + + rsv_ravs = FileDetails("RSV", "RAVS", "X26") + rsv_emis = FileDetails("RSV", "EMIS", "8HK48") + flu_emis = FileDetails("FLU", "EMIS", "YGM41") diff --git a/backend/src/constants.py b/backend/src/constants.py index 95ff52d12..8c3632cd2 100644 --- a/backend/src/constants.py +++ b/backend/src/constants.py @@ -20,3 +20,6 @@ class Urls: ) ods_organization_code = "https://fhir.nhs.uk/Id/ods-organization-code" urn_school_number = "https://fhir.hl7.org.uk/Id/urn-school-number" + + +GENERIC_SERVER_ERROR_DIAGNOSTICS_MESSAGE = "Unable to process request. Issue may be transient." diff --git a/backend/src/create_imms_handler.py b/backend/src/create_imms_handler.py index a23bac4c4..388bf4731 100644 --- a/backend/src/create_imms_handler.py +++ b/backend/src/create_imms_handler.py @@ -7,6 +7,7 @@ from local_lambda import load_string from models.errors import Severity, Code, create_operation_outcome from log_structure import function_info +from constants import GENERIC_SERVER_ERROR_DIAGNOSTICS_MESSAGE @function_info @@ -17,9 +18,12 @@ def create_imms_handler(event, context): def create_immunization(event, controller: FhirController): try: return controller.create_immunization(event) - except Exception as e: + except Exception: # pylint: disable = broad-exception-caught exp_error = create_operation_outcome( - resource_id=str(uuid.uuid4()), severity=Severity.error, code=Code.server_error, diagnostics=str(e) + resource_id=str(uuid.uuid4()), + severity=Severity.error, + code=Code.server_error, + diagnostics=GENERIC_SERVER_ERROR_DIAGNOSTICS_MESSAGE, ) return FhirController.create_response(500, exp_error) diff --git a/backend/src/delete_imms_handler.py b/backend/src/delete_imms_handler.py index 879084a34..c66b69e24 100644 --- a/backend/src/delete_imms_handler.py +++ b/backend/src/delete_imms_handler.py @@ -6,6 +6,7 @@ from fhir_controller import FhirController, make_controller from models.errors import Severity, Code, create_operation_outcome from log_structure import function_info +from constants import GENERIC_SERVER_ERROR_DIAGNOSTICS_MESSAGE @function_info @@ -16,10 +17,13 @@ def delete_imms_handler(event, context): def delete_immunization(event, controller: FhirController): try: return controller.delete_immunization(event) - except Exception as e: - exp_error = create_operation_outcome(resource_id=str(uuid.uuid4()), severity=Severity.error, - code=Code.server_error, - diagnostics=str(e)) + except Exception: # pylint: disable = broad-exception-caught + exp_error = create_operation_outcome( + resource_id=str(uuid.uuid4()), + severity=Severity.error, + code=Code.server_error, + diagnostics=GENERIC_SERVER_ERROR_DIAGNOSTICS_MESSAGE, + ) return 
FhirController.create_response(500, exp_error) @@ -29,13 +33,11 @@ def delete_immunization(event, controller: FhirController): args = parser.parse_args() event = { - "pathParameters": { - "id": args.id - }, + "pathParameters": {"id": args.id}, "headers": { - 'Content-Type': 'application/x-www-form-urlencoded', - 'AuthenticationType': 'ApplicationRestricted', - 'Permissions': (','.join([Permission.DELETE])) - } + "Content-Type": "application/x-www-form-urlencoded", + "AuthenticationType": "ApplicationRestricted", + "Permissions": (",".join([Permission.DELETE])), + }, } pprint.pprint(delete_imms_handler(event, {})) diff --git a/backend/src/get_imms_handler.py b/backend/src/get_imms_handler.py index 62cf52a89..51db2996e 100644 --- a/backend/src/get_imms_handler.py +++ b/backend/src/get_imms_handler.py @@ -6,20 +6,24 @@ from fhir_controller import FhirController, make_controller from models.errors import Severity, Code, create_operation_outcome from log_structure import function_info +from constants import GENERIC_SERVER_ERROR_DIAGNOSTICS_MESSAGE @function_info -def get_imms_handler(event, context): +def get_imms_handler(event, context): return get_immunization_by_id(event, make_controller()) def get_immunization_by_id(event, controller: FhirController): try: return controller.get_immunization_by_id(event) - except Exception as e: - exp_error = create_operation_outcome(resource_id=str(uuid.uuid4()), severity=Severity.error, - code=Code.server_error, - diagnostics=str(e)) + except Exception: # pylint: disable = broad-exception-caught + exp_error = create_operation_outcome( + resource_id=str(uuid.uuid4()), + severity=Severity.error, + code=Code.server_error, + diagnostics=GENERIC_SERVER_ERROR_DIAGNOSTICS_MESSAGE, + ) return FhirController.create_response(500, exp_error) @@ -29,13 +33,11 @@ def get_immunization_by_id(event, controller: FhirController): args = parser.parse_args() event = { - "pathParameters": { - "id": args.id - }, + "pathParameters": {"id": args.id}, "headers": { - 'Content-Type': 'application/x-www-form-urlencoded', - 'AuthenticationType': 'ApplicationRestricted', - 'Permissions': (','.join([Permission.READ])) - } + "Content-Type": "application/x-www-form-urlencoded", + "AuthenticationType": "ApplicationRestricted", + "Permissions": (",".join([Permission.READ])), + }, } pprint.pprint(get_imms_handler(event, {})) diff --git a/backend/src/search_imms_handler.py b/backend/src/search_imms_handler.py index 507565697..d6beaf663 100644 --- a/backend/src/search_imms_handler.py +++ b/backend/src/search_imms_handler.py @@ -9,6 +9,7 @@ from authorization import Permission from fhir_controller import FhirController, make_controller from models.errors import Severity, Code, create_operation_outcome +from constants import GENERIC_SERVER_ERROR_DIAGNOSTICS_MESSAGE from log_structure import function_info import base64 import urllib.parse @@ -21,26 +22,33 @@ def search_imms_handler(event: events.APIGatewayProxyEventV1, context: context_) def search_imms(event: events.APIGatewayProxyEventV1, controller: FhirController): try: - query_params = event.get('queryStringParameters', {}) - body=event["body"] + query_params = event.get("queryStringParameters", {}) + body = event["body"] body_has_immunization_identifier = False query_string_has_immunization_identifier = False - query_string_has_element =False + query_string_has_element = False body_has_immunization_element = False - if not (query_params == None and body== None) : + if not (query_params == None and body == None): if query_params: - 
query_string_has_immunization_identifier = 'immunization.identifier' in event.get('queryStringParameters', {}) - query_string_has_element = '_element' in event.get('queryStringParameters', {}) + query_string_has_immunization_identifier = "immunization.identifier" in event.get( + "queryStringParameters", {} + ) + query_string_has_element = "_element" in event.get("queryStringParameters", {}) # Decode body from base64 - if event['body']: - decoded_body = base64.b64decode(event['body']).decode('utf-8') + if event["body"]: + decoded_body = base64.b64decode(event["body"]).decode("utf-8") # Parse the URL encoded body parsed_body = urllib.parse.parse_qs(decoded_body) # Check for 'immunization.identifier' in body - body_has_immunization_identifier = 'immunization.identifier' in parsed_body - body_has_immunization_element = '_element' in parsed_body - if query_string_has_immunization_identifier or body_has_immunization_identifier or query_string_has_element or body_has_immunization_element: + body_has_immunization_identifier = "immunization.identifier" in parsed_body + body_has_immunization_element = "_element" in parsed_body + if ( + query_string_has_immunization_identifier + or body_has_immunization_identifier + or query_string_has_element + or body_has_immunization_element + ): return controller.get_immunization_by_identifier(event) response = controller.search_immunizations(event) else: @@ -58,12 +66,12 @@ def search_imms(event: events.APIGatewayProxyEventV1, controller: FhirController ) return FhirController.create_response(400, exp_error) return response - except Exception as e: + except Exception: # pylint: disable = broad-exception-caught exp_error = create_operation_outcome( resource_id=str(uuid.uuid4()), severity=Severity.error, code=Code.server_error, - diagnostics=traceback.format_exc(), + diagnostics=GENERIC_SERVER_ERROR_DIAGNOSTICS_MESSAGE, ) return FhirController.create_response(500, exp_error) @@ -92,15 +100,9 @@ def search_imms(event: events.APIGatewayProxyEventV1, controller: FhirController help="Identifier of System", type=str, required=False, - dest="immunization_identifier" - ) - parser.add_argument( - "--element", - help="Identifier of System", - type=str, - required=False, - dest="_element" + dest="immunization_identifier", ) + parser.add_argument("--element", help="Identifier of System", type=str, required=False, dest="_element") args = parser.parse_args() event: events.APIGatewayProxyEventV1 = { @@ -111,7 +113,7 @@ def search_imms(event: events.APIGatewayProxyEventV1, controller: FhirController "-date.to": [args.date_to] if args.date_to else [], "_include": ["Immunization:patient"], "immunization_identifier": [args.immunization_identifier] if args.immunization_identifier else [], - "_element": [args._element] if args._element else [] + "_element": [args._element] if args._element else [], }, "httpMethod": "POST", "headers": { diff --git a/backend/src/update_imms_handler.py b/backend/src/update_imms_handler.py index b45f1e1fe..bb3a09589 100644 --- a/backend/src/update_imms_handler.py +++ b/backend/src/update_imms_handler.py @@ -7,6 +7,7 @@ from local_lambda import load_string from models.errors import Severity, Code, create_operation_outcome from log_structure import function_info +from constants import GENERIC_SERVER_ERROR_DIAGNOSTICS_MESSAGE @function_info @@ -17,10 +18,13 @@ def update_imms_handler(event, context): def update_imms(event, controller: FhirController): try: return controller.update_immunization(event) - except Exception as e: - exp_error = 
create_operation_outcome(resource_id=str(uuid.uuid4()), severity=Severity.error, - code=Code.server_error, - diagnostics=str(e)) + except Exception: # pylint: disable = broad-exception-caught + exp_error = create_operation_outcome( + resource_id=str(uuid.uuid4()), + severity=Severity.error, + code=Code.server_error, + diagnostics=GENERIC_SERVER_ERROR_DIAGNOSTICS_MESSAGE, + ) return FhirController.create_response(500, exp_error) @@ -31,15 +35,13 @@ def update_imms(event, controller: FhirController): args = parser.parse_args() event = { - "pathParameters": { - "id": args.id - }, + "pathParameters": {"id": args.id}, "body": load_string(args.path), "headers": { - 'Content-Type': 'application/x-www-form-urlencoded', - 'AuthenticationType': 'ApplicationRestricted', - 'Permissions': (','.join([Permission.UPDATE])) - } + "Content-Type": "application/x-www-form-urlencoded", + "AuthenticationType": "ApplicationRestricted", + "Permissions": (",".join([Permission.UPDATE])), + }, } pprint.pprint(event) diff --git a/backend/tests/test_create_imms.py b/backend/tests/test_create_imms.py index e7a1ec25d..2191f28ab 100644 --- a/backend/tests/test_create_imms.py +++ b/backend/tests/test_create_imms.py @@ -5,8 +5,9 @@ from create_imms_handler import create_immunization from fhir_controller import FhirController from models.errors import Severity, Code, create_operation_outcome +from constants import GENERIC_SERVER_ERROR_DIAGNOSTICS_MESSAGE + -"test" class TestCreateImmunizationById(unittest.TestCase): def setUp(self): self.controller = create_autospec(FhirController) @@ -31,9 +32,12 @@ def test_handle_exception(self): error_msg = "an unhandled error" self.controller.create_immunization.side_effect = Exception(error_msg) - exp_error = create_operation_outcome(resource_id=None, severity=Severity.error, - code=Code.server_error, - diagnostics=error_msg) + exp_error = create_operation_outcome( + resource_id=None, + severity=Severity.error, + code=Code.server_error, + diagnostics=GENERIC_SERVER_ERROR_DIAGNOSTICS_MESSAGE, + ) # When act_res = create_immunization(lambda_event, self.controller) diff --git a/backend/tests/test_delete_imms.py b/backend/tests/test_delete_imms.py index 1526b3e28..b9a453608 100644 --- a/backend/tests/test_delete_imms.py +++ b/backend/tests/test_delete_imms.py @@ -5,8 +5,9 @@ from delete_imms_handler import delete_immunization from fhir_controller import FhirController from models.errors import Severity, Code, create_operation_outcome +from constants import GENERIC_SERVER_ERROR_DIAGNOSTICS_MESSAGE + -"test" class TestDeleteImmunizationById(unittest.TestCase): def setUp(self): self.controller = create_autospec(FhirController) @@ -31,9 +32,12 @@ def test_handle_exception(self): error_msg = "an unhandled error" self.controller.delete_immunization.side_effect = Exception(error_msg) - exp_error = create_operation_outcome(resource_id=None, severity=Severity.error, - code=Code.server_error, - diagnostics=error_msg) + exp_error = create_operation_outcome( + resource_id=None, + severity=Severity.error, + code=Code.server_error, + diagnostics=GENERIC_SERVER_ERROR_DIAGNOSTICS_MESSAGE, + ) # When act_res = delete_immunization(lambda_event, self.controller) diff --git a/backend/tests/test_get_imms.py b/backend/tests/test_get_imms.py index 385733993..6862ac57e 100644 --- a/backend/tests/test_get_imms.py +++ b/backend/tests/test_get_imms.py @@ -5,15 +5,17 @@ from fhir_controller import FhirController from get_imms_handler import get_immunization_by_id from models.errors import Severity, Code, 
create_operation_outcome +from constants import GENERIC_SERVER_ERROR_DIAGNOSTICS_MESSAGE + + -"test" class TestGetImmunisationById(unittest.TestCase): def setUp(self): self.controller = create_autospec(FhirController) def test_get_immunization_by_id(self): """it should return Immunization by id""" - lambda_event = {'headers': {'id': 'an-id'},"pathParameters": {"id": "an-id"}} + lambda_event = {"headers": {"id": "an-id"}, "pathParameters": {"id": "an-id"}} exp_res = {"a-key": "a-value"} self.controller.get_immunization_by_id.return_value = exp_res @@ -27,13 +29,16 @@ def test_get_immunization_by_id(self): def test_handle_exception(self): """unhandled exceptions should result in 500""" - lambda_event = {'headers': {'id': 'an-id'},"pathParameters": {"id": "an-id"}} + lambda_event = {"headers": {"id": "an-id"}, "pathParameters": {"id": "an-id"}} error_msg = "an unhandled error" self.controller.get_immunization_by_id.side_effect = Exception(error_msg) - exp_error = create_operation_outcome(resource_id=None, severity=Severity.error, - code=Code.server_error, - diagnostics=error_msg) + exp_error = create_operation_outcome( + resource_id=None, + severity=Severity.error, + code=Code.server_error, + diagnostics=GENERIC_SERVER_ERROR_DIAGNOSTICS_MESSAGE, + ) # When act_res = get_immunization_by_id(lambda_event, self.controller) diff --git a/backend/tests/test_search_imms.py b/backend/tests/test_search_imms.py index 18dca7fff..af11c0f6c 100644 --- a/backend/tests/test_search_imms.py +++ b/backend/tests/test_search_imms.py @@ -6,17 +6,18 @@ from models.errors import Severity, Code, create_operation_outcome from search_imms_handler import search_imms from pathlib import Path +from constants import GENERIC_SERVER_ERROR_DIAGNOSTICS_MESSAGE script_location = Path(__file__).absolute().parent -"test" + class TestSearchImmunizations(unittest.TestCase): def setUp(self): self.controller = create_autospec(FhirController) def test_search_immunizations(self): """it should return a list of Immunizations""" - lambda_event = {"pathParameters": {"id": "an-id"},"body":None} + lambda_event = {"pathParameters": {"id": "an-id"}, "body": None} exp_res = {"a-key": "a-value"} self.controller.search_immunizations.return_value = exp_res @@ -30,9 +31,14 @@ def test_search_immunizations(self): def test_search_immunizations_to_get_imms_id(self): """it should return a list of Immunizations""" - lambda_event = {"pathParameters": {"id": "an-id"}, - "queryStringParameters": {'immunization.identifier': 'https://supplierABC/identifiers/vacc|f10b59b3-fc73-4616-99c9-9e882ab31184','_element':'id,meta'}, - "body":None} + lambda_event = { + "pathParameters": {"id": "an-id"}, + "queryStringParameters": { + "immunization.identifier": "https://supplierABC/identifiers/vacc|f10b59b3-fc73-4616-99c9-9e882ab31184", + "_element": "id,meta", + }, + "body": None, + } exp_res = {"a-key": "a-value"} self.controller.get_immunization_by_identifier.return_value = exp_res @@ -42,12 +48,15 @@ def test_search_immunizations_to_get_imms_id(self): # Then self.controller.get_immunization_by_identifier.assert_called_once_with(lambda_event) - self.assertDictEqual(exp_res, act_res) - + self.assertDictEqual(exp_res, act_res) def test_search_immunizations_get_id_from_body(self): """it should return a list of Immunizations""" - lambda_event = {"pathParameters": {"id": 
"an-id"},"body":'cGF0aWVudC5pZGVudGlmaWVyPWh0dHBzJTNBJTJGJTJGZmhpci5uaHMudWslMkZJZCUyRm5ocy1udW1iZXIlN0M5NjkzNjMyMTA5Ji1pbW11bml6YXRpb24udGFyZ2V0PUNPVklEMTkmX2luY2x1ZGU9SW1tdW5pemF0aW9uJTNBcGF0aWVudCZpbW11bml6YXRpb24uaWRlbnRpZmllcj1odHRwcyUzQSUyRiUyRnN1cHBsaWVyQUJDJTJGaWRlbnRpZmllcnMlMkZ2YWNjJTdDZjEwYjU5YjMtZmM3My00NjE2LTk5YzktOWU4ODJhYjMxMTg0Jl9lbGVtZW50PWlkJTJDbWV0YSZpZD1z','queryStringParameters':None} + lambda_event = { + "pathParameters": {"id": "an-id"}, + "body": "cGF0aWVudC5pZGVudGlmaWVyPWh0dHBzJTNBJTJGJTJGZmhpci5uaHMudWslMkZJZCUyRm5ocy1udW1iZXIlN0M5NjkzNjMyMTA5Ji1pbW11bml6YXRpb24udGFyZ2V0PUNPVklEMTkmX2luY2x1ZGU9SW1tdW5pemF0aW9uJTNBcGF0aWVudCZpbW11bml6YXRpb24uaWRlbnRpZmllcj1odHRwcyUzQSUyRiUyRnN1cHBsaWVyQUJDJTJGaWRlbnRpZmllcnMlMkZ2YWNjJTdDZjEwYjU5YjMtZmM3My00NjE2LTk5YzktOWU4ODJhYjMxMTg0Jl9lbGVtZW50PWlkJTJDbWV0YSZpZD1z", + "queryStringParameters": None, + } exp_res = {"a-key": "a-value"} self.controller.get_immunization_by_identifier.return_value = exp_res @@ -57,11 +66,11 @@ def test_search_immunizations_get_id_from_body(self): # Then self.controller.get_immunization_by_identifier.assert_called_once_with(lambda_event) - self.assertDictEqual(exp_res, act_res) + self.assertDictEqual(exp_res, act_res) def test_search_immunizations_get_id_from_body_passing_none(self): """it should enter search_immunizations as both the request params are none""" - lambda_event = {"pathParameters": {"id": "an-id"},"body":None,'queryStringParameters':None} + lambda_event = {"pathParameters": {"id": "an-id"}, "body": None, "queryStringParameters": None} exp_res = {"a-key": "a-value"} self.controller.search_immunizations.return_value = exp_res @@ -71,11 +80,15 @@ def test_search_immunizations_get_id_from_body_passing_none(self): # Then self.controller.search_immunizations.assert_called_once_with(lambda_event) - self.assertDictEqual(exp_res, act_res) - + self.assertDictEqual(exp_res, act_res) + def test_search_immunizations_get_id_from_body_element(self): """it should enter into get_immunization_by_identifier only _element paramter is present""" - lambda_event = {"pathParameters": {"id": "an-id"},"body":'X2VsZW1lbnQ9aWQlMkNtZXRh','queryStringParameters':None} + lambda_event = { + "pathParameters": {"id": "an-id"}, + "body": "X2VsZW1lbnQ9aWQlMkNtZXRh", + "queryStringParameters": None, + } exp_res = {"a-key": "a-value"} self.controller.get_immunization_by_identifier.return_value = exp_res @@ -89,7 +102,11 @@ def test_search_immunizations_get_id_from_body_element(self): def test_search_immunizations_get_id_from_body_imms_identifer(self): """it should enter into get_immunization_by_identifier only immunization.identifier paramter is present""" - lambda_event = {"pathParameters": {"id": "an-id"},"body":'aW1tdW5pemF0aW9uLmlkZW50aWZpZXI9aWQlMkNtZXRh','queryStringParameters':None} + lambda_event = { + "pathParameters": {"id": "an-id"}, + "body": "aW1tdW5pemF0aW9uLmlkZW50aWZpZXI9aWQlMkNtZXRh", + "queryStringParameters": None, + } exp_res = {"a-key": "a-value"} self.controller.get_immunization_by_identifier.return_value = exp_res @@ -99,11 +116,11 @@ def test_search_immunizations_get_id_from_body_imms_identifer(self): # Then self.controller.get_immunization_by_identifier.assert_called_once_with(lambda_event) - self.assertDictEqual(exp_res, act_res) - + self.assertDictEqual(exp_res, act_res) + def test_search_immunizations_lambda_size_limit(self): """it should return 400 as search returned too many results.""" - lambda_event = {"pathParameters": {"id": "an-id"},"body":None} + lambda_event = {"pathParameters": {"id": 
"an-id"}, "body": None} request_file = script_location / "sample_data" / "sample_input_search_imms.json" with open(request_file) as f: exp_res = json.load(f) @@ -125,7 +142,10 @@ def test_handle_exception(self): self.controller.search_immunizations.side_effect = Exception(error_msg) exp_error = create_operation_outcome( - resource_id=None, severity=Severity.error, code=Code.server_error, diagnostics=error_msg + resource_id=None, + severity=Severity.error, + code=Code.server_error, + diagnostics=GENERIC_SERVER_ERROR_DIAGNOSTICS_MESSAGE, ) # When diff --git a/backend/tests/test_update_imms.py b/backend/tests/test_update_imms.py index 593732841..1e439f57a 100644 --- a/backend/tests/test_update_imms.py +++ b/backend/tests/test_update_imms.py @@ -5,8 +5,9 @@ from fhir_controller import FhirController from models.errors import Severity, Code, create_operation_outcome from update_imms_handler import update_imms +from constants import GENERIC_SERVER_ERROR_DIAGNOSTICS_MESSAGE + -"test" class TestUpdateImmunizations(unittest.TestCase): def setUp(self): self.controller = create_autospec(FhirController) @@ -32,7 +33,10 @@ def test_handle_exception(self): self.controller.update_immunization.side_effect = Exception(error_msg) exp_error = create_operation_outcome( - resource_id=None, severity=Severity.error, code=Code.server_error, diagnostics=error_msg + resource_id=None, + severity=Severity.error, + code=Code.server_error, + diagnostics=GENERIC_SERVER_ERROR_DIAGNOSTICS_MESSAGE, ) # When @@ -42,18 +46,22 @@ def test_handle_exception(self): act_body = json.loads(act_res["body"]) act_body["id"] = None - #self.assertDictEqual(act_body, exp_error) + self.assertDictEqual(act_body, exp_error) self.assertEqual(act_res["statusCode"], 500) def test_update_imms_with_duplicated_identifier_returns_error(self): """Should return an IdentifierDuplication error""" lambda_event = {"pathParameters": {"id": "an-id"}} - error_msg = {'statusCode': 422, 'headers': {'Content-Type': 'application/fhir+json'}, 'body': '{"resourceType": "OperationOutcome", "id": "5c132d8a-a928-4e0e-8792-0c6456e625c2", "meta": {"profile": ["https://simplifier.net/guide/UKCoreDevelopment2/ProfileUKCore-OperationOutcome"]}, "issue": [{"severity": "error", "code": "exception", "details": {"coding": [{"system": "https://fhir.nhs.uk/Codesystem/http-error-codes","code": "DUPLICATE"}]}, "diagnostics": "The provided identifier: id-id is duplicated"}]}'} + error_msg = { + "statusCode": 422, + "headers": {"Content-Type": "application/fhir+json"}, + "body": '{"resourceType": "OperationOutcome", "id": "5c132d8a-a928-4e0e-8792-0c6456e625c2", "meta": {"profile": ["https://simplifier.net/guide/UKCoreDevelopment2/ProfileUKCore-OperationOutcome"]}, "issue": [{"severity": "error", "code": "exception", "details": {"coding": [{"system": "https://fhir.nhs.uk/Codesystem/http-error-codes","code": "DUPLICATE"}]}, "diagnostics": "The provided identifier: id-id is duplicated"}]}', + } self.controller.update_immunization.return_value = error_msg act_res = update_imms(lambda_event, self.controller) - + # Then self.controller.update_immunization.assert_called_once_with(lambda_event) self.assertEqual(act_res["statusCode"], 422) - self.assertDictEqual(act_res, error_msg) \ No newline at end of file + self.assertDictEqual(act_res, error_msg)