From 34328acae1e8f66b67d37c842dd575eca74585b6 Mon Sep 17 00:00:00 2001 From: Ajay Mudgal Date: Wed, 11 Mar 2026 11:55:10 +0000 Subject: [PATCH 1/8] Retiring send email lambda from DI --- Makefile | 8 - README.md | 2 +- application/send_email/__init__.py | 0 application/send_email/requirements.txt | 1 - application/send_email/send_email.py | 75 --------- application/send_email/tests/__init__.py | 0 .../send_email/tests/test_send_email.py | 149 ------------------ .../reject_pending_changes/pending_changes.py | 87 +--------- .../rejection-email.html | 52 ------ .../service_sync/reject_pending_changes/s3.py | 25 --- .../tests/test_pending_changes.py | 86 ---------- .../reject_pending_changes/tests/test_s3.py | 29 ---- build/automation/var/project.mk | 9 +- .../stacks/application/cloudwatch-metrics.tf | 30 ---- infrastructure/stacks/application/iam.tf | 49 ------ infrastructure/stacks/application/lambda.tf | 44 ------ infrastructure/stacks/application/outputs.tf | 1 - infrastructure/stacks/application/splunk.tf | 8 - .../stacks/application/variables.tf | 22 --- .../cloudwatch-alarms-lambda-error-rate.tf | 47 ------ .../blue-green-link/cloudwatch-alarms.tf | 17 -- .../blue-green-link/cloudwatch-dashboards.tf | 3 - .../stacks/blue-green-link/variables.tf | 5 - .../cloudwatch-queries/cloudwatch-queries.tf | 7 - .../stacks/cloudwatch-queries/variables.tf | 5 - .../build-deploy-test-release-buildspec.yml | 10 -- .../task-env-deploy-and-test-buildspec.yml | 11 -- .../locals.tf | 2 +- infrastructure/stacks/shared-resources/s3.tf | 67 -------- test/integration/steps/functions/aws/s3.py | 35 ---- test/integration/steps/test_steps.py | 12 -- 31 files changed, 5 insertions(+), 893 deletions(-) delete mode 100644 application/send_email/__init__.py delete mode 100644 application/send_email/requirements.txt delete mode 100644 application/send_email/send_email.py delete mode 100644 application/send_email/tests/__init__.py delete mode 100644 
application/send_email/tests/test_send_email.py delete mode 100644 application/service_sync/reject_pending_changes/rejection-email.html delete mode 100644 application/service_sync/reject_pending_changes/s3.py delete mode 100644 application/service_sync/reject_pending_changes/tests/test_s3.py delete mode 100644 infrastructure/stacks/shared-resources/s3.tf delete mode 100644 test/integration/steps/functions/aws/s3.py diff --git a/Makefile b/Makefile index 1af78e340..870c2d76c 100644 --- a/Makefile +++ b/Makefile @@ -78,7 +78,6 @@ UNIT_TEST_ARGS=" \ --volume $(APPLICATION_DIR)/dos_db_update_dlq_handler:/tmp/.packages/dos_db_update_dlq_handler \ --volume $(APPLICATION_DIR)/event_replay:/tmp/.packages/event_replay \ --volume $(APPLICATION_DIR)/ingest_change_event:/tmp/.packages/ingest_change_event \ - --volume $(APPLICATION_DIR)/send_email:/tmp/.packages/send_email \ --volume $(APPLICATION_DIR)/service_matcher:/tmp/.packages/service_matcher \ --volume $(APPLICATION_DIR)/service_sync:/tmp/.packages/service_sync \ --volume $(APPLICATION_DIR)/slack_messenger:/tmp/.packages/slack_messenger \ @@ -159,12 +158,6 @@ event-replay-build-and-deploy: ### Build and deploy event replay lambda docker i ingest-change-event-build-and-deploy: ### Build and deploy ingest change event lambda docker image - mandatory: PROFILE, ENVIRONMENT make build-and-deploy-single-function FUNCTION_NAME=ingest-change-event INGEST_CHANGE_EVENT_VERSION=$(BUILD_TAG) VERSION=$(BUILD_TAG) -# ============================================================================== -# Send Email - -send-email-build-and-deploy: ### Build and deploy send email lambda docker image - mandatory: PROFILE, ENVIRONMENT - make build-and-deploy-single-function FUNCTION_NAME=send-email SEND_EMAIL_VERSION=$(BUILD_TAG) VERSION=$(BUILD_TAG) - # ============================================================================== # Service Matcher @@ -217,7 +210,6 @@ get-lambda-versions-if-empty: [[ -z "$$DOS_DB_UPDATE_DLQ_HANDLER_VERSION" ]] 
&& echo "export DOS_DB_UPDATE_DLQ_HANDLER_VERSION=$$(echo $$VERSIONS | jq -r '.dos_db_update_dlq_handler')" [[ -z "$$EVENT_REPLAY_VERSION" ]] && echo "export EVENT_REPLAY_VERSION=$$(echo $$VERSIONS | jq -r '.event_replay')" [[ -z "$$INGEST_CHANGE_EVENT_VERSION" ]] && echo "export INGEST_CHANGE_EVENT_VERSION=$$(echo $$VERSIONS | jq -r '.ingest_change_event')" - [[ -z "$$SEND_EMAIL_VERSION" ]] && echo "export SEND_EMAIL_VERSION=$$(echo $$VERSIONS | jq -r '.send_email')" [[ -z "$$SERVICE_MATCHER_VERSION" ]] && echo "export SERVICE_MATCHER_VERSION=$$(echo $$VERSIONS | jq -r '.service_matcher')" [[ -z "$$SERVICE_SYNC_VERSION" ]] && echo "export SERVICE_SYNC_VERSION=$$(echo $$VERSIONS | jq -r '.service_sync')" [[ -z "$$SLACK_MESSENGER_VERSION" ]] && echo "export SLACK_MESSENGER_VERSION=$$(echo $$VERSIONS | jq -r '.slack_messenger')" diff --git a/README.md b/README.md index 7b3d41dd6..9d40474ce 100644 --- a/README.md +++ b/README.md @@ -231,7 +231,7 @@ Integration Testing is used to test the functional capabilities of the individua This testing includes: -- No Mocking. Except Emails which are mocked in NonProd +- No Mocking. 
- Check data when passed between components - Meets business needs of the application diff --git a/application/send_email/__init__.py b/application/send_email/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/application/send_email/requirements.txt b/application/send_email/requirements.txt deleted file mode 100644 index cfb1c4805..000000000 --- a/application/send_email/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -aws-lambda-powertools[tracer] ~= 3.20.0 diff --git a/application/send_email/send_email.py b/application/send_email/send_email.py deleted file mode 100644 index 6a19538dc..000000000 --- a/application/send_email/send_email.py +++ /dev/null @@ -1,75 +0,0 @@ -from email.mime.multipart import MIMEMultipart -from email.mime.text import MIMEText -from os import environ -from smtplib import SMTP, SMTPException - -from aws_lambda_powertools.logging import Logger -from aws_lambda_powertools.tracing import Tracer -from aws_lambda_powertools.utilities.typing import LambdaContext - -from common.middlewares import unhandled_exception_logging_hidden_event -from common.secretsmanager import get_secret -from common.types import EmailMessage - -tracer = Tracer() -logger = Logger() - - -@tracer.capture_lambda_handler() -@unhandled_exception_logging_hidden_event -@logger.inject_lambda_context(clear_state=True, correlation_id_path="correlation_id") -def lambda_handler(event: EmailMessage, context: LambdaContext) -> None: # noqa: ARG001 - """Entrypoint handler for the service_sync lambda. 
- - Args: - event (EmailMessage): Lambda function invocation event - context (LambdaContext): Lambda function context object - """ - logger.append_keys(user_id=event["user_id"], change_id=event["change_id"], s3_filename=event["s3_filename"]) - logger.info("Starting send_email lambda") - send_email( - email_address=event["recipient_email_address"], - html_content=event["email_body"], - subject=event["email_subject"], - correlation_id=event["correlation_id"], - ) - - -def send_email(email_address: str, html_content: str, subject: str, correlation_id: str) -> None: - """Send an email to the specified email address. - - Args: - email_address (str): Email address to send the email to - html_content (str): HTML content of the email - subject (str): Subject of the email - correlation_id (str): Correlation ID of the email - """ - aws_account_name = environ["AWS_ACCOUNT_NAME"] - if aws_account_name != "nonprod" or "email" in correlation_id: - logger.info("Preparing to send email") - email_secrets = get_secret(environ["EMAIL_SECRET_NAME"]) - to_email_address = email_address - di_system_email_address = email_secrets["DI_SYSTEM_MAILBOX_ADDRESS"] - di_system_email_password = email_secrets["DI_SYSTEM_MAILBOX_PASSWORD"] - msg = MIMEMultipart("alternative") - msg["Subject"] = subject - msg.attach(MIMEText(html_content, "html")) - logger.info("Email content prepared") - try: - # Don't log any variables that contain PID or password - smtp = SMTP(host="smtp.office365.com", port=587, timeout=15) - logger.info("Connected to SMTP server") - smtp.ehlo() - logger.info("Sent EHLO") - smtp.starttls() - logger.info("Started TLS") - smtp.login(di_system_email_address, di_system_email_password) - logger.info("Logged in to SMTP server") - smtp.sendmail(from_addr=di_system_email_address, to_addrs=[to_email_address], msg=msg.as_string()) - logger.warning("Sent email", cloudwatch_metric_filter_matching_attribute="EmailSent") - smtp.quit() - logger.info("Disconnected from SMTP server") - except 
BaseException: - logger.exception("Email failed", cloudwatch_metric_filter_matching_attribute="EmailFailed") - msg = "An error occurred while sending the email" - raise SMTPException(msg) from None diff --git a/application/send_email/tests/__init__.py b/application/send_email/tests/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/application/send_email/tests/test_send_email.py b/application/send_email/tests/test_send_email.py deleted file mode 100644 index aa1abb91e..000000000 --- a/application/send_email/tests/test_send_email.py +++ /dev/null @@ -1,149 +0,0 @@ -from os import environ -from smtplib import SMTPException -from unittest.mock import MagicMock, patch - -import pytest -from aws_lambda_powertools.utilities.typing import LambdaContext - -from application.send_email.send_email import lambda_handler, send_email -from common.types import EmailMessage - -FILE_PATH = "application.send_email.send_email" -BUCKET = "bucket" -KEY = "key" -CORRELATION_ID = "correlation_id" -RECIPIENT_EMAIL_ADDRESS = "recipient_email_address" -EMAIL_BODY = "This is the email body" -EMAIL_SUBJECT = "Subject of email" -EVENT = EmailMessage( - correlation_id=CORRELATION_ID, - recipient_email_address=RECIPIENT_EMAIL_ADDRESS, - email_body=EMAIL_BODY, - email_subject=EMAIL_SUBJECT, - user_id="user_id", - change_id="change_id", - s3_filename="s3_filename", -) - - -@patch(f"{FILE_PATH}.send_email") -def test_lambda_handler(mock_send_email: MagicMock, lambda_context: LambdaContext) -> None: - # Arrange - event = EVENT.copy() - # Act - response = lambda_handler(event, lambda_context) - # Assert - assert response is None - mock_send_email.assert_called_once_with( - email_address=event["recipient_email_address"], - html_content=event["email_body"], - subject=event["email_subject"], - correlation_id=event["correlation_id"], - ) - - -@patch(f"{FILE_PATH}.MIMEMultipart") -@patch(f"{FILE_PATH}.SMTP") -@patch(f"{FILE_PATH}.get_secret") -def test_send_email( - 
mock_get_secret: MagicMock, - mock_smtp: MagicMock, - mock_mime_multipart: MagicMock, -) -> None: - # Arrange - environ["AWS_ACCOUNT_NAME"] = "test" - environ["EMAIL_SECRET_NAME"] = secret_name = "mock_secret_name" - di_team_mailbox_address = "di_team_mailbox_address" - di_system_mailbox_address = "di_system_mailbox_address" - di_system_mailbox_password = "di_system_mailbox_password" - mock_get_secret.return_value = { - "DI_TEAM_MAILBOX_ADDRESS": di_team_mailbox_address, - "DI_SYSTEM_MAILBOX_ADDRESS": di_system_mailbox_address, - "DI_SYSTEM_MAILBOX_PASSWORD": di_system_mailbox_password, - } - # Act - response = send_email( - email_address=RECIPIENT_EMAIL_ADDRESS, - html_content=EMAIL_BODY, - subject=EMAIL_SUBJECT, - correlation_id=CORRELATION_ID, - ) - # Assert - assert response is None - mock_get_secret.assert_called_once_with(secret_name) - mock_smtp.assert_called_once_with(host="smtp.office365.com", port=587, timeout=15) - mock_smtp.return_value.ehlo.assert_called_once() - mock_smtp.return_value.starttls.assert_called_once() - mock_smtp.return_value.login.assert_called_once_with(di_system_mailbox_address, di_system_mailbox_password) - mock_smtp.return_value.sendmail.assert_called_once_with( - from_addr=di_system_mailbox_address, - to_addrs=[RECIPIENT_EMAIL_ADDRESS], - msg=mock_mime_multipart.return_value.as_string.return_value, - ) - mock_smtp.return_value.quit.assert_called_once() - # Clean up - del environ["AWS_ACCOUNT_NAME"] - del environ["EMAIL_SECRET_NAME"] - - -@patch(f"{FILE_PATH}.MIMEMultipart") -@patch(f"{FILE_PATH}.SMTP") -@patch(f"{FILE_PATH}.get_secret") -def test_send_email_nonprod(mock_get_secret: MagicMock, mock_smtp: MagicMock, mock_mime_multipart: MagicMock) -> None: - # Arrange - environ["AWS_ACCOUNT_NAME"] = "nonprod" - # Act - response = send_email( - email_address=RECIPIENT_EMAIL_ADDRESS, - html_content=EMAIL_BODY, - subject=EMAIL_SUBJECT, - correlation_id=CORRELATION_ID, - ) - # Assert - assert response is None - 
mock_get_secret.assert_not_called() - mock_smtp.assert_not_called() - mock_mime_multipart.assert_not_called() - # Clean up - del environ["AWS_ACCOUNT_NAME"] - - -@patch(f"{FILE_PATH}.MIMEMultipart") -@patch(f"{FILE_PATH}.SMTP") -@patch(f"{FILE_PATH}.get_secret") -def test_send_email_exception( - mock_get_secret: MagicMock, - mock_smtp: MagicMock, - mock_mime_multipart: MagicMock, -) -> None: - # Arrange - environ["AWS_ACCOUNT_NAME"] = "test" - environ["EMAIL_SECRET_NAME"] = secret_name = "mock_secret_name" - di_team_mailbox_address = "di_team_mailbox_address" - di_system_mailbox_address = "di_system_mailbox_address" - di_system_mailbox_password = "di_system_mailbox_password" - mock_get_secret.return_value = { - "DI_TEAM_MAILBOX_ADDRESS": di_team_mailbox_address, - "DI_SYSTEM_MAILBOX_ADDRESS": di_system_mailbox_address, - "DI_SYSTEM_MAILBOX_PASSWORD": di_system_mailbox_password, - } - mock_smtp.return_value.ehlo.side_effect = SMTPException() - # Act - with pytest.raises(SMTPException, match="An error occurred while sending the email"): - send_email( - email_address=RECIPIENT_EMAIL_ADDRESS, - html_content=EMAIL_BODY, - subject=EMAIL_SUBJECT, - correlation_id=CORRELATION_ID, - ) - # Assert - mock_get_secret.assert_called_once_with(secret_name) - mock_smtp.assert_called_once_with(host="smtp.office365.com", port=587, timeout=15) - mock_smtp.return_value.ehlo.assert_called_once() - mock_smtp.return_value.starttls.assert_not_called() - mock_smtp.return_value.login.assert_not_called() - mock_smtp.return_value.sendmail.assert_not_called() - mock_smtp.return_value.quit.assert_not_called() - # Clean up - del environ["AWS_ACCOUNT_NAME"] - del environ["EMAIL_SECRET_NAME"] diff --git a/application/service_sync/reject_pending_changes/pending_changes.py b/application/service_sync/reject_pending_changes/pending_changes.py index fa813f06b..3e129f3db 100644 --- a/application/service_sync/reject_pending_changes/pending_changes.py +++ 
b/application/service_sync/reject_pending_changes/pending_changes.py @@ -12,7 +12,6 @@ from psycopg.rows import DictRow from ..service_update_logger import ServiceUpdateLogger -from .s3 import put_content_to_s3 from common.constants import DI_CHANGE_ITEMS, DOS_INTEGRATION_USER_NAME from common.dos_db_connection import connect_to_db_writer, query_dos_db from common.types import EmailFile, EmailMessage @@ -91,8 +90,7 @@ def check_and_remove_pending_dos_changes(service_id: str) -> None: reject_pending_changes(connection=connection, pending_changes=pending_changes) connection.commit() log_rejected_changes(pending_changes) - send_rejection_emails(pending_changes) - logger.info("All pending changes rejected and emails sent") + logger.info("All pending changes rejected") else: logger.info("No valid pending changes found") @@ -173,85 +171,4 @@ def log_rejected_changes(pending_changes: list[PendingChange]) -> None: type_id=pending_change.typeid, odscode="", ).log_rejected_change(pending_change.id) - - -def send_rejection_emails(pending_changes: list[PendingChange]) -> None: - """Sends rejection emails to the users who created the pending changes. 
- - Args: - pending_changes (List[PendingChange]): The pending changes to send rejection emails for - """ - subject = "Your DoS Change has been rejected" - for pending_change in pending_changes: - file_name = f"rejection-emails/rejection-email-{time_ns()}.json" - file_contents = build_change_rejection_email_contents(pending_change, file_name) - correlation_id: str = logger.get_correlation_id() - email_file = EmailFile( - correlation_id=correlation_id, - email_body=file_contents, - email_subject=subject, - user_id=pending_change.user_id, - ) - logger.info("Email file created", subject=subject, user_id=pending_change.user_id) - put_content_to_s3(content=dumps(email_file), s3_filename=file_name) - logger.info("File contents uploaded to S3") - file_contents = file_contents.replace("{{InitiatorName}}", pending_change.creatorsname) - message = EmailMessage( - change_id=pending_change.id, - correlation_id=correlation_id, - email_body=file_contents, - email_subject=subject, - recipient_email_address=pending_change.email, - s3_filename=file_name, - user_id=pending_change.user_id, - ) - logger.debug("Email message created") - client("lambda").invoke( - FunctionName=environ["SEND_EMAIL_LAMBDA"], - InvocationType="Event", - Payload=dumps(message), - ) - logger.info("Send email lambda invoked") - - -def build_change_rejection_email_contents(pending_change: PendingChange, file_name: str) -> str: - """Builds the contents of the change rejection email. 
- - Args: - pending_change (PendingChange): The pending change to build the email for - file_name (str): The name of the file to upload to S3 - - Returns: - str: The contents of the email - """ - with open("service_sync/reject_pending_changes/rejection-email.html") as email_template: - file_contents = email_template.read() - email_template.close() - email_correlation_id = f"{pending_change.uid}-{time_ns()}" - file_contents = file_contents.replace("{{ServiceName}}", pending_change.name) - file_contents = file_contents.replace("{{ServiceUid}}", pending_change.uid) - file_contents = file_contents.replace("{{EmailCorrelationId}}", email_correlation_id) - file_contents = file_contents.replace("{{DiTeamEmail}}", environ.get("TEAM_EMAIL_ADDRESS", "")) - logger.info("Email Correlation Id", email_correlation_id=email_correlation_id, file_name=file_name) - json_value = loads(pending_change.value) - for change_key, value in json_value["new"].items(): - # Add a new change row to the table in the email - row = TABLE_ROW - row = row.replace("{{change_key}}", change_key) - row = row.replace("{{previous}}", str(value.get("previous"))) - row = row.replace("{{new}}", str(value.get("data"))) - file_contents = file_contents.replace("{{row}}", row) - # Remove the placeholder row - file_contents = file_contents.replace("{{row}}", " ") - # Remove the \n characters from the HTML - return file_contents.replace("\n", " ") - - -TABLE_ROW: str = """ - - {{change_key}} - {{previous}} - {{new}} - - {{row}} - """ + \ No newline at end of file diff --git a/application/service_sync/reject_pending_changes/rejection-email.html b/application/service_sync/reject_pending_changes/rejection-email.html deleted file mode 100644 index cdf0025b7..000000000 --- a/application/service_sync/reject_pending_changes/rejection-email.html +++ /dev/null @@ -1,52 +0,0 @@ - - - - DoS Change Rejection Email - - - - -

Dear {{InitiatorName}},

-
- -

A change that you made in the DoS has been rejected automatically. This happened because there was a conflict with a new change from DoS Integration.

-

The service is: {{ServiceName}}

-

The service uid: {{ServiceUid}}

-
-

Your rejected change was:

- - - - - - - - - - - - - {{row}} - -
Change KeyData before requested changeProposed Change
- -

Please visit DoS to view the new change. If no changes are present DoS Integration considers DoS data to be correct

-
-

Your DoS Integration Email Correlation Id is: {{EmailCorrelationId}}

-
-

Kind regards,

-

DoS Integration

-

(Please do not reply to this email as the mailbox is not monitored. If you have any queries then please email {{DiTeamEmail}} directly)

- - diff --git a/application/service_sync/reject_pending_changes/s3.py b/application/service_sync/reject_pending_changes/s3.py deleted file mode 100644 index 86de8095b..000000000 --- a/application/service_sync/reject_pending_changes/s3.py +++ /dev/null @@ -1,25 +0,0 @@ -from os import getenv - -from aws_lambda_powertools.logging import Logger -from boto3 import client - -logger = Logger(child=True) - - -def put_content_to_s3(content: bytes, s3_filename: str) -> None: - """Upload a file contents to S3. - - Args: - content (bytes): File contents - s3_filename (str): The filename when the file is stored in S3 - """ - bucket = getenv("SEND_EMAIL_BUCKET_NAME") - aws_account_id = getenv("AWS_ACCOUNT_ID") - client("s3").put_object( - Body=content, - Bucket=bucket, - Key=s3_filename, - ServerSideEncryption="AES256", - ExpectedBucketOwner=aws_account_id, - ) - logger.info(f"Uploaded to S3 as {s3_filename}", bucket=bucket, s3_filename=s3_filename) diff --git a/application/service_sync/reject_pending_changes/tests/test_pending_changes.py b/application/service_sync/reject_pending_changes/tests/test_pending_changes.py index 8530696c4..c0d5271bf 100644 --- a/application/service_sync/reject_pending_changes/tests/test_pending_changes.py +++ b/application/service_sync/reject_pending_changes/tests/test_pending_changes.py @@ -8,12 +8,10 @@ from application.service_sync.reject_pending_changes.pending_changes import ( PendingChange, - build_change_rejection_email_contents, check_and_remove_pending_dos_changes, get_pending_changes, log_rejected_changes, reject_pending_changes, - send_rejection_emails, ) FILE_PATH = "application.service_sync.reject_pending_changes.pending_changes" @@ -97,7 +95,6 @@ def test_pending_change_is_valid_exception() -> None: assert False is is_valid -@patch(f"{FILE_PATH}.send_rejection_emails") @patch(f"{FILE_PATH}.log_rejected_changes") @patch(f"{FILE_PATH}.reject_pending_changes") @patch(f"{FILE_PATH}.get_pending_changes") @@ -107,7 +104,6 @@ def 
test_check_and_remove_pending_dos_changes( mock_get_pending_changes: MagicMock, mock_reject_pending_changes: MagicMock, mock_log_rejected_changes: MagicMock, - mock_send_rejection_emails: MagicMock, ) -> None: # Arrange service_id = "test" @@ -126,10 +122,8 @@ def test_check_and_remove_pending_dos_changes( pending_changes=get_pending_changes_response, ) mock_log_rejected_changes.assert_called_once_with(get_pending_changes_response) - mock_send_rejection_emails.assert_called_once_with(get_pending_changes_response) -@patch(f"{FILE_PATH}.send_rejection_emails") @patch(f"{FILE_PATH}.log_rejected_changes") @patch(f"{FILE_PATH}.reject_pending_changes") @patch(f"{FILE_PATH}.get_pending_changes") @@ -139,7 +133,6 @@ def test_check_and_remove_pending_dos_changes_no_pending_changes( mock_get_pending_changes: MagicMock, mock_reject_pending_changes: MagicMock, mock_log_rejected_changes: MagicMock, - mock_send_rejection_emails: MagicMock, ) -> None: # Arrange service_id = "test" @@ -155,10 +148,8 @@ def test_check_and_remove_pending_dos_changes_no_pending_changes( ) mock_reject_pending_changes.assert_not_called() mock_log_rejected_changes.assert_not_called() - mock_send_rejection_emails.assert_not_called() -@patch(f"{FILE_PATH}.send_rejection_emails") @patch(f"{FILE_PATH}.log_rejected_changes") @patch(f"{FILE_PATH}.reject_pending_changes") @patch(f"{FILE_PATH}.get_pending_changes") @@ -168,7 +159,6 @@ def test_check_and_remove_pending_dos_changes_invalid_changes( mock_get_pending_changes: MagicMock, mock_reject_pending_changes: MagicMock, mock_log_rejected_changes: MagicMock, - mock_send_rejection_emails: MagicMock, ) -> None: # Arrange service_id = "test" @@ -184,7 +174,6 @@ def test_check_and_remove_pending_dos_changes_invalid_changes( ) mock_reject_pending_changes.assert_not_called() mock_log_rejected_changes.assert_not_called() - mock_send_rejection_emails.assert_not_called() @patch(f"{FILE_PATH}.PendingChange.__repr__") @@ -327,78 +316,3 @@ def 
test_log_rejected_changes(capsys: pytest.CaptureFixture) -> None: "execution_time=NULL" ) in captured.err - -@patch(f"{FILE_PATH}.client") -@patch(f"{FILE_PATH}.EmailMessage") -@patch(f"{FILE_PATH}.build_change_rejection_email_contents") -@patch(f"{FILE_PATH}.time_ns") -@patch(f"{FILE_PATH}.dumps") -@patch("builtins.open") -@patch(f"{FILE_PATH}.put_content_to_s3") -def test_send_rejection_emails( - mock_put_content_to_s3: MagicMock, - mock_open: MagicMock, - mock_dumps: MagicMock, - mock_time_ns: MagicMock, - mock_build_change_rejection_email_contents: MagicMock, - mock_email_message: MagicMock, - mock_client: MagicMock, -) -> None: - # Arrange - environ["SEND_EMAIL_LAMBDA"] = send_email_lambda_name = "test" - pending_change = PendingChange(ROW) - pending_changes = [pending_change] - mock_build_change_rejection_email_contents.return_value = file_contents = "test" - expected_subject = "Your DoS Change has been rejected" - # Act - response = send_rejection_emails(pending_changes) - # Assert - assert None is response - mock_dumps.assert_has_calls( - calls=[ - call( - { - "correlation_id": None, - "user_id": pending_change.user_id, - "email_body": mock_build_change_rejection_email_contents.return_value, - "email_subject": expected_subject, - }, - ), - call(mock_email_message.return_value), - ], - ) - mock_put_content_to_s3.assert_called_once_with( - content=mock_dumps.return_value, - s3_filename=f"rejection-emails/rejection-email-{mock_time_ns.return_value}.json", - ) - mock_email_message.assert_called_once_with( - change_id=pending_change.id, - correlation_id=None, - recipient_email_address=pending_change.email, - email_body=file_contents, - email_subject=expected_subject, - s3_filename=f"rejection-emails/rejection-email-{mock_time_ns.return_value}.json", - user_id=pending_change.user_id, - ) - mock_client.assert_called_once_with("lambda") - mock_client.return_value.invoke.assert_called_once_with( - FunctionName=send_email_lambda_name, - InvocationType="Event", - 
Payload=mock_dumps.return_value, - ) - # Cleanup - del environ["SEND_EMAIL_LAMBDA"] - - -@patch("builtins.open") -def test_build_change_rejection_email_contents(mock_open: MagicMock) -> None: - # Arrange - pending_change = PendingChange(ROW) - pending_change.value = '{"new":{"cmsurl":{"previous":"test.com","data":"https://www.test.com"}}}' - # Act - response = build_change_rejection_email_contents(pending_change, "test_file") - # Assert - assert ( - response - == mock_open.return_value.__enter__.return_value.read.return_value.replace.return_value.replace.return_value.replace.return_value.replace.return_value.replace.return_value.replace.return_value.replace.return_value # noqa: E501 - ) diff --git a/application/service_sync/reject_pending_changes/tests/test_s3.py b/application/service_sync/reject_pending_changes/tests/test_s3.py deleted file mode 100644 index 17c1bccd1..000000000 --- a/application/service_sync/reject_pending_changes/tests/test_s3.py +++ /dev/null @@ -1,29 +0,0 @@ -from os import environ -from unittest.mock import MagicMock, patch - -from application.service_sync.reject_pending_changes.s3 import put_content_to_s3 - -FILE_PATH = "application.service_sync.reject_pending_changes.s3" - - -@patch(f"{FILE_PATH}.client") -def test_put_content_to_s3(mock_client: MagicMock) -> None: - # Arrange - environ["SEND_EMAIL_BUCKET_NAME"] = bucket_name = "bucket_name" - environ["AWS_ACCOUNT_ID"] = aws_account_id = "123456789012" - s3_filename = "s3_filename" - content = b"content" - # Act - put_content_to_s3(content, s3_filename) - # Assert - mock_client.assert_called_once_with("s3") - mock_client.return_value.put_object.assert_called_once_with( - Body=content, - Bucket=bucket_name, - Key=s3_filename, - ServerSideEncryption="AES256", - ExpectedBucketOwner=aws_account_id, - ) - # Cleanup - del environ["SEND_EMAIL_BUCKET_NAME"] - del environ["AWS_ACCOUNT_ID"] diff --git a/build/automation/var/project.mk b/build/automation/var/project.mk index 3a4961307..74495ad59 
100644 --- a/build/automation/var/project.mk +++ b/build/automation/var/project.mk @@ -14,7 +14,7 @@ SERVICE_TAG = $(PROJECT_GROUP_SHORT) SERVICE_TAG_COMMON = texas PROJECT_TECH_STACK_LIST = python,terraform -PROJECT_LAMBDAS_LIST = $(CHANGE_EVENT_DLQ_HANDLER),$(DOS_DB_HANDLER),$(DOS_DB_UPDATE_DLQ_HANDLER),$(EVENT_REPLAY),$(INGEST_CHANGE_EVENT),$(SEND_EMAIL),$(SERVICE_MATCHER),$(SERVICE_SYNC),$(SLACK_MESSENGER),$(QUALITY_CHECKER) +PROJECT_LAMBDAS_LIST = $(CHANGE_EVENT_DLQ_HANDLER),$(DOS_DB_HANDLER),$(DOS_DB_UPDATE_DLQ_HANDLER),$(EVENT_REPLAY),$(INGEST_CHANGE_EVENT),$(SERVICE_MATCHER),$(SERVICE_SYNC),$(SLACK_MESSENGER),$(QUALITY_CHECKER) AWS_VPC_NAME = lk8s-$(AWS_ACCOUNT_NAME).texasplatform.uk TF_VAR_aws_vpc_name = $(AWS_VPC_NAME) @@ -82,7 +82,6 @@ DOS_DB_HANDLER := dos-db-handler DOS_DB_UPDATE_DLQ_HANDLER := dos-db-update-dlq-handler EVENT_REPLAY := event-replay INGEST_CHANGE_EVENT := ingest-change-event -SEND_EMAIL := send-email SERVICE_MATCHER := service-matcher SERVICE_SYNC := service-sync SLACK_MESSENGER := slack-messenger @@ -164,7 +163,6 @@ TF_VAR_dos_db_handler := $(DOS_DB_HANDLER) TF_VAR_dos_db_update_dlq_handler := $(DOS_DB_UPDATE_DLQ_HANDLER) TF_VAR_event_replay := $(EVENT_REPLAY) TF_VAR_ingest_change_event := $(INGEST_CHANGE_EVENT) -TF_VAR_send_email := $(SEND_EMAIL) TF_VAR_service_matcher := $(SERVICE_MATCHER) TF_VAR_service_sync := $(SERVICE_SYNC) TF_VAR_slack_messenger := $(SLACK_MESSENGER) @@ -176,7 +174,6 @@ DOS_DB_HANDLER_LAMBDA := $(PROJECT_ID)-$(BLUE_GREEN_ENVIRONMENT)-$(DOS_DB_HANDLE DOS_DB_UPDATE_DLQ_HANDLER_LAMBDA := $(PROJECT_ID)-$(BLUE_GREEN_ENVIRONMENT)-$(DOS_DB_UPDATE_DLQ_HANDLER) EVENT_REPLAY_LAMBDA := $(PROJECT_ID)-$(BLUE_GREEN_ENVIRONMENT)-$(EVENT_REPLAY) INGEST_CHANGE_EVENT_LAMBDA := $(PROJECT_ID)-$(BLUE_GREEN_ENVIRONMENT)-$(INGEST_CHANGE_EVENT) -SEND_EMAIL_LAMBDA := $(PROJECT_ID)-$(BLUE_GREEN_ENVIRONMENT)-$(SEND_EMAIL) SERVICE_MATCHER_LAMBDA := $(PROJECT_ID)-$(BLUE_GREEN_ENVIRONMENT)-$(SERVICE_MATCHER) SERVICE_SYNC_LAMBDA := 
$(PROJECT_ID)-$(BLUE_GREEN_ENVIRONMENT)-$(SERVICE_SYNC) SLACK_MESSENGER_LAMBDA := $(PROJECT_ID)-$(BLUE_GREEN_ENVIRONMENT)-$(SLACK_MESSENGER) @@ -187,7 +184,6 @@ TF_VAR_dos_db_handler_lambda := $(DOS_DB_HANDLER_LAMBDA) TF_VAR_dos_db_update_dlq_handler_lambda := $(DOS_DB_UPDATE_DLQ_HANDLER_LAMBDA) TF_VAR_event_replay_lambda := $(EVENT_REPLAY_LAMBDA) TF_VAR_ingest_change_event_lambda := $(INGEST_CHANGE_EVENT_LAMBDA) -TF_VAR_send_email_lambda := $(SEND_EMAIL_LAMBDA) TF_VAR_service_matcher_lambda := $(SERVICE_MATCHER_LAMBDA) TF_VAR_service_sync_lambda := $(SERVICE_SYNC_LAMBDA) TF_VAR_slack_messenger_lambda := $(SLACK_MESSENGER_LAMBDA) @@ -199,7 +195,6 @@ TF_VAR_dos_db_handler_version := $(or $(DOS_DB_HANDLER_VERSION), $(VERSION)) TF_VAR_dos_db_update_dlq_handler_version := $(or $(DOS_DB_UPDATE_DLQ_HANDLER_VERSION), $(VERSION)) TF_VAR_event_replay_version := $(or $(EVENT_REPLAY_VERSION), $(VERSION)) TF_VAR_ingest_change_event_version := $(or $(INGEST_CHANGE_EVENT_VERSION), $(VERSION)) -TF_VAR_send_email_version := $(or $(SEND_EMAIL_VERSION), $(VERSION)) TF_VAR_service_matcher_version := $(or $(SERVICE_MATCHER_VERSION), $(VERSION)) TF_VAR_service_sync_version := $(or $(SERVICE_SYNC_VERSION), $(VERSION)) TF_VAR_slack_messenger_version := $(or $(SLACK_MESSENGER_VERSION), $(VERSION)) @@ -210,7 +205,6 @@ TF_VAR_dos_db_handler_role := $(DOS_DB_HANDLER_LAMBDA_ROLE_NAME) TF_VAR_dos_db_update_dlq_handler_role := $(DOS_DB_UPDATE_DLQ_HANDLER_LAMBDA_ROLE_NAME) TF_VAR_event_replay_role := $(EVENT_REPLAY_LAMBDA_ROLE_NAME) TF_VAR_ingest_change_event_role := $(INGEST_CHANGE_EVENT_LAMBDA_ROLE_NAME) -TF_VAR_send_email_role := $(SEND_EMAIL_LAMBDA_ROLE_NAME) TF_VAR_service_matcher_role := $(SERVICE_MATCHER_LAMBDA_ROLE_NAME) TF_VAR_service_sync_role := $(SERVICE_SYNC_LAMBDA_ROLE_NAME) TF_VAR_slack_messenger_role := $(SLACK_MESSENGER_LAMBDA_ROLE_NAME) @@ -221,7 +215,6 @@ TF_VAR_change_event_dlq_handler_subscription_filter_name := $(CHANGE_EVENT_DLQ_H 
TF_VAR_dos_db_update_dlq_handler_subscription_filter_name := $(DOS_DB_HANDLER_LAMBDA)-cw-logs-firehose-subscription TF_VAR_event_replay_subscription_filter_name := $(EVENT_REPLAY_LAMBDA)-cw-logs-firehose-subscription TF_VAR_ingest_change_event_subscription_filter_name := $(INGEST_CHANGE_EVENT_LAMBDA)-cw-logs-firehose-subscription -TF_VAR_send_email_subscription_filter_name := $(SEND_EMAIL_LAMBDA)-cw-logs-firehose-subscription TF_VAR_service_matcher_subscription_filter_name := $(SERVICE_MATCHER_LAMBDA)-cw-logs-firehose-subscription TF_VAR_service_sync_di_subscription_filter_name := $(SERVICE_SYNC_LAMBDA)-di-cw-logs-firehose-subscription TF_VAR_service_sync_dos_subscription_filter_name := $(SERVICE_SYNC_LAMBDA)-dos-cw-logs-firehose-subscription diff --git a/infrastructure/stacks/application/cloudwatch-metrics.tf b/infrastructure/stacks/application/cloudwatch-metrics.tf index c2a6b8a49..18811af44 100644 --- a/infrastructure/stacks/application/cloudwatch-metrics.tf +++ b/infrastructure/stacks/application/cloudwatch-metrics.tf @@ -89,36 +89,6 @@ resource "aws_cloudwatch_log_metric_filter" "quality_checker_errored" { } } -resource "aws_cloudwatch_log_metric_filter" "email_sent" { - name = "${var.project_id}-${var.blue_green_environment}-email-sent" - pattern = "{ $.cloudwatch_metric_filter_matching_attribute = \"EmailSent\" }" - log_group_name = module.send_email_lambda.lambda_cloudwatch_log_group_name - - metric_transformation { - name = "EmailSent" - namespace = "uec-dos-int" - value = "1" - dimensions = { - environment = "$.environment" - } - } -} - -resource "aws_cloudwatch_log_metric_filter" "email_failed" { - name = "${var.project_id}-${var.blue_green_environment}-email-failed" - pattern = "{ $.cloudwatch_metric_filter_matching_attribute = \"EmailFailed\" }" - log_group_name = module.send_email_lambda.lambda_cloudwatch_log_group_name - - metric_transformation { - name = "EmailFailed" - namespace = "uec-dos-int" - value = "1" - dimensions = { - environment = 
"$.environment" - } - } -} - resource "aws_cloudwatch_log_metric_filter" "invalid_open_times" { name = "${var.project_id}-${var.blue_green_environment}-invalid-open-times" pattern = "{ $.cloudwatch_metric_filter_matching_attribute = \"InvalidOpenTimes\" }" diff --git a/infrastructure/stacks/application/iam.tf b/infrastructure/stacks/application/iam.tf index b2b87377c..97652b3dd 100644 --- a/infrastructure/stacks/application/iam.tf +++ b/infrastructure/stacks/application/iam.tf @@ -206,37 +206,6 @@ data "aws_iam_policy_document" "quality_checker_policy" { } } -data "aws_iam_policy_document" "send_email_policy" { - statement { - effect = "Allow" - actions = [ - "kms:Decrypt", - ] - resources = [ - data.aws_kms_key.signing_key.arn, - ] - } - statement { - effect = "Allow" - actions = [ - "s3:GetObject", - ] - resources = [ - "arn:aws:s3:::${var.send_email_bucket_name}", - "arn:aws:s3:::${var.send_email_bucket_name}/*", - ] - } - statement { - effect = "Allow" - actions = [ - "secretsmanager:GetSecretValue", - ] - resources = [ - "arn:aws:secretsmanager:${var.aws_region}:${var.aws_account_id}:secret:${var.project_deployment_secrets}", - ] - } -} - data "aws_iam_policy_document" "service_matcher_policy" { statement { effect = "Allow" @@ -349,24 +318,6 @@ data "aws_iam_policy_document" "service_sync_policy" { "arn:aws:dynamodb:${var.aws_region}:${var.aws_account_id}:table/${var.change_events_table_name}/index/gsi_ods_sequence", ] } - statement { - effect = "Allow" - actions = [ - "s3:PutObject", - ] - resources = [ - "arn:aws:s3:::${var.send_email_bucket_name}/*", - ] - } - statement { - effect = "Allow" - actions = [ - "lambda:InvokeFunction", - ] - resources = [ - "arn:aws:lambda:${var.aws_region}:${var.aws_account_id}:function:${var.send_email_lambda}", - ] - } } data "aws_iam_policy_document" "slack_messenger_policy" { diff --git a/infrastructure/stacks/application/lambda.tf b/infrastructure/stacks/application/lambda.tf index 82e64d6e8..5f7ea3cb7 100644 --- 
a/infrastructure/stacks/application/lambda.tf +++ b/infrastructure/stacks/application/lambda.tf @@ -214,48 +214,6 @@ module "ingest_change_event_lambda" { } } -module "send_email_lambda" { - source = "terraform-aws-modules/lambda/aws" - version = "v8.1.0" - - function_name = var.send_email_lambda - description = "Send Email lambda" - - create_package = false - image_uri = "${var.docker_registry}/${var.send_email}:${var.send_email_version}" - package_type = "Image" - timeout = 30 - memory_size = 128 - architectures = ["arm64"] - kms_key_arn = data.aws_kms_key.signing_key.arn - tracing_mode = "Active" - maximum_retry_attempts = 2 - - cloudwatch_logs_kms_key_id = data.aws_kms_key.signing_key.arn - cloudwatch_logs_retention_in_days = 30 - - role_name = "${var.send_email_lambda}-role" - role_description = "Role for Lambda function ${var.send_email_lambda}" - - attach_policy_json = true - policy_json = data.aws_iam_policy_document.send_email_policy.json - - environment_variables = { - "PROFILE" = var.profile - "ENVIRONMENT" = var.blue_green_environment - "SHARED_ENVIRONMENT" = var.shared_environment - "POWERTOOLS_SERVICE_NAME" = var.lambda_powertools_service_name - "POWERTOOLS_TRACER_CAPTURE_RESPONSE" = true - "POWERTOOLS_TRACER_CAPTURE_ERROR" = true - "POWERTOOLS_TRACE_MIDDLEWARES" = true - "LOG_LEVEL" = var.log_level - "IMAGE_VERSION" = var.send_email_version - "AWS_ACCOUNT_NAME" = var.aws_account_name - "SYSTEM_EMAIL_ADDRESS" = local.project_system_email_address - "EMAIL_SECRET_NAME" = var.project_deployment_secrets - } -} - module "service_matcher_lambda" { source = "terraform-aws-modules/lambda/aws" version = "v8.1.0" @@ -362,10 +320,8 @@ module "service_sync_lambda" { "DB_WRITER_SECRET_NAME" = var.dos_db_writer_secret_name "DB_WRITER_SECRET_KEY" = var.dos_db_writer_secret_key "DB_READ_AND_WRITE_USER_NAME" = local.dos_db_read_and_write_user_name - "SEND_EMAIL_BUCKET_NAME" = var.send_email_bucket_name "TEAM_EMAIL_ADDRESS" = local.project_team_email_address 
"SYSTEM_EMAIL_ADDRESS" = local.project_system_email_address - "SEND_EMAIL_LAMBDA_NAME" = var.send_email_lambda "AWS_ACCOUNT_ID" = tostring(var.aws_account_id) } } diff --git a/infrastructure/stacks/application/outputs.tf b/infrastructure/stacks/application/outputs.tf index f37111580..a885e1d74 100644 --- a/infrastructure/stacks/application/outputs.tf +++ b/infrastructure/stacks/application/outputs.tf @@ -5,7 +5,6 @@ output "lambda_versions" { "dos_db_update_dlq_handler" = var.dos_db_update_dlq_handler_version "event_replay" = var.event_replay_version "ingest_change_event" = var.ingest_change_event_version - "send_email" = var.send_email_version "service_matcher" = var.service_matcher_version "service_sync" = var.service_sync_version "slack_messenger" = var.slack_messenger_version diff --git a/infrastructure/stacks/application/splunk.tf b/infrastructure/stacks/application/splunk.tf index 63f5d40f1..9c40f2c91 100644 --- a/infrastructure/stacks/application/splunk.tf +++ b/infrastructure/stacks/application/splunk.tf @@ -30,14 +30,6 @@ resource "aws_cloudwatch_log_subscription_filter" "ingest_change_event_logs_subs destination_arn = data.aws_kinesis_firehose_delivery_stream.dos_integration_firehose.arn } -resource "aws_cloudwatch_log_subscription_filter" "send_email_logs_subscription_filter" { - name = var.send_email_subscription_filter_name - role_arn = data.aws_iam_role.di_firehose_role.arn - log_group_name = module.send_email_lambda.lambda_cloudwatch_log_group_name - filter_pattern = "{ $.level = \"ERROR\" || $.level = \"WARNING\" || $.level = \"CRITICAL\" }" - destination_arn = data.aws_kinesis_firehose_delivery_stream.dos_integration_firehose.arn -} - resource "aws_cloudwatch_log_subscription_filter" "service_matcher_logs_subscription_filter" { name = var.service_matcher_subscription_filter_name role_arn = data.aws_iam_role.di_firehose_role.arn diff --git a/infrastructure/stacks/application/variables.tf b/infrastructure/stacks/application/variables.tf index 
a64461651..9ea8325fc 100644 --- a/infrastructure/stacks/application/variables.tf +++ b/infrastructure/stacks/application/variables.tf @@ -177,13 +177,6 @@ variable "slack_messenger_subscription_filter_name" { description = "Log filter name for slack messenger lambda" } - -variable "send_email_subscription_filter_name" { - type = string - - description = "Log filter name for send email lambda" -} - variable "ingest_change_event_subscription_filter_name" { type = string description = "Log filter name for ingest change event lambda" @@ -243,11 +236,6 @@ variable "ingest_change_event_lambda" { description = "Name of ingest change event lambda" } -variable "send_email_lambda" { - type = string - description = "Name of send email lambda" -} - variable "service_matcher_lambda" { type = string description = "Name of event processor lambda" @@ -297,11 +285,6 @@ variable "ingest_change_event" { description = "Name of ingest change event docker image" } -variable "send_email" { - type = string - description = "Name of send email docker image" -} - variable "service_matcher" { type = string description = "Name of event processor docker image" @@ -352,11 +335,6 @@ variable "ingest_change_event_version" { description = "Version of ingest change event docker image" } -variable "send_email_version" { - type = string - description = "Version of send email docker image" -} - variable "service_matcher_version" { type = string description = "Version of event processor docker image" diff --git a/infrastructure/stacks/blue-green-link/cloudwatch-alarms-lambda-error-rate.tf b/infrastructure/stacks/blue-green-link/cloudwatch-alarms-lambda-error-rate.tf index 08ed39857..c10faeadf 100644 --- a/infrastructure/stacks/blue-green-link/cloudwatch-alarms-lambda-error-rate.tf +++ b/infrastructure/stacks/blue-green-link/cloudwatch-alarms-lambda-error-rate.tf @@ -186,53 +186,6 @@ resource "aws_cloudwatch_metric_alarm" "ingest_change_event_error_rate_alert" { } } -resource 
"aws_cloudwatch_metric_alarm" "send_email_error_rate_alert" { - alarm_actions = [data.aws_sns_topic.sns_topic_app_alerts_for_slack_default_region.arn] - alarm_description = "Send Email error rate has exceeded 1%" - alarm_name = "${var.project_id} | ${var.blue_green_environment} | Send Email Error Rate" - comparison_operator = "GreaterThanOrEqualToThreshold" - evaluation_periods = "2" - threshold = "1" - insufficient_data_actions = [] - treat_missing_data = "ignore" - ok_actions = var.profile == "dev" ? [] : [data.aws_sns_topic.sns_topic_app_alerts_for_slack_default_region.arn] - - metric_query { - id = "expression" - expression = "(errors/invocations) * 100" - label = "Error Rate (%)" - return_data = "true" - } - - metric_query { - id = "errors" - metric { - metric_name = "Errors" - namespace = "AWS/Lambda" - period = "120" - stat = "Sum" - unit = "Count" - dimensions = { - FunctionName = var.send_email_lambda - } - } - } - - metric_query { - id = "invocations" - metric { - metric_name = "Invocations" - namespace = "AWS/Lambda" - period = "120" - stat = "Sum" - unit = "Count" - dimensions = { - FunctionName = var.send_email_lambda - } - } - } -} - resource "aws_cloudwatch_metric_alarm" "service_matcher_error_rate_alert" { alarm_actions = [data.aws_sns_topic.sns_topic_app_alerts_for_slack_default_region.arn] alarm_description = "Service Matcher error rate has exceeded 1%" diff --git a/infrastructure/stacks/blue-green-link/cloudwatch-alarms.tf b/infrastructure/stacks/blue-green-link/cloudwatch-alarms.tf index 9a6520d1c..84e6e9bd9 100644 --- a/infrastructure/stacks/blue-green-link/cloudwatch-alarms.tf +++ b/infrastructure/stacks/blue-green-link/cloudwatch-alarms.tf @@ -100,23 +100,6 @@ resource "aws_cloudwatch_metric_alarm" "high_number_of_update_requests_waiting_a threshold = "30000" # 30 Seconds } -resource "aws_cloudwatch_metric_alarm" "high_number_of_failed_emails_alert" { - count = can(regex("ds-*", var.blue_green_environment)) ? 
0 : 1 - alarm_actions = [data.aws_sns_topic.sns_topic_app_alerts_for_slack_default_region.arn] - alarm_description = "Alert for when DI is failing to send emails" - alarm_name = "${var.project_id} | ${var.blue_green_environment} | Failed Emails" - comparison_operator = "GreaterThanOrEqualToThreshold" - datapoints_to_alarm = "1" - dimensions = { environment = var.blue_green_environment } - evaluation_periods = "1" - insufficient_data_actions = [] - metric_name = "EmailFailed" - namespace = "uec-dos-int" - period = "120" # 2 minutes - statistic = "Sum" - threshold = "1" -} - resource "aws_cloudwatch_metric_alarm" "average_message_latency_alert" { count = can(regex("ds-*", var.blue_green_environment)) ? 0 : 1 alarm_actions = [data.aws_sns_topic.sns_topic_app_alerts_for_slack_default_region.arn] diff --git a/infrastructure/stacks/blue-green-link/cloudwatch-dashboards.tf b/infrastructure/stacks/blue-green-link/cloudwatch-dashboards.tf index f77f4095b..f1600c2e6 100644 --- a/infrastructure/stacks/blue-green-link/cloudwatch-dashboards.tf +++ b/infrastructure/stacks/blue-green-link/cloudwatch-dashboards.tf @@ -243,9 +243,6 @@ resource "aws_cloudwatch_dashboard" "cloudwatch_monitoring_dashboard" { ["AWS/Lambda", "Errors", "FunctionName", var.ingest_change_event_lambda, { "id" : "m41", "stat" : "Sum", "visible" : false }], [".", "Invocations", ".", var.ingest_change_event_lambda, { "id" : "m42", "stat" : "Sum", "visible" : false }], [{ "expression" : "(m41/m42) * 100", "label" : "Ingest Change Event", region : var.aws_region, "color" : "#d62728" }], - ["AWS/Lambda", "Errors", "FunctionName", var.send_email_lambda, { "id" : "m51", "stat" : "Sum", "visible" : false }], - [".", "Invocations", ".", var.send_email_lambda, { "id" : "m52", "stat" : "Sum", "visible" : false }], - [{ "expression" : "(m51/m52) * 100", "label" : "Send Email", region : var.aws_region, "color" : "#9467bd" }], ["AWS/Lambda", "Errors", "FunctionName", var.service_matcher_lambda, { "id" : "m61", "stat" : 
"Sum", "visible" : false }], [".", "Invocations", ".", var.service_matcher_lambda, { "id" : "m62", "stat" : "Sum", "visible" : false }], [{ "expression" : "(m61/m62) * 100", "label" : "Service Matcher", region : var.aws_region, "color" : "#8c564b" }], diff --git a/infrastructure/stacks/blue-green-link/variables.tf b/infrastructure/stacks/blue-green-link/variables.tf index ad17c6065..9001dd600 100755 --- a/infrastructure/stacks/blue-green-link/variables.tf +++ b/infrastructure/stacks/blue-green-link/variables.tf @@ -98,11 +98,6 @@ variable "ingest_change_event_lambda" { description = "Name of ingest change event lambda" } -variable "send_email_lambda" { - type = string - description = "Name of send email lambda" -} - variable "service_matcher_lambda" { type = string description = "Name of event processor lambda" diff --git a/infrastructure/stacks/cloudwatch-queries/cloudwatch-queries.tf b/infrastructure/stacks/cloudwatch-queries/cloudwatch-queries.tf index 5eda0427b..704c50d42 100644 --- a/infrastructure/stacks/cloudwatch-queries/cloudwatch-queries.tf +++ b/infrastructure/stacks/cloudwatch-queries/cloudwatch-queries.tf @@ -6,7 +6,6 @@ resource "aws_cloudwatch_query_definition" "search_for_errors" { "/aws/lambda/${var.dos_db_update_dlq_handler_lambda}", "/aws/lambda/${var.event_replay_lambda}", "/aws/lambda/${var.ingest_change_event_lambda}", - "/aws/lambda/${var.send_email_lambda}", "/aws/lambda/${var.service_matcher_lambda}", "/aws/lambda/${var.service_sync_lambda}" ] @@ -26,7 +25,6 @@ resource "aws_cloudwatch_query_definition" "search_by_correlation_id" { "/aws/lambda/${var.dos_db_update_dlq_handler_lambda}", "/aws/lambda/${var.event_replay_lambda}", "/aws/lambda/${var.ingest_change_event_lambda}", - "/aws/lambda/${var.send_email_lambda}", "/aws/lambda/${var.service_matcher_lambda}", "/aws/lambda/${var.service_sync_lambda}" ] @@ -46,7 +44,6 @@ resource "aws_cloudwatch_query_definition" "search_by_correlation_id_expanded" { 
"/aws/lambda/${var.dos_db_update_dlq_handler_lambda}", "/aws/lambda/${var.event_replay_lambda}", "/aws/lambda/${var.ingest_change_event_lambda}", - "/aws/lambda/${var.send_email_lambda}", "/aws/lambda/${var.service_matcher_lambda}", "/aws/lambda/${var.service_sync_lambda}" ] @@ -66,7 +63,6 @@ resource "aws_cloudwatch_query_definition" "search_by_odscode" { "/aws/lambda/${var.dos_db_update_dlq_handler_lambda}", "/aws/lambda/${var.event_replay_lambda}", "/aws/lambda/${var.ingest_change_event_lambda}", - "/aws/lambda/${var.send_email_lambda}", "/aws/lambda/${var.service_matcher_lambda}", "/aws/lambda/${var.service_sync_lambda}" ] @@ -86,7 +82,6 @@ resource "aws_cloudwatch_query_definition" "search_by_odscode_expanded" { "/aws/lambda/${var.dos_db_update_dlq_handler_lambda}", "/aws/lambda/${var.event_replay_lambda}", "/aws/lambda/${var.ingest_change_event_lambda}", - "/aws/lambda/${var.send_email_lambda}", "/aws/lambda/${var.service_matcher_lambda}", "/aws/lambda/${var.service_sync_lambda}" ] @@ -131,7 +126,6 @@ resource "aws_cloudwatch_query_definition" "search_by_email_correlation_id" { log_group_names = [ "/aws/lambda/${var.service_sync_lambda}", - "/aws/lambda/${var.send_email_lambda}" ] query_string = < Context: - """Get the email file from S3 bucket. 
- - Args: - context (Context): Test context - - Returns: - context (Context): Test context - """ - sleep(45) - email_file_name = "email_file.json" - shared_environment = getenv("SHARED_ENVIRONMENT") - aws_account_id = getenv("AWS_ACCOUNT_ID") - bucket_name = f"uec-dos-int-{shared_environment}-send-email-bucket" - response = S3_CLIENT.list_objects(Bucket=bucket_name, ExpectedBucketOwner=aws_account_id) - object_key = response["Contents"][-1]["Key"] - s3_resource = resource("s3") - s3_resource.meta.client.download_file( - bucket_name, object_key, email_file_name, ExtraArgs={"ExpectedBucketOwner": aws_account_id} - ) - with open(email_file_name) as email_file: - context.other = load(email_file) - remove("./email_file.json") - return context diff --git a/test/integration/steps/test_steps.py b/test/integration/steps/test_steps.py index 5130cc378..8ff7ef2ce 100644 --- a/test/integration/steps/test_steps.py +++ b/test/integration/steps/test_steps.py @@ -17,7 +17,6 @@ from .functions.aws.aws_lambda import invoke_quality_checker_lambda, re_process_payload from .functions.aws.cloudwatch import get_logs, negative_log_check from .functions.aws.dynamodb import get_latest_sequence_id_for_a_given_odscode, get_stored_events_from_dynamo_db -from .functions.aws.s3 import get_s3_email_file from .functions.aws.sqs import post_to_change_event_dlq, post_ur_fifo, post_ur_sqs from .functions.context import Context from .functions.dos.check_data import ( @@ -1606,17 +1605,6 @@ def services_location_history_update_assertion(context: Context) -> None: assert history_list == location_data, "ERROR: Service History and Location data does not match" -@then("the s3 bucket contains an email file matching the service uid") -def check_s3_contains_email_file(context: Context) -> None: - """Assert the s3 bucket contains an email file matching the service uid. - - Args: - context (Context): The context object. 
- """ - get_s3_email_file(context) - assert context.service_uid in context.other["email_body"], "ERROR: service_uid not found in email body" - - @then("the changes table shows change is now rejected") def check_changes_table_has_been_updated(context: Context) -> None: """Assert the changes table shows change is now rejected. From d7f44bb9e47da79e3b04ae9d6e1f576d8b0d22d8 Mon Sep 17 00:00:00 2001 From: Ajay Mudgal Date: Wed, 11 Mar 2026 12:24:16 +0000 Subject: [PATCH 2/8] Fixing broken integration test for retiring send email lambda --- .../features/F001_Valid_Change_Events.feature | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/test/integration/features/F001_Valid_Change_Events.feature b/test/integration/features/F001_Valid_Change_Events.feature index f4db2d5d5..36adfd280 100644 --- a/test/integration/features/F001_Valid_Change_Events.feature +++ b/test/integration/features/F001_Valid_Change_Events.feature @@ -143,16 +143,6 @@ Feature: F001. Ensure valid change events are converted and sent to DoS When the Changed Event is sent for processing with "valid" api key Then the service history table has been updated with locations data - @complete @general - Scenario: F001SX11. To check the emails sending - Given a basic service is created - And the correlation-id is "email" - And the change event "Address1" is set to "Test Address" - And a pending entry exists in the changes table for this service - When the Changed Event is sent for processing with "valid" api key - Then the s3 bucket contains an email file matching the service uid - And the changes table shows change is now rejected - @complete @opening_times Scenario: F001SX12. 
Past Specified Opening Times on Dos are removed and updated Given an entry is created in the services table From 4fed568ad9b6f8fa66d2221df8891fe8e5bba0ad Mon Sep 17 00:00:00 2001 From: Ajay Mudgal Date: Wed, 11 Mar 2026 13:26:31 +0000 Subject: [PATCH 3/8] Fixed trailing whitespace issue in file --- .../service_sync/reject_pending_changes/pending_changes.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/application/service_sync/reject_pending_changes/pending_changes.py b/application/service_sync/reject_pending_changes/pending_changes.py index 3e129f3db..2f2e1ed3b 100644 --- a/application/service_sync/reject_pending_changes/pending_changes.py +++ b/application/service_sync/reject_pending_changes/pending_changes.py @@ -170,5 +170,4 @@ def log_rejected_changes(pending_changes: list[PendingChange]) -> None: service_name=pending_change.name, type_id=pending_change.typeid, odscode="", - ).log_rejected_change(pending_change.id) - \ No newline at end of file + ).log_rejected_change(pending_change.id) \ No newline at end of file From f1ae6209714c971318c44f2ff50c3f4d26ee54c7 Mon Sep 17 00:00:00 2001 From: Ajay Mudgal Date: Wed, 11 Mar 2026 13:47:56 +0000 Subject: [PATCH 4/8] Ruff fixes --- .../service_sync/reject_pending_changes/pending_changes.py | 2 +- .../reject_pending_changes/tests/test_pending_changes.py | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/application/service_sync/reject_pending_changes/pending_changes.py b/application/service_sync/reject_pending_changes/pending_changes.py index 2f2e1ed3b..80c783984 100644 --- a/application/service_sync/reject_pending_changes/pending_changes.py +++ b/application/service_sync/reject_pending_changes/pending_changes.py @@ -170,4 +170,4 @@ def log_rejected_changes(pending_changes: list[PendingChange]) -> None: service_name=pending_change.name, type_id=pending_change.typeid, odscode="", - ).log_rejected_change(pending_change.id) \ No newline at end of file + 
).log_rejected_change(pending_change.id) diff --git a/application/service_sync/reject_pending_changes/tests/test_pending_changes.py b/application/service_sync/reject_pending_changes/tests/test_pending_changes.py index c0d5271bf..c60cc030a 100644 --- a/application/service_sync/reject_pending_changes/tests/test_pending_changes.py +++ b/application/service_sync/reject_pending_changes/tests/test_pending_changes.py @@ -315,4 +315,3 @@ def test_log_rejected_changes(capsys: pytest.CaptureFixture) -> None: f"org_name={pending_change.name}|change_status=PENDING|info=change rejected|" "execution_time=NULL" ) in captured.err - From 1848e4f1b05f15e066adba5b24fdcb806c620bfa Mon Sep 17 00:00:00 2001 From: Ajay Mudgal Date: Wed, 11 Mar 2026 13:56:15 +0000 Subject: [PATCH 5/8] Ruff fixes for unused imports --- .../service_sync/reject_pending_changes/pending_changes.py | 6 +----- .../reject_pending_changes/tests/test_pending_changes.py | 3 +-- 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/application/service_sync/reject_pending_changes/pending_changes.py b/application/service_sync/reject_pending_changes/pending_changes.py index 80c783984..4be6314d9 100644 --- a/application/service_sync/reject_pending_changes/pending_changes.py +++ b/application/service_sync/reject_pending_changes/pending_changes.py @@ -1,20 +1,16 @@ from dataclasses import dataclass from datetime import datetime -from json import JSONDecodeError, dumps, loads -from os import environ -from time import time_ns +from json import JSONDecodeError, loads from typing import Self from zoneinfo import ZoneInfo from aws_lambda_powertools.logging import Logger -from boto3 import client from psycopg import Connection from psycopg.rows import DictRow from ..service_update_logger import ServiceUpdateLogger from common.constants import DI_CHANGE_ITEMS, DOS_INTEGRATION_USER_NAME from common.dos_db_connection import connect_to_db_writer, query_dos_db -from common.types import EmailFile, EmailMessage logger = 
Logger(child=True) diff --git a/application/service_sync/reject_pending_changes/tests/test_pending_changes.py b/application/service_sync/reject_pending_changes/tests/test_pending_changes.py index c60cc030a..2bcba30d5 100644 --- a/application/service_sync/reject_pending_changes/tests/test_pending_changes.py +++ b/application/service_sync/reject_pending_changes/tests/test_pending_changes.py @@ -1,7 +1,6 @@ from json import dumps -from os import environ from random import choices -from unittest.mock import MagicMock, call, patch +from unittest.mock import MagicMock, patch from zoneinfo import ZoneInfo import pytest From fde436deb6723ed158fbc1753f4d28a5609fd5f7 Mon Sep 17 00:00:00 2001 From: Ajay Mudgal Date: Wed, 11 Mar 2026 14:39:52 +0000 Subject: [PATCH 6/8] Ruff fixes for dead code --- application/pyproject.toml | 1 + .../service_sync/reject_pending_changes/pending_changes.py | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/application/pyproject.toml b/application/pyproject.toml index c6d11ad45..a5ad2cb2a 100644 --- a/application/pyproject.toml +++ b/application/pyproject.toml @@ -16,4 +16,5 @@ ignore_names = [ "metadata", "recipient_email_address", "recipient_id", + "unhandled_exception_logging_hidden_event" ] diff --git a/application/service_sync/reject_pending_changes/pending_changes.py b/application/service_sync/reject_pending_changes/pending_changes.py index 4be6314d9..24bdb3f23 100644 --- a/application/service_sync/reject_pending_changes/pending_changes.py +++ b/application/service_sync/reject_pending_changes/pending_changes.py @@ -21,8 +21,8 @@ class PendingChange: id: str # Id of the pending change from the change table value: str # Value of the pending change as a JSON string - creatorsname: str # User name of the user who made the change - email: str # Email address of the user who made the change + _creatorsname: str # User name of the user who made the change #Reserved for future use if needed(retiring now) + _email: str # Email 
address of the user who made the change #Reserved for future use if needed(retiring now) typeid: str # Type id of the service name: str # Name of the service uid: str # Uid of the service From a26cfc7a2b52da20e5b76bcc4bcf67f38da02491 Mon Sep 17 00:00:00 2001 From: Ajay Mudgal Date: Wed, 11 Mar 2026 15:20:29 +0000 Subject: [PATCH 7/8] Fixing unit tests for ruff fixes --- application/pyproject.toml | 4 +++- .../service_sync/reject_pending_changes/pending_changes.py | 4 ++-- build/automation/var/project.mk | 3 --- infrastructure/stacks/application/variables.tf | 5 ----- infrastructure/stacks/shared-resources/variables.tf | 5 ----- 5 files changed, 5 insertions(+), 16 deletions(-) diff --git a/application/pyproject.toml b/application/pyproject.toml index a5ad2cb2a..f7ce8f288 100644 --- a/application/pyproject.toml +++ b/application/pyproject.toml @@ -16,5 +16,7 @@ ignore_names = [ "metadata", "recipient_email_address", "recipient_id", - "unhandled_exception_logging_hidden_event" + "unhandled_exception_logging_hidden_event", + "creatorsname", + "email" ] diff --git a/application/service_sync/reject_pending_changes/pending_changes.py b/application/service_sync/reject_pending_changes/pending_changes.py index 24bdb3f23..4be6314d9 100644 --- a/application/service_sync/reject_pending_changes/pending_changes.py +++ b/application/service_sync/reject_pending_changes/pending_changes.py @@ -21,8 +21,8 @@ class PendingChange: id: str # Id of the pending change from the change table value: str # Value of the pending change as a JSON string - _creatorsname: str # User name of the user who made the change #Reserved for future use if needed(retiring now) - _email: str # Email address of the user who made the change #Reserved for future use if needed(retiring now) + creatorsname: str # User name of the user who made the change + email: str # Email address of the user who made the change typeid: str # Type id of the service name: str # Name of the service uid: str # Uid of the service 
diff --git a/build/automation/var/project.mk b/build/automation/var/project.mk index 74495ad59..b5bb82fbb 100644 --- a/build/automation/var/project.mk +++ b/build/automation/var/project.mk @@ -120,9 +120,6 @@ TF_VAR_signing_key_alias := $(PROJECT_ID)-$(SHARED_ENVIRONMENT)-signing-key-alia TF_VAR_route53_health_check_alarm_region_signing_key_alias := $(PROJECT_ID)-$(SHARED_ENVIRONMENT)-alarm-region-signing-key-alias # S3 -SEND_EMAIL_BUCKET_NAME := $(PROJECT_ID)-$(SHARED_ENVIRONMENT)-send-email-bucket -TF_VAR_send_email_bucket_name := $(SEND_EMAIL_BUCKET_NAME) -TF_VAR_logs_bucket_name := $(PROJECT_ID)-$(SHARED_ENVIRONMENT)-logs-bucket # Cloudwatch monitoring dashboard TF_VAR_shared_resources_sns_topic_app_alerts_for_slack_default_region := $(PROJECT_ID)-$(SHARED_ENVIRONMENT)-shared-resources-topic-app-alerts-for-slack-default-region diff --git a/infrastructure/stacks/application/variables.tf b/infrastructure/stacks/application/variables.tf index 9ea8325fc..09a46dc24 100644 --- a/infrastructure/stacks/application/variables.tf +++ b/infrastructure/stacks/application/variables.tf @@ -133,11 +133,6 @@ variable "sns_topic_app_alerts_for_slack_route53_health_check_alarm_region" { # # S3 # ############## -variable "send_email_bucket_name" { - type = string - description = "Name of the bucket to temporarily store emails to be sent" -} - # ############## # # FIREHOSE # ############## diff --git a/infrastructure/stacks/shared-resources/variables.tf b/infrastructure/stacks/shared-resources/variables.tf index 15e23814c..5274004e2 100755 --- a/infrastructure/stacks/shared-resources/variables.tf +++ b/infrastructure/stacks/shared-resources/variables.tf @@ -115,11 +115,6 @@ variable "shared_resources_sns_topic_app_alerts_for_slack_route53_health_check_a # # S3 # ############## -variable "send_email_bucket_name" { - type = string - description = "Name of the bucket to temporarily store emails to be sent" -} - variable "logs_bucket_name" { type = string description = "Name of the 
bucket to store logs" From 7e45d5b89313f23cb135360a6cfe88b659418651 Mon Sep 17 00:00:00 2001 From: Ajay Mudgal Date: Wed, 11 Mar 2026 15:32:20 +0000 Subject: [PATCH 8/8] Removing unused terraform variables --- infrastructure/stacks/shared-resources/variables.tf | 5 ----- 1 file changed, 5 deletions(-) diff --git a/infrastructure/stacks/shared-resources/variables.tf b/infrastructure/stacks/shared-resources/variables.tf index 5274004e2..743f36c29 100755 --- a/infrastructure/stacks/shared-resources/variables.tf +++ b/infrastructure/stacks/shared-resources/variables.tf @@ -115,11 +115,6 @@ variable "shared_resources_sns_topic_app_alerts_for_slack_route53_health_check_a # # S3 # ############## -variable "logs_bucket_name" { - type = string - description = "Name of the bucket to store logs" -} - # ############## # # KINESIS # ##############