Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
46 changes: 46 additions & 0 deletions .github/workflows/deploy-backend.yml
Original file line number Diff line number Diff line change
Expand Up @@ -110,6 +110,10 @@ env: # Sonarcloud - do not allow direct usage of untrusted data

run-name: Deploy Backend - ${{ inputs.environment }} ${{ inputs.sub_environment }}

concurrency:
group: deploy-backend-${{ github.repository }}-${{ inputs.environment }}-${{ (inputs.environment == 'preprod' || inputs.environment == 'prod') && 'shared-trigger' || inputs.sub_environment }}
cancel-in-progress: false

jobs:
deploy-lambda-images:
name: Deploy ${{ matrix.lambda_name }} image
Expand Down Expand Up @@ -247,6 +251,10 @@ jobs:
working-directory: infrastructure/instance
run: make init

- name: Set Terraform workspace
working-directory: infrastructure/instance
run: make workspace

- name: Terraform Plan
# Ignore cancellations to prevent Terraform from being killed while it holds a state lock
# A stuck process can still be killed with the force-cancel API operation
Expand Down Expand Up @@ -293,6 +301,10 @@ jobs:
working-directory: infrastructure/instance
run: make init

- name: Set Terraform workspace
working-directory: infrastructure/instance
run: make workspace

- name: Terraform Apply
Comment thread
Thomas-Boyle marked this conversation as resolved.
# Ignore cancellations to prevent Terraform from being killed while it holds a state lock
# A stuck process can still be killed with the force-cancel API operation
Expand All @@ -302,6 +314,40 @@ jobs:
make apply-ci
echo "ID_SYNC_QUEUE_ARN=$(make -s output name=id_sync_queue_arn)" >> $GITHUB_ENV

- name: Terraform Init Event Source Mappings
if: ${{ !failure() }}
working-directory: infrastructure/event_source_mappings
run: make init

- name: Terraform Format Check Event Source Mappings
if: ${{ !failure() }}
working-directory: infrastructure/event_source_mappings
run: make fmt-check

- name: Terraform Validate Event Source Mappings
if: ${{ !failure() }}
working-directory: infrastructure/event_source_mappings
run: make validate

- name: Terraform Plan Event Source Mappings
if: ${{ !failure() }}
working-directory: infrastructure/event_source_mappings
run: make plan-ci

- name: Save Event Source Mapping Terraform Plan
if: ${{ !failure() }}
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a
with:
name: ${{ env.ENVIRONMENT }}-${{ env.SUB_ENVIRONMENT }}-event-source-mappings-tfplan
path: infrastructure/event_source_mappings/tfplan

- name: Terraform Apply Event Source Mappings
if: ${{ !failure() }}
working-directory: infrastructure/event_source_mappings
run: |
make apply-ci
make verify

- name: Install poetry
if: ${{ inputs.environment == 'dev' && inputs.create_mns_subscription }}
run: pip install poetry==2.1.4
Expand Down
111 changes: 111 additions & 0 deletions .github/workflows/migrate-event-source-mappings.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,111 @@
# One-time, operator-confirmed migration workflow: adopts pre-existing Lambda
# event source mappings into the dedicated event_source_mappings Terraform
# state, then plans, applies, and verifies them.
name: Migrate Event Source Mappings

on:
  workflow_dispatch:
    inputs:
      apigee_environment:
        type: choice
        description: Select the Apigee proxy environment for dev state buckets
        options:
          - internal-dev
          - internal-qa
          - ref
        default: internal-dev
      environment:
        type: choice
        description: Select the AWS backend environment
        options:
          - dev
          - preprod
          - prod
      sub_environment:
        type: string
        description: Set the sub-environment name, e.g. internal-dev, int-blue, blue
        required: true
      # Guard input: the first job step aborts unless this is true.
      confirm_event_source_mapping_migration:
        type: boolean
        description: Confirm this is the controlled one-time migration for the selected environment
        required: true
        default: false

env:
  APIGEE_ENVIRONMENT: ${{ inputs.apigee_environment }}
  ENVIRONMENT: ${{ inputs.environment }}
  SUB_ENVIRONMENT: ${{ inputs.sub_environment }}

run-name: Migrate Event Source Mappings - ${{ inputs.environment }} ${{ inputs.sub_environment }}

# NOTE(review): the group name is prefixed "deploy-backend-", apparently so this
# migration serialises against deploy-backend runs for the same environment —
# confirm it stays in sync with the group expression in deploy-backend.yml.
concurrency:
  group: deploy-backend-${{ github.repository }}-${{ inputs.environment }}-${{ (inputs.environment == 'preprod' || inputs.environment == 'prod') && 'shared-trigger' || inputs.sub_environment }}
  cancel-in-progress: false

jobs:
  migrate-event-source-mappings:
    permissions:
      id-token: write  # needed for OIDC credential exchange with AWS
      contents: read
    runs-on: ubuntu-latest
    environment:
      name: ${{ inputs.environment }}
    steps:
      # Fail fast unless the operator explicitly ticked the confirmation box.
      # The boolean input is passed through an env var, not interpolated into
      # the script, so untrusted-data injection is avoided.
      - name: Confirm controlled migration
        run: |
          set -euo pipefail
          if [ "${CONFIRM_EVENT_SOURCE_MAPPING_MIGRATION}" != "true" ]; then
            echo "This workflow is only for the controlled one-time event source mapping migration."
            echo "Set confirm_event_source_mapping_migration to true to continue."
            exit 1
          fi
        env:
          CONFIRM_EVENT_SOURCE_MAPPING_MIGRATION: ${{ inputs.confirm_event_source_mapping_migration }}

      - name: Checkout
        uses: actions/checkout@0c366fd6a839edf440554fa01a7085ccba70ac98

      - name: Connect to AWS
        uses: aws-actions/configure-aws-credentials@ec61189d14ec14c8efccab744f656cffd0e33f37
        with:
          aws-region: eu-west-2
          role-to-assume: arn:aws:iam::${{ vars.AWS_ACCOUNT_ID }}:role/auto-ops
          role-session-name: github-actions

      - uses: hashicorp/setup-terraform@5e8dbf3c6d9deaf4193ca7a8fb23f2ac83bb6c85
        with:
          terraform_version: "1.12.2"

      - name: Terraform Init
        working-directory: infrastructure/event_source_mappings
        run: make init

      # Imports the already-existing mappings into this stack's state; the
      # env flag is an explicit opt-in consumed by the adoption script.
      - name: Adopt Existing Event Source Mappings
        working-directory: infrastructure/event_source_mappings
        env:
          ALLOW_EVENT_SOURCE_MAPPING_ADOPTION: "true"
        run: make adopt

      - name: Terraform Format Check
        working-directory: infrastructure/event_source_mappings
        run: make fmt-check

      - name: Terraform Validate
        working-directory: infrastructure/event_source_mappings
        run: make validate

      - name: Terraform Plan
        working-directory: infrastructure/event_source_mappings
        run: make plan-ci

      # Retain the plan file as an artifact for audit of the one-time change.
      - name: Save Terraform Plan
        uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a
        with:
          name: ${{ env.ENVIRONMENT }}-${{ env.SUB_ENVIRONMENT }}-event-source-mappings-migration-tfplan
          path: infrastructure/event_source_mappings/tfplan

      - name: Terraform Apply
        working-directory: infrastructure/event_source_mappings
        run: make apply-ci

      - name: Verify Event Source Mappings
        working-directory: infrastructure/event_source_mappings
        run: make verify
6 changes: 6 additions & 0 deletions .github/workflows/pr-teardown.yml
Original file line number Diff line number Diff line change
Expand Up @@ -92,6 +92,12 @@ jobs:
echo "Unsubscribing SQS to MNS for notifications..."
make unsubscribe

- name: Destroy Lambda event source mappings
working-directory: infrastructure/event_source_mappings
run: |
make init apigee_environment=$APIGEE_ENVIRONMENT environment=$BACKEND_ENVIRONMENT sub_environment=$BACKEND_SUB_ENVIRONMENT
make destroy apigee_environment=$APIGEE_ENVIRONMENT environment=$BACKEND_ENVIRONMENT sub_environment=$BACKEND_SUB_ENVIRONMENT

- name: Terraform Destroy
working-directory: infrastructure/instance
run: |
Expand Down
25 changes: 25 additions & 0 deletions infrastructure/event_source_mappings/.terraform.lock.hcl

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

68 changes: 68 additions & 0 deletions infrastructure/event_source_mappings/Makefile
Original file line number Diff line number Diff line change
@@ -0,0 +1,68 @@
# Terraform wrapper for the event_source_mappings stack.
# Inputs may come from an optional .env file, the process environment
# (APIGEE_ENVIRONMENT / ENVIRONMENT / SUB_ENVIRONMENT), or make's command line,
# e.g. `make plan environment=dev sub_environment=internal-dev`.
-include .env

apigee_environment ?= $(APIGEE_ENVIRONMENT)
environment ?= $(ENVIRONMENT)
sub_environment ?= $(SUB_ENVIRONMENT)
# All PR sub-environments (pr-123, ...) share the single "pr" variables directory.
sub_environment_dir := $(if $(findstring pr-,$(sub_environment)),pr,$(sub_environment))
tf_var_file := ../instance/environments/$(environment)/$(sub_environment_dir)/variables.tfvars
# Extract has_sub_environment_scope from the tfvars file, stripping whitespace.
has_sub_environment_scope = $(shell awk -F= '/^has_sub_environment_scope/ { gsub(/[[:space:]]/, "", $$2); print $$2 }' "$(tf_var_file)")
# Shared-scope environments use one workspace per environment; otherwise one
# workspace per sub-environment.
workspace_name = $(if $(filter false,$(has_sub_environment_scope)),$(environment),$(sub_environment))
allow_shared_scope_destroy ?= $(ALLOW_SHARED_SCOPE_DESTROY)

tf_cmd = AWS_PROFILE=$(AWS_PROFILE) terraform

# dev state lives in per-Apigee-environment buckets; preprod/prod use the
# per-environment bucket.
bucket_name = $(if $(filter dev,$(environment)),immunisation-$(apigee_environment),immunisation-$(environment))-terraform-state-files

tf_state = \
	-backend-config="bucket=$(bucket_name)" \
	-backend-config="key=event-source-mappings/state"

tf_vars = \
	-var="sub_environment=$(sub_environment)" \
	-var-file="$(tf_var_file)"

init:
	$(tf_cmd) init $(tf_state) -upgrade

workspace:
	$(tf_cmd) workspace select -or-create $(workspace_name) && echo "Switched to workspace/environment: $(workspace_name)"

# Import live event source mappings into this stack's state via the shared
# adoption script (the script also requires ALLOW_EVENT_SOURCE_MAPPING_ADOPTION).
adopt: workspace
	ENVIRONMENT='$(environment)' SUB_ENVIRONMENT='$(sub_environment)' RESOURCE_SCOPE='$(workspace_name)' bash ../../utilities/scripts/adopt_event_source_mappings.sh $(tf_vars)

fmt-check:
	$(tf_cmd) fmt -check

validate: workspace
	$(tf_cmd) validate

plan: workspace
	$(tf_cmd) plan $(tf_vars)

plan-ci: workspace
	$(tf_cmd) plan $(tf_vars) -out=tfplan -input=false

apply: workspace
	$(tf_cmd) apply $(tf_vars) -auto-approve

apply-ci: workspace
	$(tf_cmd) apply $(tf_vars) -input=false tfplan

verify: workspace
	ENVIRONMENT='$(environment)' SUB_ENVIRONMENT='$(sub_environment)' RESOURCE_SCOPE='$(workspace_name)' EVENT_SOURCE_MAPPING_ACTION=verify bash ../../utilities/scripts/adopt_event_source_mappings.sh

# Refuse to destroy a shared-scope workspace unless the caller explicitly
# opts in with ALLOW_SHARED_SCOPE_DESTROY=true.
ensure-destroy-allowed:
	@if [ "$(has_sub_environment_scope)" = "false" ] && [ "$(allow_shared_scope_destroy)" != "true" ]; then \
		echo "Refusing to destroy shared event source mappings in workspace $(workspace_name). Set ALLOW_SHARED_SCOPE_DESTROY=true for controlled teardown."; \
		exit 1; \
	fi

# Destroy the stack, then remove the now-empty workspace (Terraform cannot
# delete the currently-selected workspace, hence the switch to default first).
destroy: workspace ensure-destroy-allowed
	$(tf_cmd) destroy $(tf_vars) -auto-approve
	$(tf_cmd) workspace select default
	$(tf_cmd) workspace delete $(workspace_name)

# NOTE(review): output reads from whichever workspace is currently selected —
# if it can be invoked standalone, consider adding the workspace prerequisite.
output:
	$(tf_cmd) output -raw $(name)

.PHONY : init workspace adopt fmt-check validate plan plan-ci apply apply-ci verify ensure-destroy-allowed destroy output
78 changes: 78 additions & 0 deletions infrastructure/event_source_mappings/main.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
terraform {
  required_providers {
    aws = {
      source  = "hashicorp/aws"
      version = "~> 6"
    }
  }
  # Bucket is injected at init time via -backend-config (see the Makefile);
  # use_lockfile enables S3-native state locking.
  backend "s3" {
    region       = "eu-west-2"
    key          = "event-source-mappings/state"
    use_lockfile = true
  }
  required_version = ">= 1.5.0"
}

provider "aws" {
  region = var.aws_region
  default_tags {
    tags = {
      Project     = var.project_name
      Environment = local.resource_scope
      Service     = var.service
    }
  }
}

locals {
  # Shared-scope environments name resources by environment; otherwise by
  # sub-environment.
  resource_scope = var.has_sub_environment_scope ? var.sub_environment : var.environment
  # NOTE(review): short_prefix uses sub_environment while the table/queue names
  # below use resource_scope — presumably lambdas exist per sub-environment
  # even when other resources are shared; confirm against the instance stack.
  short_prefix       = "${var.project_short_name}-${var.sub_environment}"
  events_table_name  = "imms-${local.resource_scope}-imms-events"
  id_sync_queue_name = "imms-${local.resource_scope}-id-sync-queue"
  delta_lambda_name  = "${local.short_prefix}-delta-lambda"
  delta_dlq_name     = "${local.short_prefix}-delta-dlq"
  id_sync_lambda_name = "${local.short_prefix}-id-sync-lambda"
}

# The underlying table, queues, and functions are owned by another stack;
# this stack only looks them up and attaches the event source mappings.
data "aws_dynamodb_table" "events" {
  name = local.events_table_name
}

data "aws_sqs_queue" "delta_dlq" {
  name = local.delta_dlq_name
}

data "aws_sqs_queue" "id_sync" {
  name = local.id_sync_queue_name
}

data "aws_lambda_function" "delta" {
  function_name = local.delta_lambda_name
}

data "aws_lambda_function" "id_sync" {
  function_name = local.id_sync_lambda_name
}

# DynamoDB stream -> delta lambda. With maximum_retry_attempts = 0 a failed
# batch is not retried; its metadata is routed to the DLQ via on_failure.
resource "aws_lambda_event_source_mapping" "delta_trigger" {
  event_source_arn  = data.aws_dynamodb_table.events.stream_arn
  # NOTE(review): this mapping passes function_name while id_sync_sqs_trigger
  # passes the ARN — both are accepted, but consider aligning for consistency.
  function_name     = data.aws_lambda_function.delta.function_name
  starting_position = "TRIM_HORIZON"

  destination_config {
    on_failure {
      destination_arn = data.aws_sqs_queue.delta_dlq.arn
    }
  }

  maximum_retry_attempts = 0
}

# SQS -> id-sync lambda, batching up to 10 messages / 5 seconds; partial-batch
# failures are reported per message via ReportBatchItemFailures.
resource "aws_lambda_event_source_mapping" "id_sync_sqs_trigger" {
  event_source_arn = data.aws_sqs_queue.id_sync.arn
  function_name    = data.aws_lambda_function.id_sync.arn

  batch_size                         = 10
  maximum_batching_window_in_seconds = 5
  function_response_types            = ["ReportBatchItemFailures"]
}
Loading
Loading