Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions .github/workflows/cicd-1-pull-request.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -40,9 +40,9 @@ jobs:
echo "build_datetime=$datetime" >> $GITHUB_OUTPUT
echo "build_timestamp=$(date --date=$datetime -u +'%Y%m%d%H%M%S')" >> $GITHUB_OUTPUT
echo "build_epoch=$(date --date=$datetime -u +'%s')" >> $GITHUB_OUTPUT
echo "nodejs_version=$(grep "^nodejs " .tool-versions | cut -f2 -d' ')" >> $GITHUB_OUTPUT
echo "python_version=$(grep "^python " .tool-versions | cut -f2 -d' ')" >> $GITHUB_OUTPUT
echo "terraform_version=$(grep "^terraform " .tool-versions | cut -f2 -d' ')" >> $GITHUB_OUTPUT
echo "nodejs_version=$(grep "^nodejs\s" .tool-versions | cut -f2 -d' ')" >> $GITHUB_OUTPUT
echo "python_version=$(grep "^python\s" .tool-versions | cut -f2 -d' ')" >> $GITHUB_OUTPUT
echo "terraform_version=$(grep "^terraform\s" .tool-versions | cut -f2 -d' ')" >> $GITHUB_OUTPUT
echo "version=$(head -n 1 .version 2> /dev/null || echo unknown)" >> $GITHUB_OUTPUT
- name: "Check if pull request exists for this branch"
id: pr_exists
Expand Down
6 changes: 3 additions & 3 deletions .github/workflows/cicd-2-publish.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -30,9 +30,9 @@ jobs:
echo "build_datetime=$datetime" >> $GITHUB_OUTPUT
echo "build_timestamp=$(date --date=$datetime -u +'%Y%m%d%H%M%S')" >> $GITHUB_OUTPUT
echo "build_epoch=$(date --date=$datetime -u +'%s')" >> $GITHUB_OUTPUT
echo "nodejs_version=$(grep "^nodejs " .tool-versions | cut -f2 -d' ')" >> $GITHUB_OUTPUT
echo "python_version=$(grep "^python " .tool-versions | cut -f2 -d' ')" >> $GITHUB_OUTPUT
echo "terraform_version=$(grep "^terraform " .tool-versions | cut -f2 -d' ')" >> $GITHUB_OUTPUT
echo "nodejs_version=$(grep "^nodejs\s" .tool-versions | cut -f2 -d' ')" >> $GITHUB_OUTPUT
echo "python_version=$(grep "^python\s" .tool-versions | cut -f2 -d' ')" >> $GITHUB_OUTPUT
echo "terraform_version=$(grep "^terraform\s" .tool-versions | cut -f2 -d' ')" >> $GITHUB_OUTPUT
# TODO: Get the version, but it may not be the .version file as this should come from the CI/CD Pull Request Workflow
echo "version=$(head -n 1 .version 2> /dev/null || echo unknown)" >> $GITHUB_OUTPUT
- name: "List variables"
Expand Down
6 changes: 3 additions & 3 deletions .github/workflows/cicd-3-deploy.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -66,9 +66,9 @@ jobs:
echo "build_datetime=$datetime" >> $GITHUB_OUTPUT
echo "build_timestamp=$(date --date=$datetime -u +'%Y%m%d%H%M%S')" >> $GITHUB_OUTPUT
echo "build_epoch=$(date --date=$datetime -u +'%s')" >> $GITHUB_OUTPUT
echo "nodejs_version=$(grep "^nodejs " .tool-versions | cut -f2 -d' ')" >> $GITHUB_OUTPUT
echo "python_version=$(grep "^python " .tool-versions | cut -f2 -d' ')" >> $GITHUB_OUTPUT
echo "terraform_version=$(grep "^terraform " .tool-versions | cut -f2 -d' ')" >> $GITHUB_OUTPUT
echo "nodejs_version=$(grep "^nodejs\s" .tool-versions | cut -f2 -d' ')" >> $GITHUB_OUTPUT
echo "python_version=$(grep "^python\s" .tool-versions | cut -f2 -d' ')" >> $GITHUB_OUTPUT
echo "terraform_version=$(grep "^terraform\s" .tool-versions | cut -f2 -d' ')" >> $GITHUB_OUTPUT
# TODO: Get the version, but it may not be the .version file as this should come from the CI/CD Pull Request Workflow
echo "version=$(head -n 1 .version 2> /dev/null || echo unknown)" >> $GITHUB_OUTPUT
echo "tag=${{ github.event.inputs.tag }}" >> $GITHUB_OUTPUT
Expand Down
Empty file.
Empty file removed infrastructure/images/.gitkeep
Empty file.
Empty file removed infrastructure/modules/.gitkeep
Empty file.
1 change: 1 addition & 0 deletions infrastructure/terraform/bootstrap/s3_bucket.tf
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
#tfsec:ignore:aws-s3-enable-bucket-logging Bucket exists before any other bucket can exist
resource "aws_s3_bucket" "bucket" {
bucket = var.bucket_name

Expand Down
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
resource "aws_cloudwatch_event_rule" "aws_backup_errors" {
name = "${local.csi}-aws-backup-errors"
name = "${local.csi}-aws-backup-errors"
description = "Forwards AWS Backup state changes to Custom Event Bus in Observability Account"

event_pattern = jsonencode({
source = ["aws.backup"],
source = ["aws.backup"],
"detail-type" = ["Backup Job State Change", "Restore Job State Change", "Copy Job State Change"],
detail = {
state = ["FAILED", "ABORTED"]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,7 @@ data "aws_iam_policy_document" "sso_read_only_table_access" {
]
}

#tfsec:ignore:aws-iam-no-policy-wildcards Policy violation expected for CI user role
statement {
sid = "AllowAthenaAccess3"
effect = "Allow"
Expand All @@ -85,7 +86,7 @@ data "aws_iam_policy_document" "sso_read_only_table_access" {
"athena:ListWorkGroups"
]

resources = [ "*" ] # Access to List all above is required. Condition keys not supported for these resources.
resources = ["*"] # Access to List all above is required. Condition keys not supported for these resources.
}

statement {
Expand All @@ -105,7 +106,7 @@ data "aws_iam_policy_document" "sso_read_only_table_access" {
]

condition {
test = "ForAnyValue:StringLike"
test = "ForAnyValue:StringLike"
variable = "kms:ResourceAliases"
values = [
"alias/${var.project}-*-reporting-s3"
Expand Down
10 changes: 5 additions & 5 deletions infrastructure/terraform/components/acct/locals_tfscaffold.tf
Original file line number Diff line number Diff line change
Expand Up @@ -34,11 +34,11 @@ locals {
default_tags = merge(
var.default_tags,
{
Project = var.project
Environment = var.environment
Component = var.component
Group = var.group
Name = local.csi
Project = var.project
Environment = var.environment
Component = var.component
Group = var.group
Name = local.csi
},
)
}
10 changes: 5 additions & 5 deletions infrastructure/terraform/components/acct/provider_aws.tf
Original file line number Diff line number Diff line change
Expand Up @@ -7,11 +7,11 @@ provider "aws" {

default_tags {
tags = {
Project = var.project
Environment = var.environment
Component = var.component
Group = var.group
Name = local.csi
Project = var.project
Environment = var.environment
Component = var.component
Group = var.group
Name = local.csi
}
}
}
Original file line number Diff line number Diff line change
@@ -1,10 +1,14 @@
# From Support ticket: Where the client is looking to enable bucket keys on the target S3 bucket, the recommended approach would be:
# 1. Remove the KMS key from the Athena workgroup's encryption configuration: Since the bucket keys will be handling the encryption, you don't need the additional layer of encryption from the workgroup-level KMS key. Removing it will simplify the configuration.
# 2. Rely solely on the bucket keys for encryption: With the bucket keys enabled on the S3 bucket, Athena will automatically use that for encrypting and decrypting the query results. This will reduce the number of API calls.
#tfsec:ignore:aws-athena-enable-at-rest-encryption
resource "aws_athena_workgroup" "core" {
name = "${local.csi}-core"
description = "Athena Workgroup for core egress queries in ${local.parameter_bundle.environment} environment"
force_destroy = true

configuration {
enforce_workgroup_configuration = false
enforce_workgroup_configuration = true

result_configuration {
expected_bucket_owner = var.core_account_id
Expand Down
Original file line number Diff line number Diff line change
@@ -1,10 +1,15 @@
# From Support ticket: Where the client is looking to enable bucket keys on the target S3 bucket, the recommended approach would be:
# 1. Remove the KMS key from the Athena workgroup's encryption configuration: Since the bucket keys will be handling the encryption, you don't need the additional layer of encryption from the workgroup-level KMS key. Removing it will simplify the configuration.
# 2. Rely solely on the bucket keys for encryption: With the bucket keys enabled on the S3 bucket, Athena will automatically use that for encrypting and decrypting the query results. This will reduce the number of API calls.
#tfsec:ignore:aws-athena-enable-at-rest-encryption
resource "aws_athena_workgroup" "housekeeping" {
name = "${local.csi}-housekeeping"
description = "Athena Workgroup for housekeeping queries in ${local.parameter_bundle.environment} environment"
force_destroy = true

#tfsec:ignore:aws-athena-no-encryption-override At AWS Support suggestion
configuration {
enforce_workgroup_configuration = true
enforce_workgroup_configuration = false

result_configuration {
expected_bucket_owner = local.this_account
Expand Down
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
# From Support ticket: Where the client is looking to enable bucket keys on the target S3 bucket, the recommended approach would be:
# 1. Remove the KMS key from the Athena workgroup's encryption configuration: Since the bucket keys will be handling the encryption, you don't need the additional layer of encryption from the workgroup-level KMS key. Removing it will simplify the configuration.
# 2. Rely solely on the bucket keys for encryption: With the bucket keys enabled on the S3 bucket, Athena will automatically use that for encrypting and decrypting the query results. This will reduce the number of API calls.
#tfsec:ignore:aws-athena-enable-at-rest-encryption
resource "aws_athena_workgroup" "ingestion" {
name = "${local.csi}-ingestion"
description = "Athena Workgroup for data ingestion into ${local.parameter_bundle.environment} environment"
Expand Down
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
# From Support ticket: Where the client is looking to enable bucket keys on the target S3 bucket, the recommended approach would be:
# 1. Remove the KMS key from the Athena workgroup's encryption configuration: Since the bucket keys will be handling the encryption, you don't need the additional layer of encryption from the workgroup-level KMS key. Removing it will simplify the configuration.
# 2. Rely solely on the bucket keys for encryption: With the bucket keys enabled on the S3 bucket, Athena will automatically use that for encrypting and decrypting the query results. This will reduce the number of API calls.
#tfsec:ignore:aws-athena-enable-at-rest-encryption
resource "aws_athena_workgroup" "setup" {
name = "${local.csi}-setup"
description = "Athena Workgroup for setup and data migration in ${local.parameter_bundle.environment} environment"
Expand Down
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
# From Support ticket: Where the client is looking to enable bucket keys on the target S3 bucket, the recommended approach would be:
# 1. Remove the KMS key from the Athena workgroup's encryption configuration: Since the bucket keys will be handling the encryption, you don't need the additional layer of encryption from the workgroup-level KMS key. Removing it will simplify the configuration.
# 2. Rely solely on the bucket keys for encryption: With the bucket keys enabled on the S3 bucket, Athena will automatically use that for encrypting and decrypting the query results. This will reduce the number of API calls.
#tfsec:ignore:aws-athena-enable-at-rest-encryption
resource "aws_athena_workgroup" "user" {
name = "${local.csi}-user"
description = "Athena Workgroup for user queries in ${local.parameter_bundle.environment} environment"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@ data "aws_availability_zones" "available" {
state = "available"
}

#tfsec:ignore:aws-ec2-no-public-egress-sgr
resource "aws_security_group" "powerbi_gateway" {
count = var.enable_powerbi_gateway ? 1 : 0

Expand Down
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
#tfsec:ignore:aws-s3-enable-bucket-logging Don't log access to the access-logs bucket itself
resource "aws_s3_bucket" "access_logs" {
bucket = "${local.csi_global}-bucket-logs"
force_destroy = true
Expand Down Expand Up @@ -73,6 +74,10 @@ resource "aws_s3_bucket_public_access_block" "access_logs" {
restrict_public_buckets = true
}

# From Support ticket: Where the client is looking to enable bucket keys on the target S3 bucket, the recommended approach would be:
# 1. Remove the KMS key from the Athena workgroup's encryption configuration: Since the bucket keys will be handling the encryption, you don't need the additional layer of encryption from the workgroup-level KMS key. Removing it will simplify the configuration.
# 2. Rely solely on the bucket keys for encryption: With the bucket keys enabled on the S3 bucket, Athena will automatically use that for encrypting and decrypting the query results. This will reduce the number of API calls.
#tfsec:ignore:aws-s3-encryption-customer-key
resource "aws_s3_bucket_server_side_encryption_configuration" "access_logs" {
bucket = aws_s3_bucket.access_logs.bucket

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,7 @@ resource "aws_iam_policy" "sfn_completed_batch_report" {
policy = data.aws_iam_policy_document.sfn_completed_batch_report.json
}

#tfsec:ignore:aws-iam-no-policy-wildcards
data "aws_iam_policy_document" "sfn_completed_batch_report" {

statement {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,7 @@ resource "aws_iam_policy" "sfn_completed_comms_report" {
policy = data.aws_iam_policy_document.sfn_completed_comms_report.json
}

#tfsec:ignore:aws-iam-no-policy-wildcards
data "aws_iam_policy_document" "sfn_completed_comms_report" {

statement {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ resource "aws_sfn_state_machine" "housekeeping" {
"${aws_athena_named_query.request_item_status_summary_batch_vacuum.id}"
]
database_name = "${aws_glue_catalog_database.reporting.name}"
iam_role = "${aws_iam_role.sfn_housekeeping.arn}"
iam_role = "${aws_iam_role.sfn_housekeeping.arn}"
})

logging_configuration {
Expand Down Expand Up @@ -73,6 +73,7 @@ resource "aws_iam_policy" "sfn_housekeeping" {
policy = data.aws_iam_policy_document.sfn_housekeeping.json
}

#tfsec:ignore:aws-iam-no-policy-wildcards
data "aws_iam_policy_document" "sfn_housekeeping" {

statement {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,7 @@ resource "aws_iam_policy" "sfn_ingestion" {
policy = data.aws_iam_policy_document.sfn_ingestion.json
}

#tfsec:ignore:aws-iam-no-policy-wildcards
data "aws_iam_policy_document" "sfn_ingestion" {

statement {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -64,8 +64,9 @@ resource "aws_iam_policy" "sfn_watchdog" {
policy = data.aws_iam_policy_document.sfn_watchdog.json
}

#tfsec:ignore:aws-iam-no-policy-wildcards
data "aws_iam_policy_document" "sfn_watchdog" {

#tfsec:ignore:aws-iam-no-policy-wildcards
statement {
sid = "AllowAthena"
effect = "Allow"
Expand Down
18 changes: 15 additions & 3 deletions scripts/init.mk
Original file line number Diff line number Diff line change
Expand Up @@ -7,19 +7,31 @@ include scripts/tests/test.mk
# ==============================================================================

runner-act: # Run GitHub Actions locally - mandatory: workflow=[workflow file name], job=[job name] @Development
source ./scripts/docker/docker.lib.sh
. "./scripts/docker/docker.lib.sh"; \
act $(shell [[ "${VERBOSE}" =~ ^(true|yes|y|on|1|TRUE|YES|Y|ON)$$ ]] && echo --verbose) \
--container-architecture linux/amd64 \
--platform ubuntu-latest=$$(name="ghcr.io/nhs-england-tools/github-runner-image" docker-get-image-version-and-pull) \
--platform ubuntu-latest=ghcr.io/catthehacker/ubuntu:full-latest \
--container-options "--privileged" \
--bind \
--pull=false \
--reuse \
--rm \
--defaultbranch main \
--workflows .github/workflows/${workflow}.yaml \
--job ${job}


runner-act-workflow: # Run GitHub Actions locally - mandatory: workflow=[workflow file name] @Development
. "./scripts/docker/docker.lib.sh"; \
act $(shell [[ "${VERBOSE}" =~ ^(true|yes|y|on|1|TRUE|YES|Y|ON)$$ ]] && echo --verbose) \
--container-architecture linux/amd64 \
--platform ubuntu-latest=ghcr.io/catthehacker/ubuntu:full-latest \
--container-options "--privileged" \
--bind \
--pull=false \
--reuse \
--rm \
--workflows .github/workflows/${workflow}.yaml

version-create-effective-file: # Create effective version file - optional: dir=[path to the VERSION file to use, default is '.'], BUILD_DATETIME=[build date and time in the '%Y-%m-%dT%H:%M:%S%z' format generated by the CI/CD pipeline, default is current date and time] @Development
source scripts/docker/docker.lib.sh
version-create-effective-file
Expand Down
12 changes: 9 additions & 3 deletions scripts/terraform/terraform.lib.sh
Original file line number Diff line number Diff line change
Expand Up @@ -53,8 +53,11 @@ function terraform-destroy() {
# dir=[path to a directory where the command will be executed, relative to the project's top-level directory, default is '.']
# opts=[options to pass to the Terraform fmt command, default is '-recursive']
function terraform-fmt() {

  # Format all Terraform code under the project's 'infrastructure' directory.
  # Iterates over each top-level entry and runs 'terraform fmt --recursive' on
  # directories only, so stray files under 'infrastructure/' are skipped.
  #
  # NOTE(review): ${PWD} is assumed to be the project's top-level directory
  # when this is invoked — confirm against the Makefile callers.
  #
  # Fix: the original glob was "${PWD}infrastructure/"* (missing '/'), which
  # expanded to e.g. '/repoinfrastructure/*' and therefore never matched.
  for d in "${PWD}/infrastructure/"*; do
    if [ -d "$d" ]; then
      terraform fmt --recursive "$d"
    fi
  done
}

# Validate Terraform code.
function _terraform() {

  # Run a Terraform command via the project's wrapper script.
  # Environment variables (set by the caller):
  #   dir  - directory Terraform runs in (via -chdir)
  #   opts - extra options appended to the Terraform command (optional)
  # Arguments:
  #   $@ - the Terraform subcommand and its arguments (e.g. 'plan')
  #
  # Fixes: removed leftover debug 'echo' lines that polluted the function's
  # stdout with unquoted expansions; split the 'local' declaration from the
  # command substitution so a 'git rev-parse' failure is not masked.
  local cmd="-chdir=$dir $* ${opts:-}"
  local project_dir
  project_dir="$(git rev-parse --show-toplevel)" || return

  cmd="$cmd" "$project_dir/infrastructure/terraform/bin/terraform.sh"
}

# Remove Terraform files.
Expand Down
22 changes: 14 additions & 8 deletions scripts/terraform/terraform.mk
Original file line number Diff line number Diff line change
Expand Up @@ -46,17 +46,12 @@ clean:: # Remove Terraform files (terraform) - optional: terraform_dir|dir=[path
opts=$(or ${terraform_opts}, ${opts})

_terraform: # Terraform command wrapper - mandatory: cmd=[command to execute]; optional: dir=[path to a directory where the command will be executed, relative to the project's top-level directory, default is one of the module variables or the example directory, if not set], opts=[options to pass to the Terraform command, default is none/empty]
# 'TERRAFORM_STACK' is passed to the functions as environment variable
TERRAFORM_STACK=$(or ${TERRAFORM_STACK}, $(or ${terraform_stack}, $(or ${STACK}, ${stack})))
dir=$(or ${dir}, ${TERRAFORM_STACK})
source scripts/terraform/terraform.lib.sh
. "scripts/terraform/terraform.lib.sh"; \
terraform-${cmd} # 'dir' and 'opts' are accessible by the function as environment variables, if set

terraform-docs: # Terraform-docs check against Terraform files - optional: terraform_dir|dir=[path to a directory where the command will be executed, relative to the project's top-level directory, default is one of the module variables or the example directory, if not set], terraform_opts|opts=[options to pass to the Terraform fmt command, default is '-recursive'] @Quality
for dir in ./infrastructure/terraform/components/* ./infrastructure/terraform/modules/*; do \
if [ -d "$$dir" ]; then \
./scripts/terraform/terraform-docs.sh $$dir; \
fi \
done

# ==============================================================================
# Quality checks - please DO NOT edit this section!

Expand All @@ -65,7 +60,18 @@ terraform-shellscript-lint: # Lint all Terraform module shell scripts @Quality
file=$${file} scripts/shellscript-linter.sh
done

terraform-sec: # TFSEC check against Terraform files - optional: terraform_dir|dir=[path to a directory where the command will be executed, relative to the project's top-level directory, default is one of the module variables or the example directory, if not set], terraform_opts|opts=[options to pass to the Terraform fmt command, default is '-recursive'] @Quality
tfsec infrastructure/terraform \
--force-all-dirs \
--exclude-downloaded-modules \
--config-file scripts/config/tfsec.yaml

terraform-docs: # Terraform-docs check against Terraform files - optional: terraform_dir|dir=[path to a directory where the command will be executed, relative to the project's top-level directory, default is one of the module variables or the example directory, if not set], terraform_opts|opts=[options to pass to the Terraform fmt command, default is '-recursive'] @Quality
for dir in ./infrastructure/terraform/components/* ./infrastructure/terraform/modules/*; do \
if [ -d "$$dir" ]; then \
./scripts/terraform/terraform-docs.sh $$dir; \
fi \
done

# ==============================================================================
# Configuration - please DO NOT edit this section!
Expand Down
2 changes: 0 additions & 2 deletions scripts/terraform/tfsec.sh
Original file line number Diff line number Diff line change
Expand Up @@ -37,12 +37,10 @@ function run-tfsec-natively() {

echo "Running TFSec on directory: $dir_to_scan"
tfsec \
--concise-output \
--force-all-dirs \
--exclude-downloaded-modules \
--config-file scripts/config/tfsec.yaml \
--format text \
--soft-fail \
"$dir_to_scan"

check-tfsec-status
Expand Down
Loading