From a253665c5ae9ad4d3a950f4025607fa61ce865f5 Mon Sep 17 00:00:00 2001 From: Yeonri Date: Wed, 6 May 2026 01:07:25 +0900 Subject: [PATCH 1/3] =?UTF-8?q?chore:=20=EB=B6=80=ED=95=98=20=ED=85=8C?= =?UTF-8?q?=EC=8A=A4=ED=8A=B8=20secret=20=ED=8F=AC=EC=9D=B8=ED=84=B0=20?= =?UTF-8?q?=EA=B0=B1=EC=8B=A0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - 상세내용: 부하 테스트 실행에 필요한 secret submodule 변경 커밋을 상위 인프라 저장소에 반영 --- config/secrets | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config/secrets b/config/secrets index f88a84c..b11ecad 160000 --- a/config/secrets +++ b/config/secrets @@ -1 +1 @@ -Subproject commit f88a84cdab72136d294614fd1e2c855c4a026c43 +Subproject commit b11ecadb9329eb4e84b93e98e94d8ab685605caa From d7456eccf62c1d73c0fd7cbf07dbfcaa806b3f7e Mon Sep 17 00:00:00 2001 From: Yeonri Date: Wed, 6 May 2026 01:07:31 +0900 Subject: [PATCH 2/3] =?UTF-8?q?feat:=20=EB=B6=80=ED=95=98=20=ED=85=8C?= =?UTF-8?q?=EC=8A=A4=ED=8A=B8=20RDS=20=EC=9D=B8=ED=94=84=EB=9D=BC=20?= =?UTF-8?q?=EA=B5=AC=EC=84=B1?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - 상세내용: 부하 테스트용 RDS, 보안 그룹, SSM datasource 파라미터를 Terraform으로 정의 - 상세내용: prod/stage EC2 보안 그룹에서 loadtest RDS 3306 접근을 허용하도록 구성 --- environment/load_test/main.tf | 145 ++++++++++++++++++++++++++++- environment/load_test/output.tf | 49 ++++++++++ environment/load_test/provider.tf | 19 ++++ environment/load_test/variables.tf | 77 ++++++++++++++- 4 files changed, 288 insertions(+), 2 deletions(-) create mode 100644 environment/load_test/output.tf diff --git a/environment/load_test/main.tf b/environment/load_test/main.tf index 995074f..72b1c58 100644 --- a/environment/load_test/main.tf +++ b/environment/load_test/main.tf @@ -1 +1,144 @@ -# TODO:: 부하 테스트용 EC2 인스턴스 및 보안 그룹 리소스 정의 필요 +data "aws_vpc" "default" { + default = true +} + +data "aws_subnets" "default" { + filter { + name = "vpc-id" + values = [data.aws_vpc.default.id] + } +} + 
+data "aws_instance" "prod_api" { + filter { + name = "tag:Name" + values = [var.prod_api_instance_name] + } + + filter { + name = "instance-state-name" + values = ["running"] + } +} + +data "aws_instance" "stage_api" { + filter { + name = "tag:Name" + values = [var.stage_api_instance_name] + } + + filter { + name = "instance-state-name" + values = ["running"] + } +} + +data "aws_db_instance" "prod" { + db_instance_identifier = var.prod_rds_identifier +} + +data "aws_ssm_parameter" "db_root_username" { + name = var.load_test_db_username_parameter_name +} + +data "aws_ssm_parameter" "db_root_password" { + name = var.load_test_db_password_parameter_name + with_decryption = true +} + +locals { + db_root_username = data.aws_ssm_parameter.db_root_username.value + db_root_password = data.aws_ssm_parameter.db_root_password.value + + source_security_group_ids = setunion( + data.aws_instance.prod_api.vpc_security_group_ids, + data.aws_instance.stage_api.vpc_security_group_ids + ) +} + +resource "aws_security_group" "load_test_db" { + name = "sc-load-test-db-sg" + description = "Security group for load test RDS" + vpc_id = data.aws_vpc.default.id + + egress { + from_port = 0 + to_port = 0 + protocol = "-1" + cidr_blocks = ["0.0.0.0/0"] + } + + tags = { + Name = "solid-connection-load-test-db-sg" + } +} + +resource "aws_security_group_rule" "load_test_db_mysql" { + for_each = local.source_security_group_ids + + type = "ingress" + description = "MySQL from prod/stage API server" + from_port = 3306 + to_port = 3306 + protocol = "tcp" + security_group_id = aws_security_group.load_test_db.id + source_security_group_id = each.value +} + +resource "aws_db_subnet_group" "load_test" { + name = "sc-load-test-db-subnet-group" + subnet_ids = data.aws_subnets.default.ids + + tags = { + Name = "solid-connection-load-test-db-subnet-group" + } +} + +resource "aws_db_instance" "load_test" { + identifier = var.rds_identifier + allocated_storage = var.allocated_storage + engine = "mysql" + 
engine_version = var.db_engine_version + instance_class = var.db_instance_class + db_name = var.db_name + username = local.db_root_username + password = local.db_root_password + parameter_group_name = var.db_parameter_group_name + db_subnet_group_name = aws_db_subnet_group.load_test.name + vpc_security_group_ids = [aws_security_group.load_test_db.id] + publicly_accessible = false + skip_final_snapshot = true + copy_tags_to_snapshot = true + deletion_protection = false + backup_retention_period = 0 + apply_immediately = true + storage_encrypted = true + kms_key_id = var.kms_key_arn + + tags = { + Name = var.rds_identifier + } +} + +resource "aws_ssm_parameter" "load_test_datasource_url" { + name = "${var.load_test_parameter_prefix}/spring.datasource.url" + type = "String" + value = "jdbc:mysql://${aws_db_instance.load_test.address}:${aws_db_instance.load_test.port}/${var.db_name}?serverTimezone=Asia/Seoul&characterEncoding=UTF-8" + overwrite = true +} + +resource "aws_ssm_parameter" "load_test_datasource_username" { + name = "${var.load_test_parameter_prefix}/spring.datasource.username" + type = "String" + value = local.db_root_username + overwrite = true +} + +resource "aws_ssm_parameter" "load_test_datasource_password" { + name = "${var.load_test_parameter_prefix}/spring.datasource.password" + type = "SecureString" + value = local.db_root_password + key_id = var.ssm_kms_key_id + overwrite = true + tier = "Standard" +} diff --git a/environment/load_test/output.tf b/environment/load_test/output.tf new file mode 100644 index 0000000..55390ac --- /dev/null +++ b/environment/load_test/output.tf @@ -0,0 +1,49 @@ +output "load_test_rds_endpoint" { + description = "Load test RDS endpoint" + value = aws_db_instance.load_test.address +} + +output "load_test_rds_port" { + description = "Load test RDS port" + value = aws_db_instance.load_test.port +} + +output "load_test_rds_identifier" { + description = "Load test RDS identifier" + value = 
aws_db_instance.load_test.identifier +} + +output "load_test_db_name" { + description = "Load test database name" + value = var.db_name +} + +output "prod_rds_endpoint" { + description = "Prod RDS endpoint used as dump source" + value = data.aws_db_instance.prod.address +} + +output "prod_rds_port" { + description = "Prod RDS port" + value = data.aws_db_instance.prod.port +} + +output "prod_api_instance_id" { + description = "Prod API EC2 instance ID used to run migration commands" + value = data.aws_instance.prod_api.id +} + +output "stage_api_instance_id" { + description = "Stage API EC2 instance ID" + value = data.aws_instance.stage_api.id +} + +output "stage_api_public_ip" { + description = "Stage API EC2 public IP" + value = data.aws_instance.stage_api.public_ip +} + +output "load_test_ssm_parameter_prefix" { + description = "SSM Parameter Store prefix for load test datasource values" + value = var.load_test_parameter_prefix +} diff --git a/environment/load_test/provider.tf b/environment/load_test/provider.tf index 3c3f8d1..8b41b3d 100644 --- a/environment/load_test/provider.tf +++ b/environment/load_test/provider.tf @@ -1,3 +1,22 @@ +terraform { + required_version = ">= 1.10.0" + + required_providers { + aws = { + source = "hashicorp/aws" + version = ">= 5.0" + } + } + + backend "s3" { + bucket = "solid-connection-tfstate" + key = "env/load_test/terraform.tfstate" + region = "ap-northeast-2" + use_lockfile = true + encrypt = true + } +} + provider "aws" { region = "ap-northeast-2" default_tags { diff --git a/environment/load_test/variables.tf b/environment/load_test/variables.tf index 6f74e1f..7d4d639 100644 --- a/environment/load_test/variables.tf +++ b/environment/load_test/variables.tf @@ -1 +1,76 @@ -# TODO:: 부하 테스트 인스턴스용 변수 정의 +variable "rds_identifier" { + description = "RDS identifier for load test" + type = string +} + +variable "db_instance_class" { + description = "RDS instance class for load test" + type = string +} + +variable "allocated_storage" 
{ + description = "RDS storage in GiB" + type = number + default = 20 +} + +variable "db_engine_version" { + description = "MySQL engine version" + type = string +} + +variable "db_parameter_group_name" { + description = "MySQL parameter group name" + type = string +} + +variable "db_name" { + description = "Application database name" + type = string + default = "solid_connection" +} + +variable "load_test_db_username_parameter_name" { + description = "SSM parameter name containing the load test DB root username" + type = string +} + +variable "load_test_db_password_parameter_name" { + description = "SSM SecureString parameter name containing the load test DB root password" + type = string +} + +variable "kms_key_arn" { + description = "KMS key ARN for RDS storage encryption" + type = string +} + +variable "ssm_kms_key_id" { + description = "KMS key ID or ARN for SSM SecureString. Null uses the AWS managed aws/ssm key." + type = string + default = null + nullable = true +} + +variable "prod_rds_identifier" { + description = "Source prod RDS identifier" + type = string +} + +variable "prod_api_instance_name" { + description = "Name tag of the prod API EC2 instance used to run dump/restore" + type = string + default = "solid-connection-server-prod" +} + +variable "stage_api_instance_name" { + description = "Name tag of the stage API EC2 instance that will connect to load test RDS" + type = string + default = "solid-connection-server-stage" +} + +variable "load_test_parameter_prefix" { + description = "SSM Parameter Store prefix for load test datasource values" + type = string + default = "/solid-connection/loadtest" +} From 95da845031ce00a915f26dfb6bea3d7a80d9c21d Mon Sep 17 00:00:00 2001 From: Yeonri Date: Wed, 6 May 2026 01:07:36 +0900 Subject: [PATCH 3/3] =?UTF-8?q?feat:=20=EB=B6=80=ED=95=98=20=ED=85=8C?= =?UTF-8?q?=EC=8A=A4=ED=8A=B8=20=EC=8B=A4=ED=96=89=20=EC=8A=A4=ED=81=AC?= =?UTF-8?q?=EB=A6=BD=ED=8A=B8=20=EC=B6=94=EA=B0=80?= MIME-Version: 1.0 Content-Type: 
text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - 상세내용: start.sh에서 RDS 생성, stage 전환, prod 데이터 복사를 자동화 - 상세내용: stop.sh에서 stage 원복과 loadtest RDS destroy 흐름을 제공 - 상세내용: Windows와 macOS/Linux 실행 환경에서 사용할 bash 기반 절차를 문서화 --- scripts/load_test/README.md | 49 +++++++ scripts/load_test/start.sh | 264 ++++++++++++++++++++++++++++++ scripts/load_test/stop.sh | 140 +++++++++++++++ 3 files changed, 453 insertions(+) create mode 100644 scripts/load_test/README.md create mode 100644 scripts/load_test/start.sh create mode 100644 scripts/load_test/stop.sh diff --git a/scripts/load_test/README.md b/scripts/load_test/README.md new file mode 100644 index 0000000..2e4d129 --- /dev/null +++ b/scripts/load_test/README.md @@ -0,0 +1,49 @@ +# Load Test Automation + +This automation creates a temporary load test RDS instance, copies prod RDS data +into it, writes load test datasource values to Parameter Store, and optionally +stops/starts the stage application through SSM Run Command. + +## Flow + +1. `scripts/load_test/start.sh` runs `terraform apply` in `environment/load_test`. +2. Terraform creates the load test RDS and writes: + - `/solid-connection/loadtest/spring.datasource.url` + - `/solid-connection/loadtest/spring.datasource.username` + - `/solid-connection/loadtest/spring.datasource.password` +3. The script stores DB migration credentials in temporary SSM parameters. +4. The prod EC2 instance runs `mysqldump` against prod RDS and restores it into + the load test RDS. +5. The optional stage stop command can pause the stage app before the load test. +6. `scripts/load_test/stop.sh` can run an optional stage start command and then destroy + only the load test Terraform stack.
+ +## Example + +```bash +scripts/load_test/start.sh \ + --switch-stage-to-loadtest \ + --stage-ssh-key ./stage-key.pem +``` + +```bash +scripts/load_test/stop.sh \ + --restore-stage-dev \ + --stage-ssh-key ./stage-key.pem +``` + +## Notes + +- The prod and stage EC2 instances are looked up by their `Name` tags. +- Prod DB username/password are read from Parameter Store. The default paths are + `/solid-connection/prod/spring.datasource.username` and + `/solid-connection/prod/spring.datasource.password`. +- Load test DB username/password are also read from Parameter Store. The default + paths are `/solid-connection/loadtest/spring.datasource.username` and + `/solid-connection/loadtest/spring.datasource.password`. +- The load test RDS security group allows MySQL only from the security groups + attached to the prod and stage API EC2 instances. +- The prod EC2 instance must have SSM access and permission to read the temporary + migration parameters. +- Keep the real `load_test.tfvars` in the secret submodule or another ignored + local location. Do not commit it. 
diff --git a/scripts/load_test/start.sh b/scripts/load_test/start.sh new file mode 100644 index 0000000..d962215 --- /dev/null +++ b/scripts/load_test/start.sh @@ -0,0 +1,264 @@ +#!/usr/bin/env bash +set -euo pipefail + +TERRAFORM_DIR="environment/load_test" +VAR_FILE="../../config/secrets/load_test.tfvars" +DATABASE_NAME="solid_connection" +MIGRATION_PARAMETER_PREFIX="/solid-connection/loadtest/migration" +PROD_DB_USERNAME_PARAMETER="/solid-connection/prod/spring.datasource.username" +PROD_DB_PASSWORD_PARAMETER="/solid-connection/prod/spring.datasource.password" +LOADTEST_DB_USERNAME_PARAMETER="/solid-connection/loadtest/spring.datasource.username" +LOADTEST_DB_PASSWORD_PARAMETER="/solid-connection/loadtest/spring.datasource.password" +SWITCH_STAGE_TO_LOADTEST="false" +STAGE_SSH_USER="ubuntu" +STAGE_SSH_KEY="" +STAGE_APP_DIR="/home/ubuntu/solid-connection-dev" +STAGE_COMPOSE_FILE="docker-compose.dev.yml" +SKIP_TERRAFORM_APPLY="false" +SKIP_DATA_COPY="false" + +usage() { + cat <<'EOF' +Usage: scripts/load_test/start.sh [options] + +Options: + --terraform-dir PATH Default: environment/load_test + --var-file PATH Default: ../../config/secrets/load_test.tfvars + --prod-db-username-parameter Default: /solid-connection/prod/spring.datasource.username + --prod-db-password-parameter Default: /solid-connection/prod/spring.datasource.password + --loadtest-db-username-parameter Default: /solid-connection/loadtest/spring.datasource.username + --loadtest-db-password-parameter Default: /solid-connection/loadtest/spring.datasource.password + --database-name VALUE Default: solid_connection + --migration-prefix VALUE Default: /solid-connection/loadtest/migration + --switch-stage-to-loadtest Restart stage app over SSH with dev,loadtest profiles + --stage-ssh-user VALUE Default: ubuntu + --stage-ssh-key PATH Required with --switch-stage-to-loadtest + --stage-app-dir PATH Default: /home/ubuntu/solid-connection-dev + --stage-compose-file VALUE Default: docker-compose.dev.yml + 
--skip-terraform-apply + --skip-data-copy + -h, --help +EOF +} + +while [[ $# -gt 0 ]]; do + case "$1" in + --terraform-dir) TERRAFORM_DIR="$2"; shift 2 ;; + --var-file) VAR_FILE="$2"; shift 2 ;; + --prod-db-username-parameter) PROD_DB_USERNAME_PARAMETER="$2"; shift 2 ;; + --prod-db-password-parameter) PROD_DB_PASSWORD_PARAMETER="$2"; shift 2 ;; + --loadtest-db-username-parameter) LOADTEST_DB_USERNAME_PARAMETER="$2"; shift 2 ;; + --loadtest-db-password-parameter) LOADTEST_DB_PASSWORD_PARAMETER="$2"; shift 2 ;; + --database-name) DATABASE_NAME="$2"; shift 2 ;; + --migration-prefix) MIGRATION_PARAMETER_PREFIX="$2"; shift 2 ;; + --switch-stage-to-loadtest) SWITCH_STAGE_TO_LOADTEST="true"; shift ;; + --stage-ssh-user) STAGE_SSH_USER="$2"; shift 2 ;; + --stage-ssh-key) STAGE_SSH_KEY="$2"; shift 2 ;; + --stage-app-dir) STAGE_APP_DIR="$2"; shift 2 ;; + --stage-compose-file) STAGE_COMPOSE_FILE="$2"; shift 2 ;; + --skip-terraform-apply) SKIP_TERRAFORM_APPLY="true"; shift ;; + --skip-data-copy) SKIP_DATA_COPY="true"; shift ;; + -h|--help) usage; exit 0 ;; + *) echo "Unknown option: $1" >&2; usage; exit 1 ;; + esac +done + +require_value() { + local name="$1" + local value="$2" + if [[ -z "$value" ]]; then + echo "Missing required option: $name" >&2 + exit 1 + fi +} + +require_command() { + if ! 
command -v "$1" >/dev/null 2>&1; then + echo "Required command not found: $1" >&2 + exit 1 + fi +} + +require_command terraform +require_command aws +require_command jq +require_command ssh + +tf_output() { + terraform -chdir="$TERRAFORM_DIR" output -raw "$1" +} + +send_ssm_command() { + local instance_id="$1" + local comment="$2" + local commands_json="$3" + + local command_id + command_id="$(aws ssm send-command \ + --instance-ids "$instance_id" \ + --document-name "AWS-RunShellScript" \ + --comment "$comment" \ + --parameters "$commands_json" \ + --query "Command.CommandId" \ + --output text)" + + local status + while true; do + sleep 5 + status="$(aws ssm get-command-invocation \ + --command-id "$command_id" \ + --instance-id "$instance_id" \ + --query "Status" \ + --output text 2>/dev/null || true)" + + case "$status" in + Pending|InProgress|Delayed|"") continue ;; + Success) break ;; + *) + aws ssm get-command-invocation \ + --command-id "$command_id" \ + --instance-id "$instance_id" \ + --output json || true + echo "SSM command failed with status $status: $comment" >&2 + exit 1 + ;; + esac + done +} + +delete_temp_parameters() { + aws ssm delete-parameter --name "$MIGRATION_PARAMETER_PREFIX/prod-db-username" >/dev/null 2>&1 || true + aws ssm delete-parameter --name "$MIGRATION_PARAMETER_PREFIX/prod-db-password" >/dev/null 2>&1 || true + aws ssm delete-parameter --name "$MIGRATION_PARAMETER_PREFIX/loadtest-db-username" >/dev/null 2>&1 || true + aws ssm delete-parameter --name "$MIGRATION_PARAMETER_PREFIX/loadtest-db-password" >/dev/null 2>&1 || true +} + +if [[ "$SKIP_TERRAFORM_APPLY" != "true" ]]; then + terraform -chdir="$TERRAFORM_DIR" init + terraform -chdir="$TERRAFORM_DIR" apply -auto-approve -var-file="$VAR_FILE" +fi + +prod_instance_id="$(tf_output prod_api_instance_id)" +stage_instance_id="$(tf_output stage_api_instance_id)" +stage_public_ip="$(tf_output stage_api_public_ip)" +prod_endpoint="$(tf_output prod_rds_endpoint)" +prod_port="$(tf_output 
prod_rds_port)" +loadtest_endpoint="$(tf_output load_test_rds_endpoint)" +loadtest_port="$(tf_output load_test_rds_port)" + +if [[ "$SWITCH_STAGE_TO_LOADTEST" == "true" ]]; then + require_value "--stage-ssh-key" "$STAGE_SSH_KEY" + + ssh -i "$STAGE_SSH_KEY" \ + -o StrictHostKeyChecking=no \ + "$STAGE_SSH_USER@$stage_public_ip" \ + "APP_DIR='$STAGE_APP_DIR' COMPOSE_FILE='$STAGE_COMPOSE_FILE' bash -s" <<'REMOTE' +set -euo pipefail + +cd "$APP_DIR" + +CURRENT_IMAGE="$(docker inspect -f '{{.Config.Image}}' solid-connection-dev 2>/dev/null || true)" +if [[ -z "$CURRENT_IMAGE" ]]; then + echo "solid-connection-dev container is not running; cannot infer image tag" >&2 + exit 1 +fi + +OWNER_LOWERCASE="$(echo "$CURRENT_IMAGE" | sed -E 's#^ghcr.io/([^/]+)/.*#\1#')" +IMAGE_TAG="$(echo "$CURRENT_IMAGE" | sed -E 's#.*:([^:]+)$#\1#')" + +cat > docker-compose.loadtest.override.yml <<'YAML' +services: + solid-connection-dev: + environment: + - SPRING_PROFILES_ACTIVE=dev,loadtest + - AWS_REGION=ap-northeast-2 + - SPRING_DATA_REDIS_HOST=127.0.0.1 + - SPRING_DATA_REDIS_PORT=6379 +YAML + +docker compose -f "$COMPOSE_FILE" -f docker-compose.loadtest.override.yml down || true +OWNER_LOWERCASE="$OWNER_LOWERCASE" IMAGE_TAG="$IMAGE_TAG" \ + docker compose -f "$COMPOSE_FILE" -f docker-compose.loadtest.override.yml up -d solid-connection-dev +REMOTE +fi + +if [[ "$SKIP_DATA_COPY" != "true" ]]; then + trap delete_temp_parameters EXIT + + prod_db_username="$(aws ssm get-parameter \ + --name "$PROD_DB_USERNAME_PARAMETER" \ + --query "Parameter.Value" \ + --output text)" + + prod_db_password="$(aws ssm get-parameter \ + --name "$PROD_DB_PASSWORD_PARAMETER" \ + --with-decryption \ + --query "Parameter.Value" \ + --output text)" + + loadtest_db_username="$(aws ssm get-parameter \ + --name "$LOADTEST_DB_USERNAME_PARAMETER" \ + --query "Parameter.Value" \ + --output text)" + + loadtest_db_password="$(aws ssm get-parameter \ + --name "$LOADTEST_DB_PASSWORD_PARAMETER" \ + --with-decryption \ + --query 
"Parameter.Value" \ + --output text)" + + aws ssm put-parameter \ + --name "$MIGRATION_PARAMETER_PREFIX/prod-db-username" \ + --type String \ + --value "$prod_db_username" \ + --overwrite >/dev/null + + aws ssm put-parameter \ + --name "$MIGRATION_PARAMETER_PREFIX/prod-db-password" \ + --type SecureString \ + --value "$prod_db_password" \ + --overwrite >/dev/null + + aws ssm put-parameter \ + --name "$MIGRATION_PARAMETER_PREFIX/loadtest-db-username" \ + --type String \ + --value "$loadtest_db_username" \ + --overwrite >/dev/null + + aws ssm put-parameter \ + --name "$MIGRATION_PARAMETER_PREFIX/loadtest-db-password" \ + --type SecureString \ + --value "$loadtest_db_password" \ + --overwrite >/dev/null + + copy_commands_json="$(jq -cn \ + --arg prefix "$MIGRATION_PARAMETER_PREFIX" \ + --arg prod_endpoint "$prod_endpoint" \ + --arg prod_port "$prod_port" \ + --arg loadtest_endpoint "$loadtest_endpoint" \ + --arg loadtest_port "$loadtest_port" \ + --arg database "$DATABASE_NAME" \ + '{ + commands: [ + "set -euo pipefail", + "export DEBIAN_FRONTEND=noninteractive", + "if ! command -v mysqldump >/dev/null 2>&1 || ! 
command -v mysql >/dev/null 2>&1; then sudo apt-get update && sudo apt-get install -y mysql-client; fi", + "PROD_USER=$(aws ssm get-parameter --name \($prefix)/prod-db-username --query Parameter.Value --output text)", + "PROD_PASSWORD=$(aws ssm get-parameter --name \($prefix)/prod-db-password --with-decryption --query Parameter.Value --output text)", + "LOAD_USER=$(aws ssm get-parameter --name \($prefix)/loadtest-db-username --query Parameter.Value --output text)", + "LOAD_PASSWORD=$(aws ssm get-parameter --name \($prefix)/loadtest-db-password --with-decryption --query Parameter.Value --output text)", + "DUMP_FILE=/tmp/solid-connection-loadtest-$(date +%Y%m%d%H%M%S).sql.gz", + "MYSQL_PWD=\"$PROD_PASSWORD\" mysqldump --single-transaction --set-gtid-purged=OFF --column-statistics=0 -h \($prod_endpoint) -P \($prod_port) -u \"$PROD_USER\" \($database) | gzip > \"$DUMP_FILE\"", + "MYSQL_PWD=\"$LOAD_PASSWORD\" mysql -h \($loadtest_endpoint) -P \($loadtest_port) -u \"$LOAD_USER\" -e \"DROP DATABASE IF EXISTS \\\`\($database)\\\`; CREATE DATABASE \\\`\($database)\\\` CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;\"", + "gunzip -c \"$DUMP_FILE\" | MYSQL_PWD=\"$LOAD_PASSWORD\" mysql -h \($loadtest_endpoint) -P \($loadtest_port) -u \"$LOAD_USER\" \($database)", + "rm -f \"$DUMP_FILE\"" + ] + }')" + + send_ssm_command "$prod_instance_id" "Copy prod RDS data to load test RDS" "$copy_commands_json" +fi + +echo "Load test environment is ready." 
+echo "RDS endpoint: ${loadtest_endpoint}:${loadtest_port}" +echo "Stage instance: ${stage_instance_id}" +echo "Stage public IP: ${stage_public_ip}" diff --git a/scripts/load_test/stop.sh b/scripts/load_test/stop.sh new file mode 100644 index 0000000..f0857ec --- /dev/null +++ b/scripts/load_test/stop.sh @@ -0,0 +1,140 @@ +#!/usr/bin/env bash +set -euo pipefail + +TERRAFORM_DIR="environment/load_test" +VAR_FILE="../../config/secrets/load_test.tfvars" +STAGE_START_COMMAND="" +RESTORE_STAGE_DEV="false" +STAGE_SSH_USER="ubuntu" +STAGE_SSH_KEY="" +STAGE_APP_DIR="/home/ubuntu/solid-connection-dev" +STAGE_COMPOSE_FILE="docker-compose.dev.yml" +SKIP_TERRAFORM_DESTROY="false" + +usage() { + cat <<'EOF' +Usage: scripts/load_test/stop.sh [options] + +Options: + --terraform-dir PATH Default: environment/load_test + --var-file PATH Default: ../../config/secrets/load_test.tfvars + --restore-stage-dev Restart stage app over SSH with dev profile + --stage-ssh-user VALUE Default: ubuntu + --stage-ssh-key PATH Required with --restore-stage-dev + --stage-app-dir PATH Default: /home/ubuntu/solid-connection-dev + --stage-compose-file VALUE Default: docker-compose.dev.yml + --skip-terraform-destroy + -h, --help +EOF +} + +while [[ $# -gt 0 ]]; do + case "$1" in + --terraform-dir) TERRAFORM_DIR="$2"; shift 2 ;; + --var-file) VAR_FILE="$2"; shift 2 ;; + --restore-stage-dev) RESTORE_STAGE_DEV="true"; shift ;; + --stage-ssh-user) STAGE_SSH_USER="$2"; shift 2 ;; + --stage-ssh-key) STAGE_SSH_KEY="$2"; shift 2 ;; + --stage-app-dir) STAGE_APP_DIR="$2"; shift 2 ;; + --stage-compose-file) STAGE_COMPOSE_FILE="$2"; shift 2 ;; + --skip-terraform-destroy) SKIP_TERRAFORM_DESTROY="true"; shift ;; + -h|--help) usage; exit 0 ;; + *) echo "Unknown option: $1" >&2; usage; exit 1 ;; + esac +done + +require_command() { + if ! 
command -v "$1" >/dev/null 2>&1; then + echo "Required command not found: $1" >&2 + exit 1 + fi +} + +require_command terraform +require_command aws +require_command jq +require_command ssh + +require_value() { + local name="$1" + local value="$2" + if [[ -z "$value" ]]; then + echo "Missing required option: $name" >&2 + exit 1 + fi +} + +tf_output() { + terraform -chdir="$TERRAFORM_DIR" output -raw "$1" +} + +send_ssm_command() { + local instance_id="$1" + local comment="$2" + local commands_json="$3" + + local command_id + command_id="$(aws ssm send-command \ + --instance-ids "$instance_id" \ + --document-name "AWS-RunShellScript" \ + --comment "$comment" \ + --parameters "$commands_json" \ + --query "Command.CommandId" \ + --output text)" + + local status + while true; do + sleep 5 + status="$(aws ssm get-command-invocation \ + --command-id "$command_id" \ + --instance-id "$instance_id" \ + --query "Status" \ + --output text 2>/dev/null || true)" + + case "$status" in + Pending|InProgress|Delayed|"") continue ;; + Success) break ;; + *) + aws ssm get-command-invocation \ + --command-id "$command_id" \ + --instance-id "$instance_id" \ + --output json || true + echo "SSM command failed with status $status: $comment" >&2 + exit 1 + ;; + esac + done +} + +if [[ "$RESTORE_STAGE_DEV" == "true" ]]; then + require_value "--stage-ssh-key" "$STAGE_SSH_KEY" + stage_public_ip="$(tf_output stage_api_public_ip)" + + ssh -i "$STAGE_SSH_KEY" \ + -o StrictHostKeyChecking=no \ + "$STAGE_SSH_USER@$stage_public_ip" \ + "APP_DIR='$STAGE_APP_DIR' COMPOSE_FILE='$STAGE_COMPOSE_FILE' bash -s" <<'REMOTE' +set -euo pipefail + +cd "$APP_DIR" + +CURRENT_IMAGE="$(docker inspect -f '{{.Config.Image}}' solid-connection-dev 2>/dev/null || true)" +if [[ -z "$CURRENT_IMAGE" ]]; then + echo "solid-connection-dev container is not running; cannot infer image tag" >&2 + exit 1 +fi + +OWNER_LOWERCASE="$(echo "$CURRENT_IMAGE" | sed -E 's#^ghcr.io/([^/]+)/.*#\1#')" +IMAGE_TAG="$(echo "$CURRENT_IMAGE" | 
sed -E 's#.*:([^:]+)$#\1#')" + +rm -f docker-compose.loadtest.override.yml +docker compose -f "$COMPOSE_FILE" down || true +OWNER_LOWERCASE="$OWNER_LOWERCASE" IMAGE_TAG="$IMAGE_TAG" docker compose -f "$COMPOSE_FILE" up -d +REMOTE +fi + +if [[ "$SKIP_TERRAFORM_DESTROY" != "true" ]]; then + terraform -chdir="$TERRAFORM_DIR" destroy -auto-approve -var-file="$VAR_FILE" +fi + +echo "Load test environment has been stopped."