diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 8609640..08f257d 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -13,12 +13,17 @@ env: AWS_REGION: ${{ secrets.AWS_REGION }} TERRAFORM_VERSION: 1.7.5 NODE_VERSION: '20' - CHECK_FAIL_ON: 'NONE' + SUPABASE_URL: ${{ secrets.SUPABASE_URL }} + SUPABASE_KEY: ${{ secrets.SUPABASE_KEY }} + AB_TRAFFIC_WEIGHT: '0.5' jobs: - static-analysis: - name: Static Analysis + terraform-plan: + name: Terraform Plan runs-on: ubuntu-latest + env: + TF_VAR_supabase_url: ${{ env.SUPABASE_URL }} + TF_VAR_supabase_key: ${{ env.SUPABASE_KEY }} steps: - name: Checkout uses: actions/checkout@v4 @@ -27,77 +32,23 @@ jobs: uses: actions/setup-node@v4 with: node-version: ${{ env.NODE_VERSION }} + cache: 'npm' + cache-dependency-path: | + frontend/package-lock.json + lambda/package-lock.json - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: '3.11' - - - name: Install Checkov - run: pip install checkov - - - name: Install backend dependencies - if: ${{ hashFiles('backend/package.json') != '' }} - working-directory: backend - run: | - if [ -f package-lock.json ]; then - npm ci - else - npm install - fi - - - name: Install frontend dependencies - if: ${{ hashFiles('frontend/package.json') != '' }} - working-directory: frontend - run: | - if [ -f package-lock.json ]; then - npm ci - else - npm install - fi - - - name: Run Checkov on Terraform - run: | - if [ -d terraform ]; then - set +e - checkov -d terraform --framework terraform --output cli --output json --output-file-path . - status=$? 
- if [ -f results_json.json ]; then - mv results_json.json checkov_results.json - else - echo '{"summary":{"failed":0,"passed":0,"skipped":0},"results":{"failed_checks":[]}}' > checkov_results.json - fi - if [ "${{ env.CHECK_FAIL_ON }}" = "FAILURE" ] && [ $status -ne 0 ]; then - echo "Checkov detected failures" >&2 - cat checkov_results.json - exit $status - fi - else - echo '{"summary":{"failed":0,"passed":0,"skipped":0},"results":{"failed_checks":[]}}' > checkov_results.json - fi - - - name: Upload Checkov report - uses: actions/upload-artifact@v4 + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v4 with: - name: static-analysis-results - path: checkov_results.json - - frontend-build: - name: Frontend Build - runs-on: ubuntu-latest - needs: static-analysis - steps: - - name: Checkout - uses: actions/checkout@v4 + role-to-assume: ${{ secrets.AWS_DEPLOY_ROLE_ARN }} + aws-region: ${{ env.AWS_REGION }} - - name: Set up Node.js - uses: actions/setup-node@v4 + - name: Set up Terraform + uses: hashicorp/setup-terraform@v3 with: - node-version: ${{ env.NODE_VERSION }} - cache: 'npm' - cache-dependency-path: frontend/package-lock.json + terraform_version: ${{ env.TERRAFORM_VERSION }} - - name: Install dependencies + - name: Install frontend dependencies working-directory: frontend run: | if [ -f package-lock.json ]; then @@ -110,67 +61,6 @@ jobs: working-directory: frontend run: npm run build - - name: List dist contents - working-directory: frontend - run: | - ls -la dist - ls -la dist/assets || echo "Assets directory not found!" 
- - - name: Upload frontend artifact - uses: actions/upload-artifact@v4 - with: - name: frontend-dist - path: frontend/dist - - create-lambda-bucket: - name: Ensure Lambda S3 Bucket - runs-on: ubuntu-latest - needs: [static-analysis, frontend-build] - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v4 - with: - role-to-assume: ${{ secrets.AWS_DEPLOY_ROLE_ARN }} - aws-region: ${{ env.AWS_REGION }} - - - name: Ensure Lambda artifact bucket exists - id: bucket - run: | - ACCOUNT_ID=$(aws sts get-caller-identity --query Account --output text) - BUCKET="transaction-simulator-lambda-code-${ACCOUNT_ID}-${AWS_REGION}" - aws s3 mb "s3://${BUCKET}" 2>/dev/null || echo "Bucket already exists" - echo "bucket=${BUCKET}" >> "$GITHUB_OUTPUT" - - outputs: - bucket: ${{ steps.bucket.outputs.bucket }} - - lambda-package: - name: Package Lambda Function - runs-on: ubuntu-latest - needs: create-lambda-bucket - outputs: - lambda-bucket: ${{ steps.lambda-info.outputs.lambda_bucket }} - lambda-key: ${{ steps.lambda-info.outputs.lambda_key }} - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v4 - with: - role-to-assume: ${{ secrets.AWS_DEPLOY_ROLE_ARN }} - aws-region: ${{ env.AWS_REGION }} - - - name: Set up Node.js - uses: actions/setup-node@v4 - with: - node-version: ${{ env.NODE_VERSION }} - cache: 'npm' - cache-dependency-path: lambda/package-lock.json - - name: Install Lambda dependencies working-directory: lambda run: | @@ -180,77 +70,33 @@ jobs: npm install fi - - name: Create Lambda package - run: zip -r lambda.zip lambda - - - name: Upload Lambda package to S3 - id: lambda-info - env: - BUCKET: ${{ needs.create-lambda-bucket.outputs.bucket }} - run: | - ACCOUNT_ID=$(aws sts get-caller-identity --query Account --output text) - 
BUCKET_NAME="${BUCKET:-transaction-simulator-lambda-code-${ACCOUNT_ID}-${AWS_REGION}}" - KEY="lambda-packages/lambda-${GITHUB_SHA}.zip" - aws s3 cp lambda.zip "s3://${BUCKET_NAME}/${KEY}" --metadata-directive REPLACE --acl bucket-owner-full-control --sse AES256 - echo "LAMBDA_S3_BUCKET=${BUCKET_NAME}" > build.env - echo "LAMBDA_S3_KEY=${KEY}" >> build.env - echo "lambda_bucket=${BUCKET_NAME}" >> "$GITHUB_OUTPUT" - echo "lambda_key=${KEY}" >> "$GITHUB_OUTPUT" - echo "account_id=${ACCOUNT_ID}" >> "$GITHUB_OUTPUT" - - - name: Upload Lambda artifacts - uses: actions/upload-artifact@v4 - with: - name: lambda-artifacts - path: | - lambda.zip - build.env - - terraform-plan: - name: Terraform Plan - runs-on: ubuntu-latest - needs: lambda-package - outputs: - plan-path: tfplan - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v4 - with: - role-to-assume: ${{ secrets.AWS_DEPLOY_ROLE_ARN }} - aws-region: ${{ env.AWS_REGION }} - - - name: Set up Terraform - uses: hashicorp/setup-terraform@v3 - with: - terraform_version: ${{ env.TERRAFORM_VERSION }} - - - name: Download Lambda metadata - uses: actions/download-artifact@v4 - with: - name: lambda-artifacts - path: artifacts/lambda - - - name: Load Lambda S3 location - id: load-lambda - run: | - set -euo pipefail - source artifacts/lambda/build.env - echo "bucket=$LAMBDA_S3_BUCKET" >> "$GITHUB_OUTPUT" - echo "key=$LAMBDA_S3_KEY" >> "$GITHUB_OUTPUT" - - name: Terraform init working-directory: terraform run: terraform init -input=false -reconfigure + - name: Terraform validate + working-directory: terraform + run: terraform validate + - name: Terraform plan working-directory: terraform - env: - LAMBDA_S3_BUCKET: ${{ steps.load-lambda.outputs.bucket }} - LAMBDA_S3_KEY: ${{ steps.load-lambda.outputs.key }} - run: terraform plan -var="lambda_s3_bucket=${LAMBDA_S3_BUCKET}" -var="lambda_s3_key=${LAMBDA_S3_KEY}" -out=tfplan + run: terraform plan 
-input=false -out=tfplan + + - name: Capture current Lambda alias versions + id: snapshot-aliases + run: | + set -euo pipefail + mkdir -p artifacts + FUNCTION_NAME="TransactionSimulatorAPI" + LIVE_VERSION=$(aws lambda get-alias --function-name "$FUNCTION_NAME" --name LIVE --query 'FunctionVersion' --output text 2>/dev/null || true) + BETA_VERSION=$(aws lambda get-alias --function-name "$FUNCTION_NAME" --name BETA --query 'FunctionVersion' --output text 2>/dev/null || true) + cat > artifacts/lambda_alias_versions.json <<JSON + { + "function_name": "$FUNCTION_NAME", + "live_version": "${LIVE_VERSION:-}", + "beta_version": "${BETA_VERSION:-}" + } + JSON - name: Upload Terraform plan uses: actions/upload-artifact@v4 @@ -258,6 +104,12 @@ jobs: name: terraform-plan path: terraform/tfplan + - name: Upload alias snapshot + uses: actions/upload-artifact@v4 + with: + name: pre-deploy-alias-snapshot + path: artifacts/lambda_alias_versions.json + manual-approval: name: Await Manual Approval runs-on: ubuntu-latest @@ -266,19 +118,28 @@ jobs: name: production steps: - name: Approval required - run: | - echo "This job requires approval in the production environment before continuing." + run: echo "Approve this job in the production environment to continue."
terraform-apply: name: Terraform Apply runs-on: ubuntu-latest needs: manual-approval - outputs: - outputs-file: terraform_outputs.json + env: + TF_VAR_supabase_url: ${{ env.SUPABASE_URL }} + TF_VAR_supabase_key: ${{ env.SUPABASE_KEY }} steps: - name: Checkout uses: actions/checkout@v4 + - name: Set up Node.js + uses: actions/setup-node@v4 + with: + node-version: ${{ env.NODE_VERSION }} + cache: 'npm' + cache-dependency-path: | + frontend/package-lock.json + lambda/package-lock.json + - name: Configure AWS credentials uses: aws-actions/configure-aws-credentials@v4 with: @@ -290,23 +151,35 @@ jobs: with: terraform_version: ${{ env.TERRAFORM_VERSION }} - - name: Download Lambda metadata - uses: actions/download-artifact@v4 - with: - name: lambda-artifacts - path: artifacts/lambda + - name: Install frontend dependencies + working-directory: frontend + run: | + if [ -f package-lock.json ]; then + npm ci + else + npm install + fi - - name: Download Terraform plan - uses: actions/download-artifact@v4 - with: - name: terraform-plan - path: artifacts/terraform + - name: Build frontend + working-directory: frontend + run: npm run build + + - name: Install Lambda dependencies + working-directory: lambda + run: | + if [ -f package-lock.json ]; then + npm ci + else + npm install + fi - - name: Apply Terraform plan + - name: Terraform init working-directory: terraform - env: - TF_CLI_ARGS: -input=false - run: terraform apply -auto-approve ../artifacts/terraform/tfplan + run: terraform init -input=false -reconfigure + + - name: Terraform apply + working-directory: terraform + run: terraform apply -input=false -auto-approve - name: Capture Terraform outputs working-directory: terraform @@ -318,10 +191,10 @@ jobs: name: terraform-outputs path: terraform_outputs.json - lambda-canary-deployment: - name: Lambda Canary Deployment + lambda-ab-test: + name: Configure Lambda A/B Routing runs-on: ubuntu-latest - needs: [terraform-apply, lambda-package] + needs: terraform-apply steps: - 
name: Configure AWS credentials uses: aws-actions/configure-aws-credentials@v4 @@ -329,37 +202,72 @@ jobs: role-to-assume: ${{ secrets.AWS_DEPLOY_ROLE_ARN }} aws-region: ${{ env.AWS_REGION }} - - name: Trigger CodeDeploy deployment + - name: Download Terraform outputs + uses: actions/download-artifact@v4 + with: + name: terraform-outputs + path: artifacts + + - name: Download alias snapshot + uses: actions/download-artifact@v4 + with: + name: pre-deploy-alias-snapshot + path: artifacts + + - name: Install jq + run: sudo apt-get update -y && sudo apt-get install -y jq + + - name: Configure traffic weights env: - APPLICATION_NAME: ${{ secrets.CODEDEPLOY_APPLICATION_NAME }} - DEPLOYMENT_GROUP: ${{ secrets.CODEDEPLOY_DEPLOYMENT_GROUP }} - LAMBDA_BUCKET: ${{ needs.lambda-package.outputs['lambda-bucket'] }} - LAMBDA_KEY: ${{ needs.lambda-package.outputs['lambda-key'] }} + ROUTING_WEIGHT: ${{ env.AB_TRAFFIC_WEIGHT }} run: | - if [ -z "$APPLICATION_NAME" ] || [ -z "$DEPLOYMENT_GROUP" ]; then - echo "CodeDeploy application or deployment group secrets are not configured." >&2 + set -euo pipefail + SNAPSHOT_FILE="artifacts/lambda_alias_versions.json" + OUTPUT_FILE="artifacts/terraform_outputs.json" + + if [ ! -f "$SNAPSHOT_FILE" ]; then + echo "Alias snapshot not found; skipping A/B configuration." + exit 0 + fi + + PREVIOUS_VERSION=$(jq -r '.live_version // ""' "$SNAPSHOT_FILE") + FUNCTION_NAME=$(jq -r '.lambda_function_name.value // ""' "$OUTPUT_FILE") + + if [ -z "$FUNCTION_NAME" ] || [ "$FUNCTION_NAME" = "null" ]; then + echo "Unable to determine Lambda function name; skipping A/B configuration." 
>&2 exit 1 fi - DEPLOYMENT_ID=$(aws deploy create-deployment \ - --application-name "$APPLICATION_NAME" \ - --deployment-group-name "$DEPLOYMENT_GROUP" \ - --s3-location bucket=$LAMBDA_BUCKET,key=$LAMBDA_KEY,bundleType=zip \ - --query deploymentId --output text) - echo "Started deployment $DEPLOYMENT_ID" - aws deploy wait deployment-successful --deployment-id "$DEPLOYMENT_ID" - echo "Deployment $DEPLOYMENT_ID completed successfully." + + if [ -z "$PREVIOUS_VERSION" ] || [ "$PREVIOUS_VERSION" = "None" ]; then + echo "No previous live version recorded; skipping A/B configuration." + exit 0 + fi + + NEW_VERSION=$(aws lambda get-alias --function-name "$FUNCTION_NAME" --name BETA --query 'FunctionVersion' --output text) + + if [ -z "$NEW_VERSION" ] || [ "$NEW_VERSION" = "None" ]; then + echo "Unable to determine beta alias version; skipping A/B configuration." + exit 0 + fi + + if [ "$PREVIOUS_VERSION" = "$NEW_VERSION" ]; then + echo "Live and beta versions match; nothing to route." + exit 0 + fi + + echo "Routing $(echo "$ROUTING_WEIGHT" | awk '{printf "%s%%", $1*100}') of traffic to version $NEW_VERSION while keeping version $PREVIOUS_VERSION as primary." 
+ + aws lambda update-alias \ + --function-name "$FUNCTION_NAME" \ + --name LIVE \ + --function-version "$PREVIOUS_VERSION" \ + --routing-config "AdditionalVersionWeights={\"$NEW_VERSION\"=$ROUTING_WEIGHT}" smoke-tests: name: Smoke Tests runs-on: ubuntu-latest - needs: lambda-canary-deployment + needs: lambda-ab-test steps: - - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v4 - with: - role-to-assume: ${{ secrets.AWS_DEPLOY_ROLE_ARN }} - aws-region: ${{ env.AWS_REGION }} - - name: Download Terraform outputs uses: actions/download-artifact@v4 with: @@ -373,16 +281,45 @@ jobs: env: TERRAFORM_OUTPUTS: artifacts/terraform_outputs.json run: | - API_URL=$(jq -r '.api_gateway_invoke_url.value' "$TERRAFORM_OUTPUTS") - FRONTEND_URL=$(jq -r '.frontend_website_url.value' "$TERRAFORM_OUTPUTS") - if [ -z "$API_URL" ]; then + set -euo pipefail + API_URL=$(jq -r '.api_gateway_invoke_url.value // ""' "$TERRAFORM_OUTPUTS") + FRONTEND_URL=$(jq -r '.frontend_website_url.value // ""' "$TERRAFORM_OUTPUTS") + + if [ -z "$API_URL" ] || [ "$API_URL" = "null" ]; then echo "ERROR: API_URL is empty." >&2 exit 1 fi - if [ -z "$FRONTEND_URL" ]; then + + if [ -z "$FRONTEND_URL" ] || [ "$FRONTEND_URL" = "null" ]; then echo "ERROR: FRONTEND_URL is empty." 
>&2 exit 1 fi - curl -v --fail "$API_URL/api/login" - curl -v --fail "$FRONTEND_URL" + + LOGIN_ENDPOINT="${API_URL%/}/api/login" + + echo "Testing backend login endpoint at ${LOGIN_ENDPOINT}" + API_STATUS=$(curl -sS -o api_response.json -w '%{http_code}' \ + -X POST "$LOGIN_ENDPOINT" \ + -H 'Content-Type: application/json' \ + -d '{"username":"admin","password":"password123"}') + + if [ "$API_STATUS" -ne 200 ]; then + echo "Login endpoint returned status $API_STATUS" >&2 + cat api_response.json >&2 || true + exit 1 + fi + + jq -e '.success == true' api_response.json >/dev/null + + echo "Testing frontend availability at $FRONTEND_URL" + FRONTEND_STATUS=$(curl -sS -o frontend_response.html -w '%{http_code}' "$FRONTEND_URL") + + if [ "$FRONTEND_STATUS" -ge 400 ]; then + echo "Frontend returned status $FRONTEND_STATUS" >&2 + cat frontend_response.html >&2 || true + exit 1 + fi + + grep -q "Payment Simulator" frontend_response.html + echo "Smoke tests completed successfully." diff --git a/README.md b/README.md index 103482c..74b01fe 100644 --- a/README.md +++ b/README.md @@ -40,7 +40,7 @@ A robust payment transaction simulator designed to test and demonstrate payment - **Database**: Supabase - **Build Tool**: Vite - **Backend**: Express.js with Apollo Server (TypeScript) -- **Deployment**: Render +- **Deployment**: AWS (Terraform + GitHub Actions) - **Containerization**: Docker --- @@ -203,8 +203,10 @@ This project is licensed under the MIT License - see the [LICENSE](LICENSE) file ## 📊 **Infrastructure and Deployment** ### **Production Environment** -- **Frontend Hosting**: Render -- **Backend Hosting**: Render +- **Frontend Hosting**: Amazon S3 static website +- **Backend Hosting**: AWS Lambda behind API Gateway +- **Infrastructure as Code**: Terraform +- **CI/CD**: GitHub Actions (manual approval + smoke tests) - **Database**: Supabase ### **Environment Variables** diff --git a/appspec.yml b/appspec.yml index 83d0454..e412f75 100644 --- a/appspec.yml +++ 
b/appspec.yml @@ -9,4 +9,4 @@ Resources: TargetVersion: "2" Hooks: - BeforeAllowTraffic: "TransactionSimulatorAPIPreTrafficHook" - - AfterAllowTraffic: "TransactionSimulatorAPIPostTrafficHook" \ No newline at end of file + - AfterAllowTraffic: "TransactionSimulatorAPIPostTrafficHook" diff --git a/buildspec-create-lambda-s3-bucket.yml b/buildspec-create-lambda-s3-bucket.yml deleted file mode 100644 index ff2021f..0000000 --- a/buildspec-create-lambda-s3-bucket.yml +++ /dev/null @@ -1,10 +0,0 @@ -version: 0.2 -phases: - build: - commands: - - echo "Fetching AWS Account ID..." - - export AWS_ACCOUNT_ID=$(aws sts get-caller-identity --query Account --output text) - - export LAMBDA_CODE_S3_BUCKET="transaction-simulator-lambda-code-${AWS_ACCOUNT_ID}-${AWS_DEFAULT_REGION}" - - echo "Attempting to create S3 bucket $LAMBDA_CODE_S3_BUCKET in region $AWS_DEFAULT_REGION" - - aws s3 mb s3://${LAMBDA_CODE_S3_BUCKET} --region ${AWS_DEFAULT_REGION} || true - - echo "S3 bucket creation attempt complete." \ No newline at end of file diff --git a/buildspec-frontend-build.yml b/buildspec-frontend-build.yml deleted file mode 100644 index 9daae11..0000000 --- a/buildspec-frontend-build.yml +++ /dev/null @@ -1,34 +0,0 @@ -version: 0.2 - -phases: - install: - commands: - - echo Installing Node.js... - - curl -fsSL https://deb.nodesource.com/setup_20.x | bash - - - apt-get install -y nodejs - - cd frontend - - echo Checking Node.js version... - - node -v - - npm -v - - echo Installing frontend dependencies... - - npm install - - cd .. - build: - commands: - - echo Building frontend... - - cd frontend - - npm run build - - echo Checking frontend/dist contents... - - ls -la dist - - ls -la dist/assets || echo "Assets directory not found!" - - cd .. - post_build: - commands: - - echo "Frontend build complete." 
-artifacts: - files: - - '**/*' - base-directory: frontend/dist -cache: - paths: - - 'frontend/node_modules/**' \ No newline at end of file diff --git a/buildspec-lambda-canary-deployment.yml b/buildspec-lambda-canary-deployment.yml deleted file mode 100644 index 47cabd6..0000000 --- a/buildspec-lambda-canary-deployment.yml +++ /dev/null @@ -1,61 +0,0 @@ -# buildspec-lambda-canary-deployment.yml -# This buildspec represents a placeholder for CodeDeploy's Lambda deployment hooks -# when performing a canary rollout. CodeDeploy manages traffic shifting. -# In this file, we assume CodeDeploy is handling the actual traffic shift based on -# its deployment group configuration. -# This specific buildspec is for demonstrating custom actions or post-traffic shift validation. -version: 0.2 - -# CodeDeploy's Lambda deployments don't typically run a buildspec.yml directly -# instead, CodeDeploy uses its own mechanism for traffic shifting and optional Lambda hooks. -# This buildspec exists as a conceptual placeholder if you were to have a CodeBuild -# action in CodePipeline that *manually* triggers a CodeDeploy Lambda deployment -# or performs post-deployment actions. - -# However, for AWS CodePipeline's direct integration with AWS CodeDeploy for Lambda, -# you use the 'AWS CodeDeploy' action type with 'DeploymentStyle: Lambda' in CodePipeline. -# This action type handles the traffic shifting directly, and doesn't explicitly -# execute a buildspec.yml for the traffic shift itself. - -# This buildspec is primarily for a scenario where you might want to perform -# custom pre-traffic hook validation or post-traffic hook validation. - -# Example: If you had a custom CodeBuild action for Post-Traffic Hook Validation -# phases: -# install: -# commands: -# - echo "Setting up environment for validation..." -# - sudo apt-get update -y -# - sudo apt-get install -y curl jq -# -# build: -# commands: -# - echo "Retrieving Lambda alias info for validation..." 
-# # You would typically pass the current version, target version, alias names -# # as environment variables to this CodeBuild project via CodePipeline. -# - CURRENT_LIVE_VERSION=$(aws lambda get-alias --function-name TransactionSimulatorAPI --name LIVE | jq -r '.FunctionVersion') -# - echo "Current LIVE alias version: $CURRENT_LIVE_VERSION" -# - # Perform actual validation against the new version or blended traffic -# - API_URL=$(jq -r '.api_gateway_invoke_url.value' terraform_outputs.json) # Assuming terraform_outputs.json is available -# - echo "Running health check on API: $API_URL" -# - curl -sS -f "$API_URL/api/login" # Example health check -# - echo "Validation successful." -# -# post_build: -# commands: -# - echo "Custom post-traffic hook validation complete." -# -# artifacts: -# files: [] # No specific artifacts needed from this stage - -# For now, this will be an empty buildspec as the primary canary deployment is handled by CodeDeploy's Lambda action itself. -# We will ensure the CodePipeline definition uses the direct CodeDeploy Lambda action type. -# This file still needs to exist to be referenced by a CodeBuild project in CodePipeline. -phases: - build: - commands: - - echo "This buildspec is a placeholder. Lambda traffic shifting is managed directly by AWS CodeDeploy." - - echo "CodeDeploy's Lambda deployments do not execute a buildspec.yml directly for traffic shifting." - - echo "Post-deployment smoke tests will occur in a separate stage after traffic is fully shifted." -artifacts: - files: [] \ No newline at end of file diff --git a/buildspec-lambda-package.yml b/buildspec-lambda-package.yml deleted file mode 100644 index 4fd9e7f..0000000 --- a/buildspec-lambda-package.yml +++ /dev/null @@ -1,59 +0,0 @@ -version: 0.2 - -phases: - install: - runtime-versions: - nodejs: 20 - commands: - - echo "Installing Node.js version 20 ..." - - n $NODE_20_VERSION - - echo "Installing dependencies for Lambda function..." 
- - cd lambda - - npm install - - echo "Dependencies installed." - - cd .. # Ensure you are back at the root of the source directory - build: - commands: - - echo "Zipping Lambda function code..." - - zip -r lambda.zip lambda - - echo "Lambda code zipped." - - export LAMBDA_CODE_S3_BUCKET="transaction-simulator-lambda-code-${AWS_ACCOUNT_ID}-${AWS_DEFAULT_REGION}" - - echo "Lambda S3 Bucket $LAMBDA_CODE_S3_BUCKET" - - export LAMBDA_S3_KEY="lambda-packages/lambda-${CODEBUILD_RESOLVED_SOURCE_VERSION}.zip" - - echo "Lambda S3 Key $LAMBDA_S3_KEY" - - echo "Uploading lambda.zip to S3..." - - aws s3 cp lambda.zip "s3://${LAMBDA_CODE_S3_BUCKET}/${LAMBDA_S3_KEY}" --metadata-directive REPLACE --acl bucket-owner-full-control --sse AES256 - - echo "Lambda.zip uploaded to S3." - - echo "LAMBDA_S3_BUCKET=$LAMBDA_CODE_S3_BUCKET" > build.env - - echo "LAMBDA_S3_KEY=$LAMBDA_S3_KEY" >> build.env - - echo "build.env file created with S3 location details." - post_build: - commands: - - echo "Lambda packaging and upload completed." 
- - echo "--- Checking build.env location and content ---" - - ls -la build.env # Confirm build.env exists at root - - cat build.env # Display its content - - echo "--- End checking build.env ---" - - echo "Contents of current directory before artifact packaging:" # DIAGNOSTIC - - ls -laR $CODEBUILD_SRC_DIR # DIAGNOSTIC: List everything recursively from the source dir - - echo "End of diagnostic ls -laR" # DIAGNOSTIC - -artifacts: - files: - - lambda.zip - name: $(if [ "$CODEBUILD_INITIATOR" = "codepipeline" ]; then echo "LambdaArtifact"; else echo "BuildOutput"; fi) - discard-paths: no - secondary-artifacts: - LambdaArtifact: - files: - - lambda.zip - discard-paths: yes - LambdaS3LocationArtifact: - files: - - build.env - discard-paths: yes - name: LambdaPackageArtifact -cache: - paths: - - 'lambda/node_modules/**' - - '/usr/local/n/versions/node/**' \ No newline at end of file diff --git a/buildspec-smoke-tests.yml b/buildspec-smoke-tests.yml deleted file mode 100644 index 4cd752e..0000000 --- a/buildspec-smoke-tests.yml +++ /dev/null @@ -1,42 +0,0 @@ -# buildspec-smoke-tests.yml -# This buildspec runs automated tests against the deployed application (API Gateway and Frontend). -# This file will be placed in the root of your 'my-payment-simulator' repository. -version: 0.2 - -phases: - install: - commands: - # Install curl and jq for making HTTP requests and parsing JSON outputs. - - echo "Installing curl and jq for smoke tests..." - - sudo apt-get update -y - - sudo apt-get install -y curl jq - - build: - commands: - - echo "Running smoke tests..." - # Extract API Gateway and Frontend URLs from the 'terraform_outputs.json' artifact. - # This file is generated by the 'Terraform Apply' stage. - - API_URL=$(jq -r '.api_gateway_invoke_url.value' terraform_outputs.json) - - FRONTEND_URL=$(jq -r '.frontend_website_url.value' terraform_outputs.json) - - # Basic validation to ensure URLs are not empty before testing. 
- - if [ -z "$API_URL" ]; then echo "ERROR API_URL is empty. Cannot run API tests."; exit 1; fi - - if [ -z "$FRONTEND_URL" ]; then echo "ERROR FRONTEND_URL is empty. Cannot run Frontend tests."; exit 1; fi - - - echo "Testing API Gateway URL $API_URL" - # Perform a basic health check on the API login endpoint. - # '--fail' will cause curl to exit with an error code if the HTTP status is >= 400. - - curl -v --fail "$API_URL/api/login" - - echo "API Gateway smoke test passed." - - - echo "Testing Frontend URL $FRONTEND_URL" - # Check if the frontend is accessible by making a request to its root URL. - - curl -v --fail "$FRONTEND_URL" - - echo "Frontend smoke test passed." - - post_build: - commands: - - echo "Smoke tests completed successfully." - -artifacts: - files: [] # No specific artifacts needed from this final testing stage. \ No newline at end of file diff --git a/buildspec-static-analysis.yml b/buildspec-static-analysis.yml deleted file mode 100644 index 6a3d5b9..0000000 --- a/buildspec-static-analysis.yml +++ /dev/null @@ -1,88 +0,0 @@ -version: 0.2 - -env: - variables: - CHECK_FAIL_ON: "NONE" # Set to "NONE" or "FAILURE" - -phases: - install: - runtime-versions: - nodejs: 20 - python: 3.11 - commands: - - echo "Installing Checkov (IaC security scanner)..." - - pip install checkov - - - | - if [ -d "backend" ] && [ -f "backend/package.json" ]; then - echo "Installing backend dependencies..." - cd backend/ - npm ci --only=dev || npm install --only=dev # This will now run, but install nothing if no devDeps are listed - cd .. - else - echo "No backend/package.json found." - fi - - - | - if [ -d "frontend" ] && [ -f "frontend/package.json" ]; then - echo "Installing frontend dependencies..." - cd frontend/ - npm ci --only=dev || npm install --only=dev # This will now run, but install nothing if no devDeps are listed - cd .. - else - echo "No frontend/package.json found." - fi - - pre_build: - commands: - - echo "Verifying tool installations..." 
- - checkov --version - - node --version - build: - commands: - - echo "Running Checkov for IaC security scanning on Terraform files..." - - | - if [ -d "terraform" ]; then - checkov -d terraform/ --framework terraform --output cli --output json --output-file-path . || CHECKOV_EXIT_CODE=$? - if [ -f "results_json.json" ]; then - mv results_json.json checkov_results.json - else - echo '{"summary": {"failed": 0, "passed": 0, "skipped": 0}, "results": {"failed_checks": []}}' > checkov_results.json - fi - - # Check if we should fail on Checkov findings - if [ "$CHECK_FAIL_ON" == "FAILURE" ] && [ "${CHECKOV_EXIT_CODE:-0}" -ne 0 ]; then - echo "Checkov found security issues. Review checkov_results.json for details." - cat checkov_results.json - exit 1 - fi - else - echo "No terraform/ directory found, skipping Checkov scan" - echo '{"summary": {"failed": 0, "passed": 0, "skipped": 0}, "results": {"failed_checks": []}}' > checkov_results.json - fi - - post_build: - commands: - - echo "Static analysis complete. Generating summary report..." - - echo "=== STATIC ANALYSIS SUMMARY ===" - - | - if [ -f "checkov_results.json" ]; then - CHECKOV_FAILED=$(cat checkov_results.json | jq -r '.summary.failed // 0' 2>/dev/null || echo "0") - CHECKOV_PASSED=$(cat checkov_results.json | jq -r '.summary.passed // 0' 2>/dev/null || echo "0") - echo "Checkov: $CHECKOV_FAILED failed, $CHECKOV_PASSED passed" - fi - - echo "Review detailed results in the artifacts for specific findings." 
- -artifacts: - files: - - 'checkov_results.json' - name: static-analysis-results - -cache: - paths: - - '/usr/local/n/versions/node/**' - - '/root/.cache/pip/**' - - '/usr/local/lib/python3.11/site-packages/**' - - '/usr/local/lib/node_modules/**' - - 'backend/node_modules/**' - - 'frontend/node_modules/**' \ No newline at end of file diff --git a/buildspec-terraform-apply.yml b/buildspec-terraform-apply.yml deleted file mode 100644 index 235b6d5..0000000 --- a/buildspec-terraform-apply.yml +++ /dev/null @@ -1,46 +0,0 @@ -version: 0.2 - -env: - variables: - TF_VERSION: "1.7.5" - -phases: - install: - runtime-versions: - nodejs: 20 - python: 3.11 - commands: - - echo "Installing Terraform..." - - mkdir -p /tmp/terraform_install - - cd /tmp/terraform_install - - wget https://releases.hashicorp.com/terraform/${TF_VERSION}/terraform_${TF_VERSION}_linux_amd64.zip - - unzip -o terraform_${TF_VERSION}_linux_amd64.zip - - mv terraform /usr/local/bin/ - - cd $CODEBUILD_SRC_DIR - - terraform version - - - echo "Loading Lambda S3 location from previous stage..." - # CRITICAL CHANGE: Use LambdaS3LocationArtifact as the directory name - - . LambdaS3LocationArtifact/build.env - - echo "LAMBDA_S3_BUCKET=$LAMBDA_S3_BUCKET" - - echo "LAMBDA_S3_KEY=$LAMBDA_S3_KEY" - - build: - commands: - - echo "Initializing Terraform..." - - cd terraform - - terraform init -input=false -reconfigure - - - echo "Applying Terraform plan..." - - terraform apply -auto-approve ../TerraformPlanArtifact/tfplan -input=false - - post_build: - commands: - - echo "Generating Terraform outputs..." - - terraform output -json > ../terraform_outputs.json - - echo "Terraform apply completed." 
- -artifacts: - files: - - 'terraform_outputs.json' - name: TerraformOutputsArtifact \ No newline at end of file diff --git a/buildspec-terraform-plan.yml b/buildspec-terraform-plan.yml deleted file mode 100644 index 9eb156f..0000000 --- a/buildspec-terraform-plan.yml +++ /dev/null @@ -1,110 +0,0 @@ -version: 0.2 - -env: - variables: - TF_VERSION: "1.7.5" - TF_PLAN_FILE: "tfplan" - -phases: - install: - runtime-versions: - nodejs: 20 - python: 3.11 - commands: - - echo "Installing Terraform..." - - mkdir -p /tmp/terraform_install - - cd /tmp/terraform_install - - wget --tries=5 --timeout=30 https://releases.hashicorp.com/terraform/${TF_VERSION}/terraform_${TF_VERSION}_linux_amd64.zip - - unzip -o terraform_${TF_VERSION}_linux_amd64.zip - - mv terraform /usr/local/bin/ - - cd $CODEBUILD_SRC_DIR - - echo "Verifying Terraform installation..." - - terraform --version - - pre_build: - commands: - - echo "=== COMPREHENSIVE DIAGNOSTICS START ===" - - echo "Current working directory:" - - pwd - - echo "CODEBUILD_SRC_DIR value:" - - echo $CODEBUILD_SRC_DIR - - echo "CODEBUILD_SRC_DIR_LambdaS3LocationArtifact value:" - - echo $CODEBUILD_SRC_DIR_LambdaS3LocationArtifact - - echo "Contents of CODEBUILD_SRC_DIR:" - - ls -la $CODEBUILD_SRC_DIR - - echo "Recursive listing of CODEBUILD_SRC_DIR:" - - ls -laR $CODEBUILD_SRC_DIR - - echo "Contents of LambdaS3LocationArtifact:" - - ls -la $CODEBUILD_SRC_DIR_LambdaS3LocationArtifact || echo "Directory not found or empty." - - echo "Recursive listing of LambdaS3LocationArtifact:" - - ls -laR $CODEBUILD_SRC_DIR_LambdaS3LocationArtifact || echo "Directory not found or empty." 
- - echo "Environment variables (filtered):" - - env | grep -E "(CODEBUILD|ARTIFACT)" || echo "No CODEBUILD/ARTIFACT env vars found" - - | - for dir in LambdaS3LocationArtifact LambdaPackageArtifact SourceOutput; do - echo "Checking for directory: $dir" - if [ -d "$dir" ]; then - echo " Directory $dir exists:" - ls -la "$dir/" - if [ -f "$dir/build.env" ]; then - echo " build.env found in $dir:" - cat "$dir/build.env" - else - echo " build.env NOT found in $dir" - fi - else - echo " Directory $dir does not exist" - fi - done - - echo "Searching for build.env in /codebuild/output:" - - find /codebuild/output -name "build.env" -type f 2>/dev/null || echo "No build.env files found" - - echo "=== COMPREHENSIVE DIAGNOSTICS END ===" - - build: - commands: - - echo "Loading Lambda S3 location from previous stage..." - - | - BUILD_ENV_PATH="" - for path in "$CODEBUILD_SRC_DIR_LambdaS3LocationArtifact" \ - "$CODEBUILD_SRC_DIR/LambdaS3LocationArtifact" \ - "$CODEBUILD_SRC_DIR/LambdaPackageArtifact"; do - if [ -f "$path/build.env" ]; then - BUILD_ENV_PATH="$path/build.env" - break - fi - done - - if [ -z "$BUILD_ENV_PATH" ]; then - echo "ERROR: build.env not found in any expected location!" - exit 1 - fi - - echo "Sourcing build.env from $BUILD_ENV_PATH" - . "$BUILD_ENV_PATH" - - echo "LAMBDA_S3_BUCKET=$LAMBDA_S3_BUCKET" - - echo "LAMBDA_S3_KEY=$LAMBDA_S3_KEY" - - | - if [ -z "$LAMBDA_S3_BUCKET" ] || [ -z "$LAMBDA_S3_KEY" ]; then - echo "ERROR: Required environment variables not set!" - exit 1 - fi - - | - if [ ! -d "terraform" ]; then - echo "ERROR: 'terraform' directory not found!" - exit 1 - fi - cd terraform - - terraform init -reconfigure - - terraform plan -var="lambda_s3_bucket=${LAMBDA_S3_BUCKET}" -var="lambda_s3_key=${LAMBDA_S3_KEY}" -out=${TF_PLAN_FILE} - - post_build: - commands: - - echo "Uploading Terraform plan artifact..." - - mv ${TF_PLAN_FILE} $CODEBUILD_SRC_DIR/ - - echo "Terraform plan completed and artifact uploaded." 
- -artifacts: - files: - - 'tfplan' - discard-paths: yes - name: TerraformPlanArtifact \ No newline at end of file diff --git a/lambda/lambda.js b/lambda/lambda.js index f48ae50..4186dcc 100644 --- a/lambda/lambda.js +++ b/lambda/lambda.js @@ -299,4 +299,4 @@ if (path === "/api/transactions" && httpMethod === "POST") { method: httpMethod }) }; -}; \ No newline at end of file +}; diff --git a/lambda/package.json b/lambda/package.json index c0f4e4a..8135536 100644 --- a/lambda/package.json +++ b/lambda/package.json @@ -5,4 +5,4 @@ "@supabase/supabase-js": "^2.49.3", "node-fetch": "^3.3.2" } -} \ No newline at end of file +} diff --git a/terraform/frontend.tf b/terraform/frontend.tf index 2db6373..2f95c7c 100644 --- a/terraform/frontend.tf +++ b/terraform/frontend.tf @@ -73,9 +73,3 @@ resource "aws_s3_bucket_cors_configuration" "frontend_cors" { max_age_seconds = 3000 } } - -#frontend_website_url -output "frontend_website_url_output" { - description = "The URL of the S3 static website" - value = aws_s3_bucket_website_configuration.frontend_website.website_endpoint -} \ No newline at end of file diff --git a/terraform/lambda_api_gateway.tf b/terraform/lambda_api_gateway.tf index 0eb8044..3017a7b 100644 --- a/terraform/lambda_api_gateway.tf +++ b/terraform/lambda_api_gateway.tf @@ -20,42 +20,43 @@ resource "aws_iam_role_policy_attachment" "lambda_policy" { policy_arn = "arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole" } -# Data source to get the S3 object details for the Lambda code -data "aws_s3_object" "lambda_code_object" { - bucket = var.lambda_s3_bucket - key = var.lambda_s3_key -} - # AWS Lambda Function Definition # 'publish = true' ensures a new Lambda version is created on every code change resource "aws_lambda_function" "api_lambda" { - function_name = "TransactionSimulatorAPI" - handler = "lambda.handler" - runtime = "nodejs20.x" - timeout = 60 - role = aws_iam_role.lambda_role.arn - # Use S3 bucket and key instead of local filename - 
s3_bucket = var.lambda_s3_bucket - s3_key = var.lambda_s3_key - source_code_hash = data.aws_s3_object.lambda_code_object.etag # Use ETag from S3 object for source_code_hash + function_name = "TransactionSimulatorAPI" + handler = "lambda.handler" + runtime = "nodejs20.x" + timeout = 60 + role = aws_iam_role.lambda_role.arn + + s3_bucket = aws_s3_bucket.lambda_code_bucket.bucket + s3_key = aws_s3_object.lambda_package.key + source_code_hash = data.archive_file.lambda_zip.output_sha publish = true - + environment { variables = { SUPABASE_URL = var.supabase_url SUPABASE_KEY = var.supabase_key } } + + depends_on = [aws_s3_object.lambda_package] } # AWS Lambda Alias for Production Traffic (e.g., 'LIVE') -# This alias will initially point to the current stable Lambda version. -# CodeDeploy will update this alias during a canary deployment. +# This alias will be managed by traffic-shifting automation in the deployment pipeline. resource "aws_lambda_alias" "live_alias" { - name = "LIVE" - function_name = aws_lambda_function.api_lambda.function_name - function_version = aws_lambda_function.api_lambda.version # Initially points to the current published version - description = "Alias for live production traffic. Managed by CodeDeploy for canary deployments." + name = "LIVE" + function_name = aws_lambda_function.api_lambda.function_name + description = "Alias for live production traffic." + + # Set an initial version on create, but allow the pipeline to manage future updates. 
+ function_version = aws_lambda_function.api_lambda.version + + lifecycle { + ignore_changes = [function_version, routing_config] + } } # AWS Lambda Alias for Pre-production/Testing (e.g., 'BETA' or 'GREEN') @@ -63,16 +64,15 @@ resource "aws_lambda_alias" "live_alias" { resource "aws_lambda_alias" "beta_alias" { name = "BETA" function_name = aws_lambda_function.api_lambda.function_name - function_version = aws_lambda_function.api_lambda.version # Initially points to the current published version - description = "Alias for beta/pre-production testing. Can be used for manual verification." + function_version = aws_lambda_function.api_lambda.version + description = "Alias for beta/pre-production testing." } # API Gateway Configuration (pointing to the LIVE alias) resource "aws_lambda_permission" "api_gateway" { statement_id = "AllowAPIGatewayInvoke" action = "lambda:InvokeFunction" - # Point API Gateway to the LIVE alias ARN for invocation - function_name = aws_lambda_alias.live_alias.function_name # <--- Reference function name, not ARN for permission + function_name = aws_lambda_alias.live_alias.function_name principal = "apigateway.amazonaws.com" source_arn = "${aws_api_gateway_rest_api.api.execution_arn}/*/*" } @@ -102,12 +102,12 @@ resource "aws_api_gateway_method" "api_login_options" { } resource "aws_api_gateway_integration" "lambda_integration_login_options" { - rest_api_id = aws_api_gateway_rest_api.api.id - resource_id = aws_api_gateway_resource.api_login.id - http_method = aws_api_gateway_method.api_login_options.http_method + rest_api_id = aws_api_gateway_rest_api.api.id + resource_id = aws_api_gateway_resource.api_login.id + http_method = aws_api_gateway_method.api_login_options.http_method integration_http_method = "POST" - type = "AWS_PROXY" - uri = aws_lambda_alias.live_alias.invoke_arn # <--- Point to LIVE alias invoke ARN + type = "AWS_PROXY" + uri = aws_lambda_alias.live_alias.invoke_arn } resource "aws_api_gateway_method" "api_login_post" { @@ 
-118,12 +118,12 @@ resource "aws_api_gateway_method" "api_login_post" { } resource "aws_api_gateway_integration" "lambda_integration_login_post" { - rest_api_id = aws_api_gateway_rest_api.api.id - resource_id = aws_api_gateway_resource.api_login.id - http_method = aws_api_gateway_method.api_login_post.http_method + rest_api_id = aws_api_gateway_rest_api.api.id + resource_id = aws_api_gateway_resource.api_login.id + http_method = aws_api_gateway_method.api_login_post.http_method integration_http_method = "POST" - type = "AWS_PROXY" - uri = aws_lambda_alias.live_alias.invoke_arn # <--- Point to LIVE alias invoke ARN + type = "AWS_PROXY" + uri = aws_lambda_alias.live_alias.invoke_arn } resource "aws_api_gateway_resource" "api_transactions" { @@ -154,21 +154,21 @@ resource "aws_api_gateway_method" "api_process_batch_options" { } resource "aws_api_gateway_integration" "lambda_integration_transactions_options" { - rest_api_id = aws_api_gateway_rest_api.api.id - resource_id = aws_api_gateway_resource.api_transactions.id - http_method = aws_api_gateway_method.api_transactions_options.http_method + rest_api_id = aws_api_gateway_rest_api.api.id + resource_id = aws_api_gateway_resource.api_transactions.id + http_method = aws_api_gateway_method.api_transactions_options.http_method integration_http_method = "POST" - type = "AWS_PROXY" - uri = aws_lambda_alias.live_alias.invoke_arn # <--- Point to LIVE alias invoke ARN + type = "AWS_PROXY" + uri = aws_lambda_alias.live_alias.invoke_arn } resource "aws_api_gateway_integration" "lambda_integration_process_batch_options" { - rest_api_id = aws_api_gateway_rest_api.api.id - resource_id = aws_api_gateway_resource.api_process_batch.id - http_method = aws_api_gateway_method.api_process_batch_options.http_method + rest_api_id = aws_api_gateway_rest_api.api.id + resource_id = aws_api_gateway_resource.api_process_batch.id + http_method = aws_api_gateway_method.api_process_batch_options.http_method integration_http_method = "POST" - type = 
"AWS_PROXY" - uri = aws_lambda_alias.live_alias.invoke_arn # <--- Point to LIVE alias invoke ARN + type = "AWS_PROXY" + uri = aws_lambda_alias.live_alias.invoke_arn } resource "aws_api_gateway_method" "api_transactions_post" { @@ -186,21 +186,21 @@ resource "aws_api_gateway_method" "api_process_batch_post" { } resource "aws_api_gateway_integration" "lambda_integration_transactions_post" { - rest_api_id = aws_api_gateway_rest_api.api.id - resource_id = aws_api_gateway_resource.api_transactions.id - http_method = aws_api_gateway_method.api_transactions_post.http_method + rest_api_id = aws_api_gateway_rest_api.api.id + resource_id = aws_api_gateway_resource.api_transactions.id + http_method = aws_api_gateway_method.api_transactions_post.http_method integration_http_method = "POST" - type = "AWS_PROXY" - uri = aws_lambda_alias.live_alias.invoke_arn # <--- Point to LIVE alias invoke ARN + type = "AWS_PROXY" + uri = aws_lambda_alias.live_alias.invoke_arn } resource "aws_api_gateway_integration" "lambda_integration_process_batch_post" { - rest_api_id = aws_api_gateway_rest_api.api.id - resource_id = aws_api_gateway_resource.api_process_batch.id - http_method = aws_api_gateway_method.api_process_batch_post.http_method + rest_api_id = aws_api_gateway_rest_api.api.id + resource_id = aws_api_gateway_resource.api_process_batch.id + http_method = aws_api_gateway_method.api_process_batch_post.http_method integration_http_method = "POST" - type = "AWS_PROXY" - uri = aws_lambda_alias.live_alias.invoke_arn # <--- Point to LIVE alias invoke ARN + type = "AWS_PROXY" + uri = aws_lambda_alias.live_alias.invoke_arn } resource "aws_api_gateway_method" "api_transactions_get" { @@ -211,12 +211,12 @@ resource "aws_api_gateway_method" "api_transactions_get" { } resource "aws_api_gateway_integration" "lambda_integration_transactions_get" { - rest_api_id = aws_api_gateway_rest_api.api.id - resource_id = aws_api_gateway_resource.api_transactions.id - http_method = 
aws_api_gateway_method.api_transactions_get.http_method + rest_api_id = aws_api_gateway_rest_api.api.id + resource_id = aws_api_gateway_resource.api_transactions.id + http_method = aws_api_gateway_method.api_transactions_get.http_method integration_http_method = "POST" - type = "AWS_PROXY" - uri = aws_lambda_alias.live_alias.invoke_arn # <--- Point to LIVE alias invoke ARN + type = "AWS_PROXY" + uri = aws_lambda_alias.live_alias.invoke_arn } # Enable CORS for API Gateway @@ -271,8 +271,8 @@ resource "aws_api_gateway_method_response" "process_batch_options_200" { status_code = "200" response_parameters = { - "method.response.header.Access-Control-Allow-Origin" = true - "method.response.header.Access-Control-Allow-Methods" = true + "method.response.header.Access-Control-Allow-Origin" = true, + "method.response.header.Access-Control-Allow-Methods" = true, "method.response.header.Access-Control-Allow-Headers" = true } } @@ -302,8 +302,8 @@ resource "aws_api_gateway_integration_response" "process_batch_options_response" status_code = aws_api_gateway_method_response.process_batch_options_200.status_code response_parameters = { - "method.response.header.Access-Control-Allow-Origin" = "'*'" - "method.response.header.Access-Control-Allow-Headers" = "'Content-Type,Authorization,X-Requested-With'" + "method.response.header.Access-Control-Allow-Origin" = "'*'", + "method.response.header.Access-Control-Allow-Headers" = "'Content-Type,Authorization,X-Requested-With'", "method.response.header.Access-Control-Allow-Methods" = "'GET,POST,OPTIONS'" } @@ -390,28 +390,3 @@ resource "aws_cloudwatch_log_group" "api_gateway_logs" { retention_in_days = 7 } -# Outputs -output "api_gateway_invoke_url" { - description = "API Gateway invoke URL" - value = "${aws_api_gateway_stage.prod_stage.invoke_url}/api" -} - -output "lambda_function_name" { - description = "Lambda function name" - value = aws_lambda_function.api_lambda.function_name -} - -output "lambda_function_arn" { - description = 
"Lambda function ARN" - value = aws_lambda_function.api_lambda.arn -} - -output "lambda_live_alias_arn" { - description = "Lambda LIVE alias ARN" - value = aws_lambda_alias.live_alias.arn -} - -output "api_gateway_rest_api_id" { - description = "API Gateway REST API ID" - value = aws_api_gateway_rest_api.api.id -} \ No newline at end of file diff --git a/terraform/outputs.tf b/terraform/outputs.tf index 321c7fb..9abeb5f 100644 --- a/terraform/outputs.tf +++ b/terraform/outputs.tf @@ -19,21 +19,19 @@ output "frontend_bucket_name" { sensitive = false } -# Output the Lambda Function Name for CodePipeline/CodeDeploy reference +# Output the Lambda Function Name for reference output "lambda_function_name" { - description = "The name of the Lambda function for CodeDeploy." + description = "The name of the Lambda function" value = aws_lambda_function.api_lambda.function_name - # This export name should match what CodePipeline expects - # For cross-stack references, this output can be imported as "TransactionSimulatorLambdaFunctionName" } # Ensure these are also outputs if you want them visible/importable - output "lambda_live_alias_arn" { - description = "The ARN of the Lambda LIVE alias" - value = aws_lambda_alias.live_alias.arn - } +output "lambda_live_alias_arn" { + description = "The ARN of the Lambda LIVE alias" + value = aws_lambda_alias.live_alias.arn +} - output "lambda_beta_alias_arn" { - description = "The ARN of the Lambda BETA alias" - value = aws_lambda_alias.beta_alias.arn - } \ No newline at end of file +output "lambda_beta_alias_arn" { + description = "The ARN of the Lambda BETA alias" + value = aws_lambda_alias.beta_alias.arn +} diff --git a/terraform/s3_lambda_code_bucket.tf b/terraform/s3_lambda_code_bucket.tf index 1bfbab4..1b43ef9 100644 --- a/terraform/s3_lambda_code_bucket.tf +++ b/terraform/s3_lambda_code_bucket.tf @@ -5,7 +5,7 @@ data "aws_region" "current" {} # Create or reference the S3 bucket for Lambda code resource "aws_s3_bucket" 
"lambda_code_bucket" { bucket = "transaction-simulator-lambda-code-${data.aws_caller_identity.current.account_id}-${data.aws_region.current.name}" - + tags = { Name = "TransactionSimulatorLambdaCode" Project = "TransactionSimulator" @@ -37,12 +37,30 @@ resource "aws_s3_bucket_public_access_block" "lambda_code_bucket_pab" { # Bucket versioning for Lambda code bucket resource "aws_s3_bucket_versioning" "lambda_code_bucket_versioning" { bucket = aws_s3_bucket.lambda_code_bucket.id + versioning_configuration { status = "Enabled" } } -# Output the bucket name for use in other resources +# Package the Lambda function code from the local directory +# Ensure the GitHub Actions workflow installs dependencies before running Terraform +# so that node_modules are included in the archive when necessary. +data "archive_file" "lambda_zip" { + type = "zip" + source_dir = "${path.module}/../lambda" + output_path = "${path.module}/.terraform/lambda.zip" +} + +# Upload the packaged Lambda artifact to the managed bucket +resource "aws_s3_object" "lambda_package" { + bucket = aws_s3_bucket.lambda_code_bucket.id + key = "lambda-packages/${data.archive_file.lambda_zip.output_sha}.zip" + source = data.archive_file.lambda_zip.output_path + etag = filemd5(data.archive_file.lambda_zip.output_path) +} + +# Output the bucket name and key for reference output "lambda_code_bucket_name" { description = "Name of the S3 bucket storing Lambda deployment packages." value = aws_s3_bucket.lambda_code_bucket.bucket @@ -51,4 +69,9 @@ output "lambda_code_bucket_name" { output "lambda_code_bucket_arn" { description = "ARN of the S3 bucket storing Lambda deployment packages." value = aws_s3_bucket.lambda_code_bucket.arn -} \ No newline at end of file +} + +output "lambda_package_key" { + description = "Key of the Lambda package uploaded to S3." 
+ value = aws_s3_object.lambda_package.key +} diff --git a/terraform/variables.tf b/terraform/variables.tf index 19c4001..b883457 100644 --- a/terraform/variables.tf +++ b/terraform/variables.tf @@ -1,21 +1,11 @@ - variable "supabase_url" { - description = "Supabase project URL" - type = string - sensitive = true - } +variable "supabase_url" { + description = "Supabase project URL" + type = string + sensitive = true +} - variable "supabase_key" { - description = "Supabase anon/public key" - type = string - sensitive = true - } - - variable "lambda_s3_bucket" { - description = "S3 bucket containing the Lambda deployment package" - type = string - } - - variable "lambda_s3_key" { - description = "S3 key (path) to the Lambda deployment package" - type = string - } \ No newline at end of file +variable "supabase_key" { + description = "Supabase anon/public key" + type = string + sensitive = true +}