From 2a77e48a2e7251ff8bd5164c27f13d3b51415b6d Mon Sep 17 00:00:00 2001 From: Amin <36745993+4Min4m@users.noreply.github.com> Date: Fri, 7 Nov 2025 19:19:41 +0100 Subject: [PATCH 1/2] Simplify pipeline and move Lambda packaging into Terraform --- .github/workflows/ci-cd.yml | 307 ++++++++++++++++++ .gitignore | 1 + README.md | 20 +- backend/dist/graphql/schema.js | 37 --- backend/dist/index.js | 58 ---- backend/dist/middleware/auth.js | 25 -- backend/dist/middleware/errorHandler.js | 8 - backend/dist/models/batch.js | 2 - backend/dist/models/transaction.js | 2 - backend/dist/routes/authRoutes.js | 28 -- backend/dist/routes/batchRoutes.js | 29 -- backend/dist/routes/transactionRoutes.js | 49 --- backend/dist/services/batchService.js | 42 --- backend/dist/services/supabaseService.js | 15 - backend/dist/services/transactionService.js | 58 ---- backend/dist/src/graphql/schema.js | 33 -- backend/dist/src/index.js | 51 --- backend/dist/src/middleware/auth.js | 25 -- backend/dist/src/middleware/errorHandler.js | 8 - backend/dist/src/models/batch.js | 2 - backend/dist/src/models/transaction.js | 2 - backend/dist/src/routes/authRoutes.js | 27 -- backend/dist/src/routes/batchRoutes.js | 18 - backend/dist/src/routes/transactionRoutes.js | 29 -- backend/dist/src/services/batchService.js | 42 --- backend/dist/src/services/supabaseService.js | 15 - .../dist/src/services/transactionService.js | 58 ---- backend/dist/src/utils/supabaseClient.js | 10 - backend/dist/utils/supabaseClient.js | 10 - backend/nodemon.json | 2 +- backend/package-lock.json | 19 -- backend/src/index.ts | 6 +- backend/src/utils/supabaseClient.ts | 2 +- buildspec-create-lambda-s3-bucket.yml | 10 - buildspec-frontend-build.yml | 34 -- buildspec-lambda-canary-deployment.yml | 61 ---- buildspec-lambda-package.yml | 59 ---- buildspec-smoke-tests.yml | 42 --- buildspec-static-analysis.yml | 88 ----- buildspec-terraform-apply.yml | 46 --- buildspec-terraform-plan.yml | 110 ------- frontend/dist/index.html | 14 - 
terraform/frontend.tf | 6 - terraform/lambda_api_gateway.tf | 132 ++++---- terraform/outputs.tf | 22 +- terraform/s3_lambda_code_bucket.tf | 29 +- terraform/variables.tf | 30 +- 47 files changed, 436 insertions(+), 1287 deletions(-) create mode 100644 .github/workflows/ci-cd.yml delete mode 100644 backend/dist/graphql/schema.js delete mode 100644 backend/dist/index.js delete mode 100644 backend/dist/middleware/auth.js delete mode 100644 backend/dist/middleware/errorHandler.js delete mode 100644 backend/dist/models/batch.js delete mode 100644 backend/dist/models/transaction.js delete mode 100644 backend/dist/routes/authRoutes.js delete mode 100644 backend/dist/routes/batchRoutes.js delete mode 100644 backend/dist/routes/transactionRoutes.js delete mode 100644 backend/dist/services/batchService.js delete mode 100644 backend/dist/services/supabaseService.js delete mode 100644 backend/dist/services/transactionService.js delete mode 100644 backend/dist/src/graphql/schema.js delete mode 100644 backend/dist/src/index.js delete mode 100644 backend/dist/src/middleware/auth.js delete mode 100644 backend/dist/src/middleware/errorHandler.js delete mode 100644 backend/dist/src/models/batch.js delete mode 100644 backend/dist/src/models/transaction.js delete mode 100644 backend/dist/src/routes/authRoutes.js delete mode 100644 backend/dist/src/routes/batchRoutes.js delete mode 100644 backend/dist/src/routes/transactionRoutes.js delete mode 100644 backend/dist/src/services/batchService.js delete mode 100644 backend/dist/src/services/supabaseService.js delete mode 100644 backend/dist/src/services/transactionService.js delete mode 100644 backend/dist/src/utils/supabaseClient.js delete mode 100644 backend/dist/utils/supabaseClient.js delete mode 100644 buildspec-create-lambda-s3-bucket.yml delete mode 100644 buildspec-frontend-build.yml delete mode 100644 buildspec-lambda-canary-deployment.yml delete mode 100644 buildspec-lambda-package.yml delete mode 100644 buildspec-smoke-tests.yml 
delete mode 100644 buildspec-static-analysis.yml delete mode 100644 buildspec-terraform-apply.yml delete mode 100644 buildspec-terraform-plan.yml delete mode 100644 frontend/dist/index.html diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml new file mode 100644 index 0000000..db85b8e --- /dev/null +++ b/.github/workflows/ci-cd.yml @@ -0,0 +1,307 @@ +name: CI/CD Pipeline + +on: + push: + branches: [ main ] + workflow_dispatch: + +permissions: + contents: read + id-token: write + +env: + AWS_REGION: ${{ secrets.AWS_REGION }} + TERRAFORM_VERSION: 1.7.5 + NODE_VERSION: '20' + SUPABASE_URL: ${{ secrets.SUPABASE_URL }} + SUPABASE_KEY: ${{ secrets.SUPABASE_KEY }} + AB_TRAFFIC_WEIGHT: '0.5' + +jobs: + terraform-plan: + name: Terraform Plan + runs-on: ubuntu-latest + env: + TF_VAR_supabase_url: ${{ env.SUPABASE_URL }} + TF_VAR_supabase_key: ${{ env.SUPABASE_KEY }} + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Set up Node.js + uses: actions/setup-node@v4 + with: + node-version: ${{ env.NODE_VERSION }} + cache: 'npm' + cache-dependency-path: | + frontend/package-lock.json + lambda/package-lock.json + + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + role-to-assume: ${{ secrets.AWS_DEPLOY_ROLE_ARN }} + aws-region: ${{ env.AWS_REGION }} + + - name: Set up Terraform + uses: hashicorp/setup-terraform@v3 + with: + terraform_version: ${{ env.TERRAFORM_VERSION }} + + - name: Install frontend dependencies + working-directory: frontend + run: npm ci + + - name: Build frontend + working-directory: frontend + run: npm run build + + - name: Install Lambda dependencies + working-directory: lambda + run: npm ci + + - name: Terraform init + working-directory: terraform + run: terraform init -input=false -reconfigure + + - name: Terraform validate + working-directory: terraform + run: terraform validate + + - name: Terraform plan + working-directory: terraform + run: terraform plan -input=false -out=tfplan 
+ + - name: Capture current Lambda alias versions + id: snapshot-aliases + run: | + set -euo pipefail + mkdir -p artifacts + FUNCTION_NAME="TransactionSimulatorAPI" + LIVE_VERSION=$(aws lambda get-alias --function-name "$FUNCTION_NAME" --name LIVE --query 'FunctionVersion' --output text 2>/dev/null || true) + BETA_VERSION=$(aws lambda get-alias --function-name "$FUNCTION_NAME" --name BETA --query 'FunctionVersion' --output text 2>/dev/null || true) + cat <<JSON > artifacts/lambda_alias_versions.json + { + "function_name": "$FUNCTION_NAME", + "live_version": "${LIVE_VERSION:-}", + "beta_version": "${BETA_VERSION:-}" + } + JSON + + - name: Upload Terraform plan + uses: actions/upload-artifact@v4 + with: + name: terraform-plan + path: terraform/tfplan + + - name: Upload alias snapshot + uses: actions/upload-artifact@v4 + with: + name: pre-deploy-alias-snapshot + path: artifacts/lambda_alias_versions.json + + manual-approval: + name: Await Manual Approval + runs-on: ubuntu-latest + needs: terraform-plan + environment: + name: production + steps: + - name: Approval required + run: echo "Approve this job in the production environment to continue." 
+ + terraform-apply: + name: Terraform Apply + runs-on: ubuntu-latest + needs: manual-approval + env: + TF_VAR_supabase_url: ${{ env.SUPABASE_URL }} + TF_VAR_supabase_key: ${{ env.SUPABASE_KEY }} + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Set up Node.js + uses: actions/setup-node@v4 + with: + node-version: ${{ env.NODE_VERSION }} + cache: 'npm' + cache-dependency-path: | + frontend/package-lock.json + lambda/package-lock.json + + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + role-to-assume: ${{ secrets.AWS_DEPLOY_ROLE_ARN }} + aws-region: ${{ env.AWS_REGION }} + + - name: Set up Terraform + uses: hashicorp/setup-terraform@v3 + with: + terraform_version: ${{ env.TERRAFORM_VERSION }} + + - name: Install frontend dependencies + working-directory: frontend + run: npm ci + + - name: Build frontend + working-directory: frontend + run: npm run build + + - name: Install Lambda dependencies + working-directory: lambda + run: npm ci + + - name: Terraform init + working-directory: terraform + run: terraform init -input=false -reconfigure + + - name: Terraform apply + working-directory: terraform + run: terraform apply -input=false -auto-approve + + - name: Capture Terraform outputs + working-directory: terraform + run: terraform output -json > ../terraform_outputs.json + + - name: Upload Terraform outputs + uses: actions/upload-artifact@v4 + with: + name: terraform-outputs + path: terraform_outputs.json + + lambda-ab-test: + name: Configure Lambda A/B Routing + runs-on: ubuntu-latest + needs: terraform-apply + steps: + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + role-to-assume: ${{ secrets.AWS_DEPLOY_ROLE_ARN }} + aws-region: ${{ env.AWS_REGION }} + + - name: Download Terraform outputs + uses: actions/download-artifact@v4 + with: + name: terraform-outputs + path: artifacts + + - name: Download alias snapshot + uses: actions/download-artifact@v4 + with: + 
name: pre-deploy-alias-snapshot + path: artifacts + + - name: Install jq + run: sudo apt-get update -y && sudo apt-get install -y jq + + - name: Configure traffic weights + env: + ROUTING_WEIGHT: ${{ env.AB_TRAFFIC_WEIGHT }} + run: | + set -euo pipefail + SNAPSHOT_FILE="artifacts/lambda_alias_versions.json" + OUTPUT_FILE="artifacts/terraform_outputs.json" + + if [ ! -f "$SNAPSHOT_FILE" ]; then + echo "Alias snapshot not found; skipping A/B configuration." + exit 0 + fi + + PREVIOUS_VERSION=$(jq -r '.live_version // ""' "$SNAPSHOT_FILE") + FUNCTION_NAME=$(jq -r '.lambda_function_name.value // ""' "$OUTPUT_FILE") + + if [ -z "$FUNCTION_NAME" ] || [ "$FUNCTION_NAME" = "null" ]; then + echo "Unable to determine Lambda function name; skipping A/B configuration." >&2 + exit 1 + fi + + if [ -z "$PREVIOUS_VERSION" ] || [ "$PREVIOUS_VERSION" = "None" ]; then + echo "No previous live version recorded; skipping A/B configuration." + exit 0 + fi + + NEW_VERSION=$(aws lambda get-alias --function-name "$FUNCTION_NAME" --name BETA --query 'FunctionVersion' --output text) + + if [ -z "$NEW_VERSION" ] || [ "$NEW_VERSION" = "None" ]; then + echo "Unable to determine beta alias version; skipping A/B configuration." + exit 0 + fi + + if [ "$PREVIOUS_VERSION" = "$NEW_VERSION" ]; then + echo "Live and beta versions match; nothing to route." + exit 0 + fi + + echo "Routing $(echo "$ROUTING_WEIGHT" | awk '{printf "%s%%", $1*100}') of traffic to version $NEW_VERSION while keeping version $PREVIOUS_VERSION as primary." 
+ + aws lambda update-alias \ + --function-name "$FUNCTION_NAME" \ + --name LIVE \ + --function-version "$PREVIOUS_VERSION" + + aws lambda update-alias \ + --function-name "$FUNCTION_NAME" \ + --name LIVE \ + --routing-config "AdditionalVersionWeights={\"$NEW_VERSION\"=$ROUTING_WEIGHT}" + + smoke-tests: + name: Smoke Tests + runs-on: ubuntu-latest + needs: lambda-ab-test + steps: + - name: Download Terraform outputs + uses: actions/download-artifact@v4 + with: + name: terraform-outputs + path: artifacts + + - name: Install curl and jq + run: sudo apt-get update -y && sudo apt-get install -y curl jq + + - name: Run smoke tests + env: + TERRAFORM_OUTPUTS: artifacts/terraform_outputs.json + run: | + set -euo pipefail + API_URL=$(jq -r '.api_gateway_invoke_url.value // ""' "$TERRAFORM_OUTPUTS") + FRONTEND_URL=$(jq -r '.frontend_website_url.value // ""' "$TERRAFORM_OUTPUTS") + + if [ -z "$API_URL" ] || [ "$API_URL" = "null" ]; then + echo "ERROR: API_URL is empty." >&2 + exit 1 + fi + + if [ -z "$FRONTEND_URL" ] || [ "$FRONTEND_URL" = "null" ]; then + echo "ERROR: FRONTEND_URL is empty." >&2 + exit 1 + fi + + echo "Testing backend login endpoint at ${API_URL}/login" + API_STATUS=$(curl -sS -o api_response.json -w '%{http_code}' \ + -X POST "$API_URL/login" \ + -H 'Content-Type: application/json' \ + -d '{"username":"admin","password":"password123"}') + + if [ "$API_STATUS" -ne 200 ]; then + echo "Login endpoint returned status $API_STATUS" >&2 + cat api_response.json >&2 || true + exit 1 + fi + + jq -e '.success == true' api_response.json >/dev/null + + echo "Testing frontend availability at $FRONTEND_URL" + FRONTEND_STATUS=$(curl -sS -o frontend_response.html -w '%{http_code}' "$FRONTEND_URL") + + if [ "$FRONTEND_STATUS" -ge 400 ]; then + echo "Frontend returned status $FRONTEND_STATUS" >&2 + cat frontend_response.html >&2 || true + exit 1 + fi + + grep -q "Payment Simulator" frontend_response.html + + echo "Smoke tests completed successfully." 
diff --git a/.gitignore b/.gitignore index 051bbfe..5b8e844 100644 --- a/.gitignore +++ b/.gitignore @@ -10,6 +10,7 @@ yarn-error.log* # TypeScript *.tsbuildinfo dist/ +**/dist/ **/*.js.map **/*.d.ts diff --git a/README.md b/README.md index 3e6c73c..74b01fe 100644 --- a/README.md +++ b/README.md @@ -39,8 +39,8 @@ A robust payment transaction simulator designed to test and demonstrate payment - **Icons**: Lucide React - **Database**: Supabase - **Build Tool**: Vite -- **Backend**: FastAPI (Python) -- **Deployment**: Render +- **Backend**: Express.js with Apollo Server (TypeScript) +- **Deployment**: AWS (Terraform + GitHub Actions) - **Containerization**: Docker --- @@ -49,7 +49,7 @@ A robust payment transaction simulator designed to test and demonstrate payment ### **Prerequisites** - Node.js (v18 or higher) -- Python (v3.9 or higher) +- npm (comes with Node.js) - Docker (optional) ### **Steps to Run Locally** @@ -67,7 +67,7 @@ A robust payment transaction simulator designed to test and demonstrate payment # Backend cd ../backend - pip install -r requirements.txt + npm install ``` 3. Set up environment variables: @@ -86,7 +86,7 @@ A robust payment transaction simulator designed to test and demonstrate payment - **Backend**: ```bash cd backend - uvicorn app.main:app --reload + npm run dev ``` 5. Open your browser and navigate to `http://localhost:5173`. 
@@ -189,7 +189,7 @@ This project is licensed under the MIT License - see the [LICENSE](LICENSE) file - Built with [Supabase](https://supabase.com) for real-time database capabilities - UI components powered by [Tailwind CSS](https://tailwindcss.com) - Icons provided by [Lucide](https://lucide.dev) -- Backend powered by [FastAPI](https://fastapi.tiangolo.com) +- Backend powered by [Express.js](https://expressjs.com/) and [Apollo Server](https://www.apollographql.com/docs/apollo-server/) --- @@ -203,8 +203,10 @@ This project is licensed under the MIT License - see the [LICENSE](LICENSE) file ## 📊 **Infrastructure and Deployment** ### **Production Environment** -- **Frontend Hosting**: Render -- **Backend Hosting**: Render +- **Frontend Hosting**: Amazon S3 static website +- **Backend Hosting**: AWS Lambda behind API Gateway +- **Infrastructure as Code**: Terraform +- **CI/CD**: GitHub Actions (manual approval + smoke tests) - **Database**: Supabase ### **Environment Variables** @@ -285,4 +287,4 @@ Join the discussion and share your feedback: --- -**#PaymentSimulator #FastAPI #ReactJS #Supabase #Docker #Render #DevOps #OpenSource #FinTech #PaymentProcessing #LoadTesting #TransactionSimulation** +**#PaymentSimulator #ExpressJS #ReactJS #Supabase #Docker #Render #DevOps #OpenSource #FinTech #PaymentProcessing #LoadTesting #TransactionSimulation** diff --git a/backend/dist/graphql/schema.js b/backend/dist/graphql/schema.js deleted file mode 100644 index 9c88e66..0000000 --- a/backend/dist/graphql/schema.js +++ /dev/null @@ -1,37 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.resolvers = exports.typeDefs = void 0; -const graphql_tag_1 = require("graphql-tag"); -const graphql_type_json_1 = require("graphql-type-json"); // You'll need to install this -const supabaseService_1 = require("../services/supabaseService"); -// Define schema with JSON scalar type -exports.typeDefs = (0, graphql_tag_1.gql) ` - scalar JSON - - type 
Transaction { - id: ID! - card_number: String! - amount: Float! - merchant_id: String! - status: String! - type: String! - timestamp: String! - created_at: String! - updated_at: String! - iso8583_message: JSON - } - - type Query { - transactions: [Transaction!]! - } -`; -// Define resolvers -exports.resolvers = { - JSON: graphql_type_json_1.GraphQLJSON, - Query: { - transactions: async () => { - const transactions = await (0, supabaseService_1.getTransactions)(); - return transactions; - }, - }, -}; diff --git a/backend/dist/index.js b/backend/dist/index.js deleted file mode 100644 index 94e1403..0000000 --- a/backend/dist/index.js +++ /dev/null @@ -1,58 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const express_1 = __importDefault(require("express")); -const server_1 = require("@apollo/server"); -const express4_1 = require("@apollo/server/express4"); -const cors_1 = __importDefault(require("cors")); -const body_parser_1 = __importDefault(require("body-parser")); -const dotenv_1 = __importDefault(require("dotenv")); -const transactionRoutes_1 = __importDefault(require("./routes/transactionRoutes")); -const batchRoutes_1 = __importDefault(require("./routes/batchRoutes")); -const authRoutes_1 = __importDefault(require("./routes/authRoutes")); -const errorHandler_1 = require("./middleware/errorHandler"); -const schema_1 = require("./graphql/schema"); -// لود متغیرهای محیطی -dotenv_1.default.config({ path: "./.env" }); -console.log("Env vars:", { - PORT: process.env.PORT, - SUPABASE_URL: process.env.SUPABASE_URL, - SUPABASE_KEY: process.env.SUPABASE_KEY, -}); -const app = (0, express_1.default)(); -app.use((0, cors_1.default)({ - origin: (origin, callback) => { - const allowedOrigins = ["*"]; - if (!origin || allowedOrigins.includes(origin) || origin.includes(".app.github.dev")) { - 
callback(null, true); - } - else { - callback(new Error("Not allowed by CORS")); - } - }, - methods: ["GET", "POST", "OPTIONS"], - allowedHeaders: ["Content-Type", "Authorization"], - credentials: true, -})); -app.options("*", (0, cors_1.default)()); // برای Preflight -app.use(body_parser_1.default.json()); -app.use("/api", transactionRoutes_1.default); -app.use("/api", batchRoutes_1.default); -app.use("/api", authRoutes_1.default); -const server = new server_1.ApolloServer({ - typeDefs: schema_1.typeDefs, - resolvers: schema_1.resolvers, -}); -async function startServer() { - await server.start(); - app.use("/graphql", (0, express4_1.expressMiddleware)(server)); - app.use(errorHandler_1.errorHandler); - const PORT = process.env.PORT || 8000; - app.listen(PORT, () => { - console.log(`Server running on port ${PORT}`); - console.log(`GraphQL endpoint: http://localhost:${PORT}/graphql`); - }); -} -startServer(); diff --git a/backend/dist/middleware/auth.js b/backend/dist/middleware/auth.js deleted file mode 100644 index b2a1616..0000000 --- a/backend/dist/middleware/auth.js +++ /dev/null @@ -1,25 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.authenticateToken = void 0; -const jsonwebtoken_1 = __importDefault(require("jsonwebtoken")); -const JWT_SECRET = process.env.JWT_SECRET || "your_jwt_secret"; -const authenticateToken = (req, res, next) => { - const authHeader = req.headers["authorization"]; - const token = authHeader && authHeader.split(" ")[1]; // Bearer TOKEN - if (!token) { - res.status(401).json({ detail: "Access token required" }); - return; - } - try { - const user = jsonwebtoken_1.default.verify(token, JWT_SECRET); - req.user = user; - next(); - } - catch (error) { - res.status(403).json({ detail: "Invalid token" }); - } -}; -exports.authenticateToken = authenticateToken; diff --git a/backend/dist/middleware/errorHandler.js b/backend/dist/middleware/errorHandler.js deleted file mode 100644 index 422bcb2..0000000 --- a/backend/dist/middleware/errorHandler.js +++ /dev/null @@ -1,8 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.errorHandler = void 0; -const errorHandler = (error, req, res, next) => { - console.error(error.stack); - res.status(500).json({ detail: error.message || "Internal Server Error" }); -}; -exports.errorHandler = errorHandler; diff --git a/backend/dist/models/batch.js b/backend/dist/models/batch.js deleted file mode 100644 index c8ad2e5..0000000 --- a/backend/dist/models/batch.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/backend/dist/models/transaction.js b/backend/dist/models/transaction.js deleted file mode 100644 index c8ad2e5..0000000 --- a/backend/dist/models/transaction.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/backend/dist/routes/authRoutes.js b/backend/dist/routes/authRoutes.js deleted file mode 100644 index b985b9f..0000000 --- 
a/backend/dist/routes/authRoutes.js +++ /dev/null @@ -1,28 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const express_1 = require("express"); -const jsonwebtoken_1 = __importDefault(require("jsonwebtoken")); -const router = (0, express_1.Router)(); -const JWT_SECRET = process.env.JWT_SECRET || "your_jwt_secret"; -const loginHandler = async (req, res, next) => { - try { - const { username, password } = req.body; - console.log("Received:", { username, password }); - // اعتبارسنجی ساده - if (username !== "admin" || password !== "password") { - res.status(401).json({ detail: "Invalid credentials" }); - return; - } - const user = { id: 1, username: "admin", role: "admin" }; - const token = jsonwebtoken_1.default.sign(user, JWT_SECRET, { expiresIn: "1h" }); - res.json({ token }); - } - catch (error) { - next(error); - } -}; -router.post("/login", loginHandler); -exports.default = router; diff --git a/backend/dist/routes/batchRoutes.js b/backend/dist/routes/batchRoutes.js deleted file mode 100644 index 189c24c..0000000 --- a/backend/dist/routes/batchRoutes.js +++ /dev/null @@ -1,29 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const express_1 = require("express"); -const batchService_1 = require("../services/batchService"); -const auth_1 = require("../middleware/auth"); -const router = (0, express_1.Router)(); -// مسیر محافظت‌شده -router.post("/process-batch", auth_1.authenticateToken, async (req, res, next) => { - try { - const batch = req.body; - const response = await (0, batchService_1.processBatch)(batch); - res.status(200).json(response); - } - catch (error) { - next(error); - } -}); -// مسیر عمومی -router.post("/public/process-batch", async (req, res, next) => { - try { - const batch = req.body; - const response = await (0, 
batchService_1.processBatch)(batch); - res.status(200).json(response); - } - catch (error) { - next(error); - } -}); -exports.default = router; diff --git a/backend/dist/routes/transactionRoutes.js b/backend/dist/routes/transactionRoutes.js deleted file mode 100644 index ea9e146..0000000 --- a/backend/dist/routes/transactionRoutes.js +++ /dev/null @@ -1,49 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const express_1 = require("express"); -const transactionService_1 = require("../services/transactionService"); -const supabaseService_1 = require("../services/supabaseService"); -const auth_1 = require("../middleware/auth"); -const router = (0, express_1.Router)(); -// مسیر محافظت‌شده (نیاز به توکن) -router.get("/transactions", auth_1.authenticateToken, async (req, res, next) => { - try { - const transactions = await (0, supabaseService_1.getTransactions)(); - res.status(200).json(transactions); - } - catch (error) { - next(error); - } -}); -// مسیر عمومی (بدون نیاز به توکن) -router.get("/public/transactions", async (req, res, next) => { - try { - const transactions = await (0, supabaseService_1.getTransactions)(); - res.status(200).json(transactions); - } - catch (error) { - next(error); - } -}); -// مسیرهای دیگه (مثل process-transaction) هم همین‌طور -router.post("/process-transaction", auth_1.authenticateToken, async (req, res, next) => { - try { - const transaction = req.body; - const response = await (0, transactionService_1.processTransaction)(transaction); - res.status(200).json(response); - } - catch (error) { - next(error); - } -}); -router.post("/public/process-transaction", async (req, res, next) => { - try { - const transaction = req.body; - const response = await (0, transactionService_1.processTransaction)(transaction); - res.status(200).json(response); - } - catch (error) { - next(error); - } -}); -exports.default = router; diff --git a/backend/dist/services/batchService.js b/backend/dist/services/batchService.js 
deleted file mode 100644 index 499f91d..0000000 --- a/backend/dist/services/batchService.js +++ /dev/null @@ -1,42 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.processBatch = void 0; -const transactionService_1 = require("./transactionService"); -const processBatch = async (batch) => { - const totalTransactions = batch.total_transactions; - const amountPerTransaction = batch.total_amount / totalTransactions; - const delayBetweenTransactions = batch.duration_seconds / totalTransactions; - let successCount = 0; - let failureCount = 0; - let totalResponseTime = 0; - let totalProcessedAmount = 0; - const transactions = []; - for (let i = 0; i < totalTransactions; i++) { - const startTime = Date.now(); - const transaction = { - card_number: "4111111111111111", - amount: amountPerTransaction, - merchant_id: batch.merchant_id, - }; - const response = await (0, transactionService_1.processTransaction)(transaction); - if (response.success) { - successCount++; - totalProcessedAmount += amountPerTransaction; - } - else { - failureCount++; - } - transactions.push(response); - totalResponseTime += Date.now() - startTime; - // delay between transactions - await new Promise((resolve) => setTimeout(resolve, delayBetweenTransactions * 1000)); - } - return { - success_count: successCount, - failure_count: failureCount, - average_response_time: totalResponseTime / totalTransactions, - total_processed_amount: totalProcessedAmount, - transactions, - }; -}; -exports.processBatch = processBatch; diff --git a/backend/dist/services/supabaseService.js b/backend/dist/services/supabaseService.js deleted file mode 100644 index 16d0938..0000000 --- a/backend/dist/services/supabaseService.js +++ /dev/null @@ -1,15 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.getTransactions = void 0; -const supabaseClient_1 = require("../utils/supabaseClient"); -const getTransactions = async () => { - const { 
data, error } = await supabaseClient_1.supabase - .from("transactions") - .select("*") - .order("created_at", { ascending: false }) // order by time - .limit(1); // only one record - if (error) - throw new Error(error.message); - return data; -}; -exports.getTransactions = getTransactions; diff --git a/backend/dist/services/transactionService.js b/backend/dist/services/transactionService.js deleted file mode 100644 index 802042d..0000000 --- a/backend/dist/services/transactionService.js +++ /dev/null @@ -1,58 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.processTransaction = void 0; -const supabaseClient_1 = require("../utils/supabaseClient"); -const generateISO8583Message = (transaction, responseCode) => { - const now = new Date(); - return { - mti: "0110", - primaryAccountNumber: transaction.card_number, - processingCode: "000000", - amount: transaction.amount, - transmissionDateTime: now.toISOString().replace(/[-:T.]/g, "").slice(0, 14), - systemTraceNumber: Math.floor(Math.random() * (999999 - 100000 + 1) + 100000).toString(), - localTransactionTime: now.toTimeString().slice(0, 8), - localTransactionDate: now.toLocaleDateString("en-US", { month: "2-digit", day: "2-digit", year: "numeric" }), - merchantType: "5999", - responseCode: responseCode, - terminalId: "TERM001", - merchantId: transaction.merchant_id, - }; -}; -const processTransaction = async (transaction) => { - // simulating delay - await new Promise((resolve) => setTimeout(resolve, Math.random() * 900 + 100)); - // aprrove logic - const isApproved = Math.random() < 0.9; // 90% success - const responseCode = isApproved ? "00" : "05"; - const authorizationCode = isApproved - ? 
Array(6) - .fill(0) - .map(() => "ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789".charAt(Math.floor(Math.random() * 36))) - .join("") - : undefined; - // generating ISO 8583 message - const iso8583Message = generateISO8583Message(transaction, responseCode); - // Supabase data generation - const transactionData = { - card_number: transaction.card_number, - amount: transaction.amount, - merchant_id: transaction.merchant_id, - status: isApproved ? "APPROVED" : "DECLINED", - type: "PURCHASE", - timestamp: new Date().toISOString(), - created_at: new Date().toISOString(), - updated_at: new Date().toISOString(), - iso8583_message: iso8583Message, - }; - // insert into Supabase - await supabaseClient_1.supabase.from("transactions").insert(transactionData).select(); - return { - success: isApproved, - message: isApproved ? "Transaction approved" : "Transaction declined", - data: transaction, - response_code: responseCode, - authorization_code: authorizationCode, - }; -}; -exports.processTransaction = processTransaction; diff --git a/backend/dist/src/graphql/schema.js b/backend/dist/src/graphql/schema.js deleted file mode 100644 index ab307e1..0000000 --- a/backend/dist/src/graphql/schema.js +++ /dev/null @@ -1,33 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.resolvers = exports.typeDefs = void 0; -const graphql_tag_1 = require("graphql-tag"); // تغییر به graphql-tag -const supabaseService_1 = require("../services/supabaseService"); -// تعریف اسکیما -exports.typeDefs = (0, graphql_tag_1.gql) ` - type Transaction { - id: ID! - card_number: String! - amount: Float! - merchant_id: String! - status: String! - type: String! - timestamp: String! - created_at: String! - updated_at: String! - iso8583_message: JSON - } - - type Query { - transactions: [Transaction!]! 
- } -`; -// تعریف Resolverها -exports.resolvers = { - Query: { - transactions: async () => { - const transactions = await (0, supabaseService_1.getTransactions)(); - return transactions; - }, - }, -}; diff --git a/backend/dist/src/index.js b/backend/dist/src/index.js deleted file mode 100644 index aeddac7..0000000 --- a/backend/dist/src/index.js +++ /dev/null @@ -1,51 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const express_1 = __importDefault(require("express")); -const dotenv_1 = __importDefault(require("dotenv")); -const transactionRoutes_1 = __importDefault(require("./routes/transactionRoutes")); -const batchRoutes_1 = __importDefault(require("./routes/batchRoutes")); -const authRoutes_1 = __importDefault(require("./routes/authRoutes")); -const errorHandler_1 = require("./middleware/errorHandler"); -const auth_1 = require("./middleware/auth"); -const server_1 = require("@apollo/server"); -const express4_1 = require("@apollo/server/express4"); -const schema_1 = require("./graphql/schema"); -const body_parser_1 = require("body-parser"); -dotenv_1.default.config(); -const app = (0, express_1.default)(); -const PORT = process.env.PORT || 8000; -app.use(express_1.default.json()); -app.use((req, res, next) => { - res.header("Access-Control-Allow-Origin", "*"); - res.header("Access-Control-Allow-Methods", "GET, POST, PUT, DELETE, OPTIONS"); - res.header("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept, Authorization"); - next(); -}); -app.get("/", (req, res) => { - res.json({ message: "Welcome to the Payment Simulator API!" 
}); -}); -app.use("/api", authRoutes_1.default); -app.use("/api", transactionRoutes_1.default); -app.use("/api", batchRoutes_1.default); -const apolloServer = new server_1.ApolloServer({ - typeDefs: schema_1.typeDefs, - resolvers: schema_1.resolvers, -}); -const startServer = async () => { - await apolloServer.start(); - app.use("/graphql", (0, body_parser_1.json)(), auth_1.authenticateToken, (0, express4_1.expressMiddleware)(apolloServer, { - context: async ({ req, res }) => ({ - req, - res, - }), - })); -}; -startServer().catch(console.error); -app.use(errorHandler_1.errorHandler); -app.listen(PORT, () => { - console.log(`Server is running on port ${PORT}`); - console.log(`GraphQL endpoint available at http://localhost:${PORT}/graphql`); -}); diff --git a/backend/dist/src/middleware/auth.js b/backend/dist/src/middleware/auth.js deleted file mode 100644 index b2a1616..0000000 --- a/backend/dist/src/middleware/auth.js +++ /dev/null @@ -1,25 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.authenticateToken = void 0; -const jsonwebtoken_1 = __importDefault(require("jsonwebtoken")); -const JWT_SECRET = process.env.JWT_SECRET || "your_jwt_secret"; -const authenticateToken = (req, res, next) => { - const authHeader = req.headers["authorization"]; - const token = authHeader && authHeader.split(" ")[1]; // Bearer TOKEN - if (!token) { - res.status(401).json({ detail: "Access token required" }); - return; - } - try { - const user = jsonwebtoken_1.default.verify(token, JWT_SECRET); - req.user = user; - next(); - } - catch (error) { - res.status(403).json({ detail: "Invalid token" }); - } -}; -exports.authenticateToken = authenticateToken; diff --git a/backend/dist/src/middleware/errorHandler.js b/backend/dist/src/middleware/errorHandler.js deleted file mode 100644 index 422bcb2..0000000 --- a/backend/dist/src/middleware/errorHandler.js +++ /dev/null @@ -1,8 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.errorHandler = void 0; -const errorHandler = (error, req, res, next) => { - console.error(error.stack); - res.status(500).json({ detail: error.message || "Internal Server Error" }); -}; -exports.errorHandler = errorHandler; diff --git a/backend/dist/src/models/batch.js b/backend/dist/src/models/batch.js deleted file mode 100644 index c8ad2e5..0000000 --- a/backend/dist/src/models/batch.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/backend/dist/src/models/transaction.js b/backend/dist/src/models/transaction.js deleted file mode 100644 index c8ad2e5..0000000 --- a/backend/dist/src/models/transaction.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/backend/dist/src/routes/authRoutes.js b/backend/dist/src/routes/authRoutes.js deleted file mode 100644 index 
d26392c..0000000 --- a/backend/dist/src/routes/authRoutes.js +++ /dev/null @@ -1,27 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const express_1 = require("express"); -const jsonwebtoken_1 = __importDefault(require("jsonwebtoken")); -const router = (0, express_1.Router)(); -const JWT_SECRET = process.env.JWT_SECRET || "your_jwt_secret"; -const loginHandler = async (req, res, next) => { - try { - const { username, password } = req.body; - // اعتبارسنجی ساده - if (username !== "admin" || password !== "password") { - res.status(401).json({ detail: "Invalid credentials" }); - return; - } - const user = { id: 1, username: "admin", role: "admin" }; - const token = jsonwebtoken_1.default.sign(user, JWT_SECRET, { expiresIn: "1h" }); - res.json({ token }); - } - catch (error) { - next(error); - } -}; -router.post("/login", loginHandler); -exports.default = router; diff --git a/backend/dist/src/routes/batchRoutes.js b/backend/dist/src/routes/batchRoutes.js deleted file mode 100644 index 66d2fde..0000000 --- a/backend/dist/src/routes/batchRoutes.js +++ /dev/null @@ -1,18 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const express_1 = require("express"); -const batchService_1 = require("../services/batchService"); -const auth_1 = require("../middleware/auth"); -const router = (0, express_1.Router)(); -const processBatchHandler = async (req, res, next) => { - try { - const batch = req.body; - const response = await (0, batchService_1.processBatch)(batch); - res.status(200).json(response); - } - catch (error) { - next(error); - } -}; -router.post("/process-batch", auth_1.authenticateToken, processBatchHandler); -exports.default = router; diff --git a/backend/dist/src/routes/transactionRoutes.js b/backend/dist/src/routes/transactionRoutes.js deleted file mode 
100644 index 2abe674..0000000 --- a/backend/dist/src/routes/transactionRoutes.js +++ /dev/null @@ -1,29 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const express_1 = require("express"); -const transactionService_1 = require("../services/transactionService"); -const supabaseService_1 = require("../services/supabaseService"); -const auth_1 = require("../middleware/auth"); -const router = (0, express_1.Router)(); -const processTransactionHandler = async (req, res, next) => { - try { - const transaction = req.body; - const response = await (0, transactionService_1.processTransaction)(transaction); - res.status(200).json(response); - } - catch (error) { - next(error); - } -}; -const getTransactionsHandler = async (req, res, next) => { - try { - const transactions = await (0, supabaseService_1.getTransactions)(); - res.status(200).json(transactions); - } - catch (error) { - next(error); - } -}; -router.post("/process-transaction", auth_1.authenticateToken, processTransactionHandler); -router.get("/transactions", auth_1.authenticateToken, getTransactionsHandler); -exports.default = router; diff --git a/backend/dist/src/services/batchService.js b/backend/dist/src/services/batchService.js deleted file mode 100644 index 499f91d..0000000 --- a/backend/dist/src/services/batchService.js +++ /dev/null @@ -1,42 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.processBatch = void 0; -const transactionService_1 = require("./transactionService"); -const processBatch = async (batch) => { - const totalTransactions = batch.total_transactions; - const amountPerTransaction = batch.total_amount / totalTransactions; - const delayBetweenTransactions = batch.duration_seconds / totalTransactions; - let successCount = 0; - let failureCount = 0; - let totalResponseTime = 0; - let totalProcessedAmount = 0; - const transactions = []; - for (let i = 0; i < totalTransactions; i++) { - const startTime = 
Date.now(); - const transaction = { - card_number: "4111111111111111", - amount: amountPerTransaction, - merchant_id: batch.merchant_id, - }; - const response = await (0, transactionService_1.processTransaction)(transaction); - if (response.success) { - successCount++; - totalProcessedAmount += amountPerTransaction; - } - else { - failureCount++; - } - transactions.push(response); - totalResponseTime += Date.now() - startTime; - // delay between transactions - await new Promise((resolve) => setTimeout(resolve, delayBetweenTransactions * 1000)); - } - return { - success_count: successCount, - failure_count: failureCount, - average_response_time: totalResponseTime / totalTransactions, - total_processed_amount: totalProcessedAmount, - transactions, - }; -}; -exports.processBatch = processBatch; diff --git a/backend/dist/src/services/supabaseService.js b/backend/dist/src/services/supabaseService.js deleted file mode 100644 index 16d0938..0000000 --- a/backend/dist/src/services/supabaseService.js +++ /dev/null @@ -1,15 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.getTransactions = void 0; -const supabaseClient_1 = require("../utils/supabaseClient"); -const getTransactions = async () => { - const { data, error } = await supabaseClient_1.supabase - .from("transactions") - .select("*") - .order("created_at", { ascending: false }) // order by time - .limit(1); // only one record - if (error) - throw new Error(error.message); - return data; -}; -exports.getTransactions = getTransactions; diff --git a/backend/dist/src/services/transactionService.js b/backend/dist/src/services/transactionService.js deleted file mode 100644 index 802042d..0000000 --- a/backend/dist/src/services/transactionService.js +++ /dev/null @@ -1,58 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.processTransaction = void 0; -const supabaseClient_1 = require("../utils/supabaseClient"); -const 
generateISO8583Message = (transaction, responseCode) => { - const now = new Date(); - return { - mti: "0110", - primaryAccountNumber: transaction.card_number, - processingCode: "000000", - amount: transaction.amount, - transmissionDateTime: now.toISOString().replace(/[-:T.]/g, "").slice(0, 14), - systemTraceNumber: Math.floor(Math.random() * (999999 - 100000 + 1) + 100000).toString(), - localTransactionTime: now.toTimeString().slice(0, 8), - localTransactionDate: now.toLocaleDateString("en-US", { month: "2-digit", day: "2-digit", year: "numeric" }), - merchantType: "5999", - responseCode: responseCode, - terminalId: "TERM001", - merchantId: transaction.merchant_id, - }; -}; -const processTransaction = async (transaction) => { - // simulating delay - await new Promise((resolve) => setTimeout(resolve, Math.random() * 900 + 100)); - // aprrove logic - const isApproved = Math.random() < 0.9; // 90% success - const responseCode = isApproved ? "00" : "05"; - const authorizationCode = isApproved - ? Array(6) - .fill(0) - .map(() => "ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789".charAt(Math.floor(Math.random() * 36))) - .join("") - : undefined; - // generating ISO 8583 message - const iso8583Message = generateISO8583Message(transaction, responseCode); - // Supabase data generation - const transactionData = { - card_number: transaction.card_number, - amount: transaction.amount, - merchant_id: transaction.merchant_id, - status: isApproved ? "APPROVED" : "DECLINED", - type: "PURCHASE", - timestamp: new Date().toISOString(), - created_at: new Date().toISOString(), - updated_at: new Date().toISOString(), - iso8583_message: iso8583Message, - }; - // insert into Supabase - await supabaseClient_1.supabase.from("transactions").insert(transactionData).select(); - return { - success: isApproved, - message: isApproved ? 
"Transaction approved" : "Transaction declined", - data: transaction, - response_code: responseCode, - authorization_code: authorizationCode, - }; -}; -exports.processTransaction = processTransaction; diff --git a/backend/dist/src/utils/supabaseClient.js b/backend/dist/src/utils/supabaseClient.js deleted file mode 100644 index 2f780ed..0000000 --- a/backend/dist/src/utils/supabaseClient.js +++ /dev/null @@ -1,10 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.supabase = void 0; -const supabase_js_1 = require("@supabase/supabase-js"); -const supabaseUrl = process.env.SUPABASE_URL; -const supabaseKey = process.env.SUPABASE_KEY; -if (!supabaseUrl || !supabaseKey) { - throw new Error("Supabase URL and key must be set in environment variables."); -} -exports.supabase = (0, supabase_js_1.createClient)(supabaseUrl, supabaseKey); diff --git a/backend/dist/utils/supabaseClient.js b/backend/dist/utils/supabaseClient.js deleted file mode 100644 index 2f780ed..0000000 --- a/backend/dist/utils/supabaseClient.js +++ /dev/null @@ -1,10 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.supabase = void 0; -const supabase_js_1 = require("@supabase/supabase-js"); -const supabaseUrl = process.env.SUPABASE_URL; -const supabaseKey = process.env.SUPABASE_KEY; -if (!supabaseUrl || !supabaseKey) { - throw new Error("Supabase URL and key must be set in environment variables."); -} -exports.supabase = (0, supabase_js_1.createClient)(supabaseUrl, supabaseKey); diff --git a/backend/nodemon.json b/backend/nodemon.json index 2db8bb8..b49894e 100644 --- a/backend/nodemon.json +++ b/backend/nodemon.json @@ -3,4 +3,4 @@ "ext": "ts", "ignore": ["src/**/*.spec.ts"], "exec": "ts-node ./src/index.ts" -} \ No newline at end of file +} diff --git a/backend/package-lock.json b/backend/package-lock.json index ed314be..f1d36f2 100644 --- a/backend/package-lock.json +++ b/backend/package-lock.json @@ -23,7 +23,6 @@ 
"devDependencies": { "@types/cors": "^2.8.17", "@types/graphql": "^14.2.3", - "@types/jsonwebtoken": "^9.0.9", "@types/node": "^22.13.13", "nodemon": "^3.1.9", "ts-node": "^10.9.2", @@ -603,17 +602,6 @@ "integrity": "sha512-D0CFMMtydbJAegzOyHjtiKPLlvnm3iTZyZRSZoLq2mRhDdmLfIWOCYPfQJ4cu2erKghU++QvjcUjp/5h7hESpA==", "license": "MIT" }, - "node_modules/@types/jsonwebtoken": { - "version": "9.0.9", - "resolved": "https://registry.npmjs.org/@types/jsonwebtoken/-/jsonwebtoken-9.0.9.tgz", - "integrity": "sha512-uoe+GxEuHbvy12OUQct2X9JenKM3qAscquYymuQN4fMWG9DBQtykrQEFcAbVACF7qaLw9BePSodUL0kquqBJpQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/ms": "*", - "@types/node": "*" - } - }, "node_modules/@types/long": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.2.tgz", @@ -626,13 +614,6 @@ "integrity": "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==", "license": "MIT" }, - "node_modules/@types/ms": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@types/ms/-/ms-2.1.0.tgz", - "integrity": "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==", - "dev": true, - "license": "MIT" - }, "node_modules/@types/node": { "version": "22.13.13", "resolved": "https://registry.npmjs.org/@types/node/-/node-22.13.13.tgz", diff --git a/backend/src/index.ts b/backend/src/index.ts index 9e5c6a1..642dd23 100644 --- a/backend/src/index.ts +++ b/backend/src/index.ts @@ -10,12 +10,12 @@ import authRoutes from "./routes/authRoutes"; import { errorHandler } from "./middleware/errorHandler"; import { typeDefs, resolvers } from "./graphql/schema"; -// لود متغیرهای محیطی +// Load environment variables dotenv.config({ path: "./.env" }); const app = express(); -// تنظیمات CORS +// Configure CORS app.use( cors({ origin: "*", @@ -47,4 +47,4 @@ async function startServer() { }); } -startServer(); \ No newline at end of file +startServer(); diff 
--git a/backend/src/utils/supabaseClient.ts b/backend/src/utils/supabaseClient.ts index c1607b8..44ec6f8 100644 --- a/backend/src/utils/supabaseClient.ts +++ b/backend/src/utils/supabaseClient.ts @@ -7,4 +7,4 @@ if (!supabaseUrl || !supabaseKey) { throw new Error("Supabase URL and key must be set in environment variables."); } -export const supabase = createClient(supabaseUrl, supabaseKey); \ No newline at end of file +export const supabase = createClient(supabaseUrl, supabaseKey); diff --git a/buildspec-create-lambda-s3-bucket.yml b/buildspec-create-lambda-s3-bucket.yml deleted file mode 100644 index ff2021f..0000000 --- a/buildspec-create-lambda-s3-bucket.yml +++ /dev/null @@ -1,10 +0,0 @@ -version: 0.2 -phases: - build: - commands: - - echo "Fetching AWS Account ID..." - - export AWS_ACCOUNT_ID=$(aws sts get-caller-identity --query Account --output text) - - export LAMBDA_CODE_S3_BUCKET="transaction-simulator-lambda-code-${AWS_ACCOUNT_ID}-${AWS_DEFAULT_REGION}" - - echo "Attempting to create S3 bucket $LAMBDA_CODE_S3_BUCKET in region $AWS_DEFAULT_REGION" - - aws s3 mb s3://${LAMBDA_CODE_S3_BUCKET} --region ${AWS_DEFAULT_REGION} || true - - echo "S3 bucket creation attempt complete." \ No newline at end of file diff --git a/buildspec-frontend-build.yml b/buildspec-frontend-build.yml deleted file mode 100644 index 9daae11..0000000 --- a/buildspec-frontend-build.yml +++ /dev/null @@ -1,34 +0,0 @@ -version: 0.2 - -phases: - install: - commands: - - echo Installing Node.js... - - curl -fsSL https://deb.nodesource.com/setup_20.x | bash - - - apt-get install -y nodejs - - cd frontend - - echo Checking Node.js version... - - node -v - - npm -v - - echo Installing frontend dependencies... - - npm install - - cd .. - build: - commands: - - echo Building frontend... - - cd frontend - - npm run build - - echo Checking frontend/dist contents... - - ls -la dist - - ls -la dist/assets || echo "Assets directory not found!" - - cd .. 
- post_build: - commands: - - echo "Frontend build complete." -artifacts: - files: - - '**/*' - base-directory: frontend/dist -cache: - paths: - - 'frontend/node_modules/**' \ No newline at end of file diff --git a/buildspec-lambda-canary-deployment.yml b/buildspec-lambda-canary-deployment.yml deleted file mode 100644 index 47cabd6..0000000 --- a/buildspec-lambda-canary-deployment.yml +++ /dev/null @@ -1,61 +0,0 @@ -# buildspec-lambda-canary-deployment.yml -# This buildspec represents a placeholder for CodeDeploy's Lambda deployment hooks -# when performing a canary rollout. CodeDeploy manages traffic shifting. -# In this file, we assume CodeDeploy is handling the actual traffic shift based on -# its deployment group configuration. -# This specific buildspec is for demonstrating custom actions or post-traffic shift validation. -version: 0.2 - -# CodeDeploy's Lambda deployments don't typically run a buildspec.yml directly -# instead, CodeDeploy uses its own mechanism for traffic shifting and optional Lambda hooks. -# This buildspec exists as a conceptual placeholder if you were to have a CodeBuild -# action in CodePipeline that *manually* triggers a CodeDeploy Lambda deployment -# or performs post-deployment actions. - -# However, for AWS CodePipeline's direct integration with AWS CodeDeploy for Lambda, -# you use the 'AWS CodeDeploy' action type with 'DeploymentStyle: Lambda' in CodePipeline. -# This action type handles the traffic shifting directly, and doesn't explicitly -# execute a buildspec.yml for the traffic shift itself. - -# This buildspec is primarily for a scenario where you might want to perform -# custom pre-traffic hook validation or post-traffic hook validation. - -# Example: If you had a custom CodeBuild action for Post-Traffic Hook Validation -# phases: -# install: -# commands: -# - echo "Setting up environment for validation..." 
-# - sudo apt-get update -y -# - sudo apt-get install -y curl jq -# -# build: -# commands: -# - echo "Retrieving Lambda alias info for validation..." -# # You would typically pass the current version, target version, alias names -# # as environment variables to this CodeBuild project via CodePipeline. -# - CURRENT_LIVE_VERSION=$(aws lambda get-alias --function-name TransactionSimulatorAPI --name LIVE | jq -r '.FunctionVersion') -# - echo "Current LIVE alias version: $CURRENT_LIVE_VERSION" -# - # Perform actual validation against the new version or blended traffic -# - API_URL=$(jq -r '.api_gateway_invoke_url.value' terraform_outputs.json) # Assuming terraform_outputs.json is available -# - echo "Running health check on API: $API_URL" -# - curl -sS -f "$API_URL/api/login" # Example health check -# - echo "Validation successful." -# -# post_build: -# commands: -# - echo "Custom post-traffic hook validation complete." -# -# artifacts: -# files: [] # No specific artifacts needed from this stage - -# For now, this will be an empty buildspec as the primary canary deployment is handled by CodeDeploy's Lambda action itself. -# We will ensure the CodePipeline definition uses the direct CodeDeploy Lambda action type. -# This file still needs to exist to be referenced by a CodeBuild project in CodePipeline. -phases: - build: - commands: - - echo "This buildspec is a placeholder. Lambda traffic shifting is managed directly by AWS CodeDeploy." - - echo "CodeDeploy's Lambda deployments do not execute a buildspec.yml directly for traffic shifting." - - echo "Post-deployment smoke tests will occur in a separate stage after traffic is fully shifted." 
-artifacts: - files: [] \ No newline at end of file diff --git a/buildspec-lambda-package.yml b/buildspec-lambda-package.yml deleted file mode 100644 index 4fd9e7f..0000000 --- a/buildspec-lambda-package.yml +++ /dev/null @@ -1,59 +0,0 @@ -version: 0.2 - -phases: - install: - runtime-versions: - nodejs: 20 - commands: - - echo "Installing Node.js version 20 ..." - - n $NODE_20_VERSION - - echo "Installing dependencies for Lambda function..." - - cd lambda - - npm install - - echo "Dependencies installed." - - cd .. # Ensure you are back at the root of the source directory - build: - commands: - - echo "Zipping Lambda function code..." - - zip -r lambda.zip lambda - - echo "Lambda code zipped." - - export LAMBDA_CODE_S3_BUCKET="transaction-simulator-lambda-code-${AWS_ACCOUNT_ID}-${AWS_DEFAULT_REGION}" - - echo "Lambda S3 Bucket $LAMBDA_CODE_S3_BUCKET" - - export LAMBDA_S3_KEY="lambda-packages/lambda-${CODEBUILD_RESOLVED_SOURCE_VERSION}.zip" - - echo "Lambda S3 Key $LAMBDA_S3_KEY" - - echo "Uploading lambda.zip to S3..." - - aws s3 cp lambda.zip "s3://${LAMBDA_CODE_S3_BUCKET}/${LAMBDA_S3_KEY}" --metadata-directive REPLACE --acl bucket-owner-full-control --sse AES256 - - echo "Lambda.zip uploaded to S3." - - echo "LAMBDA_S3_BUCKET=$LAMBDA_CODE_S3_BUCKET" > build.env - - echo "LAMBDA_S3_KEY=$LAMBDA_S3_KEY" >> build.env - - echo "build.env file created with S3 location details." - post_build: - commands: - - echo "Lambda packaging and upload completed." 
- - echo "--- Checking build.env location and content ---" - - ls -la build.env # Confirm build.env exists at root - - cat build.env # Display its content - - echo "--- End checking build.env ---" - - echo "Contents of current directory before artifact packaging:" # DIAGNOSTIC - - ls -laR $CODEBUILD_SRC_DIR # DIAGNOSTIC: List everything recursively from the source dir - - echo "End of diagnostic ls -laR" # DIAGNOSTIC - -artifacts: - files: - - lambda.zip - name: $(if [ "$CODEBUILD_INITIATOR" = "codepipeline" ]; then echo "LambdaArtifact"; else echo "BuildOutput"; fi) - discard-paths: no - secondary-artifacts: - LambdaArtifact: - files: - - lambda.zip - discard-paths: yes - LambdaS3LocationArtifact: - files: - - build.env - discard-paths: yes - name: LambdaPackageArtifact -cache: - paths: - - 'lambda/node_modules/**' - - '/usr/local/n/versions/node/**' \ No newline at end of file diff --git a/buildspec-smoke-tests.yml b/buildspec-smoke-tests.yml deleted file mode 100644 index 4cd752e..0000000 --- a/buildspec-smoke-tests.yml +++ /dev/null @@ -1,42 +0,0 @@ -# buildspec-smoke-tests.yml -# This buildspec runs automated tests against the deployed application (API Gateway and Frontend). -# This file will be placed in the root of your 'my-payment-simulator' repository. -version: 0.2 - -phases: - install: - commands: - # Install curl and jq for making HTTP requests and parsing JSON outputs. - - echo "Installing curl and jq for smoke tests..." - - sudo apt-get update -y - - sudo apt-get install -y curl jq - - build: - commands: - - echo "Running smoke tests..." - # Extract API Gateway and Frontend URLs from the 'terraform_outputs.json' artifact. - # This file is generated by the 'Terraform Apply' stage. - - API_URL=$(jq -r '.api_gateway_invoke_url.value' terraform_outputs.json) - - FRONTEND_URL=$(jq -r '.frontend_website_url.value' terraform_outputs.json) - - # Basic validation to ensure URLs are not empty before testing. 
- - if [ -z "$API_URL" ]; then echo "ERROR API_URL is empty. Cannot run API tests."; exit 1; fi - - if [ -z "$FRONTEND_URL" ]; then echo "ERROR FRONTEND_URL is empty. Cannot run Frontend tests."; exit 1; fi - - - echo "Testing API Gateway URL $API_URL" - # Perform a basic health check on the API login endpoint. - # '--fail' will cause curl to exit with an error code if the HTTP status is >= 400. - - curl -v --fail "$API_URL/api/login" - - echo "API Gateway smoke test passed." - - - echo "Testing Frontend URL $FRONTEND_URL" - # Check if the frontend is accessible by making a request to its root URL. - - curl -v --fail "$FRONTEND_URL" - - echo "Frontend smoke test passed." - - post_build: - commands: - - echo "Smoke tests completed successfully." - -artifacts: - files: [] # No specific artifacts needed from this final testing stage. \ No newline at end of file diff --git a/buildspec-static-analysis.yml b/buildspec-static-analysis.yml deleted file mode 100644 index 6a3d5b9..0000000 --- a/buildspec-static-analysis.yml +++ /dev/null @@ -1,88 +0,0 @@ -version: 0.2 - -env: - variables: - CHECK_FAIL_ON: "NONE" # Set to "NONE" or "FAILURE" - -phases: - install: - runtime-versions: - nodejs: 20 - python: 3.11 - commands: - - echo "Installing Checkov (IaC security scanner)..." - - pip install checkov - - - | - if [ -d "backend" ] && [ -f "backend/package.json" ]; then - echo "Installing backend dependencies..." - cd backend/ - npm ci --only=dev || npm install --only=dev # This will now run, but install nothing if no devDeps are listed - cd .. - else - echo "No backend/package.json found." - fi - - - | - if [ -d "frontend" ] && [ -f "frontend/package.json" ]; then - echo "Installing frontend dependencies..." - cd frontend/ - npm ci --only=dev || npm install --only=dev # This will now run, but install nothing if no devDeps are listed - cd .. - else - echo "No frontend/package.json found." - fi - - pre_build: - commands: - - echo "Verifying tool installations..." 
- - checkov --version - - node --version - build: - commands: - - echo "Running Checkov for IaC security scanning on Terraform files..." - - | - if [ -d "terraform" ]; then - checkov -d terraform/ --framework terraform --output cli --output json --output-file-path . || CHECKOV_EXIT_CODE=$? - if [ -f "results_json.json" ]; then - mv results_json.json checkov_results.json - else - echo '{"summary": {"failed": 0, "passed": 0, "skipped": 0}, "results": {"failed_checks": []}}' > checkov_results.json - fi - - # Check if we should fail on Checkov findings - if [ "$CHECK_FAIL_ON" == "FAILURE" ] && [ "${CHECKOV_EXIT_CODE:-0}" -ne 0 ]; then - echo "Checkov found security issues. Review checkov_results.json for details." - cat checkov_results.json - exit 1 - fi - else - echo "No terraform/ directory found, skipping Checkov scan" - echo '{"summary": {"failed": 0, "passed": 0, "skipped": 0}, "results": {"failed_checks": []}}' > checkov_results.json - fi - - post_build: - commands: - - echo "Static analysis complete. Generating summary report..." - - echo "=== STATIC ANALYSIS SUMMARY ===" - - | - if [ -f "checkov_results.json" ]; then - CHECKOV_FAILED=$(cat checkov_results.json | jq -r '.summary.failed // 0' 2>/dev/null || echo "0") - CHECKOV_PASSED=$(cat checkov_results.json | jq -r '.summary.passed // 0' 2>/dev/null || echo "0") - echo "Checkov: $CHECKOV_FAILED failed, $CHECKOV_PASSED passed" - fi - - echo "Review detailed results in the artifacts for specific findings." 
- -artifacts: - files: - - 'checkov_results.json' - name: static-analysis-results - -cache: - paths: - - '/usr/local/n/versions/node/**' - - '/root/.cache/pip/**' - - '/usr/local/lib/python3.11/site-packages/**' - - '/usr/local/lib/node_modules/**' - - 'backend/node_modules/**' - - 'frontend/node_modules/**' \ No newline at end of file diff --git a/buildspec-terraform-apply.yml b/buildspec-terraform-apply.yml deleted file mode 100644 index 235b6d5..0000000 --- a/buildspec-terraform-apply.yml +++ /dev/null @@ -1,46 +0,0 @@ -version: 0.2 - -env: - variables: - TF_VERSION: "1.7.5" - -phases: - install: - runtime-versions: - nodejs: 20 - python: 3.11 - commands: - - echo "Installing Terraform..." - - mkdir -p /tmp/terraform_install - - cd /tmp/terraform_install - - wget https://releases.hashicorp.com/terraform/${TF_VERSION}/terraform_${TF_VERSION}_linux_amd64.zip - - unzip -o terraform_${TF_VERSION}_linux_amd64.zip - - mv terraform /usr/local/bin/ - - cd $CODEBUILD_SRC_DIR - - terraform version - - - echo "Loading Lambda S3 location from previous stage..." - # CRITICAL CHANGE: Use LambdaS3LocationArtifact as the directory name - - . LambdaS3LocationArtifact/build.env - - echo "LAMBDA_S3_BUCKET=$LAMBDA_S3_BUCKET" - - echo "LAMBDA_S3_KEY=$LAMBDA_S3_KEY" - - build: - commands: - - echo "Initializing Terraform..." - - cd terraform - - terraform init -input=false -reconfigure - - - echo "Applying Terraform plan..." - - terraform apply -auto-approve ../TerraformPlanArtifact/tfplan -input=false - - post_build: - commands: - - echo "Generating Terraform outputs..." - - terraform output -json > ../terraform_outputs.json - - echo "Terraform apply completed." 
- -artifacts: - files: - - 'terraform_outputs.json' - name: TerraformOutputsArtifact \ No newline at end of file diff --git a/buildspec-terraform-plan.yml b/buildspec-terraform-plan.yml deleted file mode 100644 index 9eb156f..0000000 --- a/buildspec-terraform-plan.yml +++ /dev/null @@ -1,110 +0,0 @@ -version: 0.2 - -env: - variables: - TF_VERSION: "1.7.5" - TF_PLAN_FILE: "tfplan" - -phases: - install: - runtime-versions: - nodejs: 20 - python: 3.11 - commands: - - echo "Installing Terraform..." - - mkdir -p /tmp/terraform_install - - cd /tmp/terraform_install - - wget --tries=5 --timeout=30 https://releases.hashicorp.com/terraform/${TF_VERSION}/terraform_${TF_VERSION}_linux_amd64.zip - - unzip -o terraform_${TF_VERSION}_linux_amd64.zip - - mv terraform /usr/local/bin/ - - cd $CODEBUILD_SRC_DIR - - echo "Verifying Terraform installation..." - - terraform --version - - pre_build: - commands: - - echo "=== COMPREHENSIVE DIAGNOSTICS START ===" - - echo "Current working directory:" - - pwd - - echo "CODEBUILD_SRC_DIR value:" - - echo $CODEBUILD_SRC_DIR - - echo "CODEBUILD_SRC_DIR_LambdaS3LocationArtifact value:" - - echo $CODEBUILD_SRC_DIR_LambdaS3LocationArtifact - - echo "Contents of CODEBUILD_SRC_DIR:" - - ls -la $CODEBUILD_SRC_DIR - - echo "Recursive listing of CODEBUILD_SRC_DIR:" - - ls -laR $CODEBUILD_SRC_DIR - - echo "Contents of LambdaS3LocationArtifact:" - - ls -la $CODEBUILD_SRC_DIR_LambdaS3LocationArtifact || echo "Directory not found or empty." - - echo "Recursive listing of LambdaS3LocationArtifact:" - - ls -laR $CODEBUILD_SRC_DIR_LambdaS3LocationArtifact || echo "Directory not found or empty." 
- - echo "Environment variables (filtered):" - - env | grep -E "(CODEBUILD|ARTIFACT)" || echo "No CODEBUILD/ARTIFACT env vars found" - - | - for dir in LambdaS3LocationArtifact LambdaPackageArtifact SourceOutput; do - echo "Checking for directory: $dir" - if [ -d "$dir" ]; then - echo " Directory $dir exists:" - ls -la "$dir/" - if [ -f "$dir/build.env" ]; then - echo " build.env found in $dir:" - cat "$dir/build.env" - else - echo " build.env NOT found in $dir" - fi - else - echo " Directory $dir does not exist" - fi - done - - echo "Searching for build.env in /codebuild/output:" - - find /codebuild/output -name "build.env" -type f 2>/dev/null || echo "No build.env files found" - - echo "=== COMPREHENSIVE DIAGNOSTICS END ===" - - build: - commands: - - echo "Loading Lambda S3 location from previous stage..." - - | - BUILD_ENV_PATH="" - for path in "$CODEBUILD_SRC_DIR_LambdaS3LocationArtifact" \ - "$CODEBUILD_SRC_DIR/LambdaS3LocationArtifact" \ - "$CODEBUILD_SRC_DIR/LambdaPackageArtifact"; do - if [ -f "$path/build.env" ]; then - BUILD_ENV_PATH="$path/build.env" - break - fi - done - - if [ -z "$BUILD_ENV_PATH" ]; then - echo "ERROR: build.env not found in any expected location!" - exit 1 - fi - - echo "Sourcing build.env from $BUILD_ENV_PATH" - . "$BUILD_ENV_PATH" - - echo "LAMBDA_S3_BUCKET=$LAMBDA_S3_BUCKET" - - echo "LAMBDA_S3_KEY=$LAMBDA_S3_KEY" - - | - if [ -z "$LAMBDA_S3_BUCKET" ] || [ -z "$LAMBDA_S3_KEY" ]; then - echo "ERROR: Required environment variables not set!" - exit 1 - fi - - | - if [ ! -d "terraform" ]; then - echo "ERROR: 'terraform' directory not found!" - exit 1 - fi - cd terraform - - terraform init -reconfigure - - terraform plan -var="lambda_s3_bucket=${LAMBDA_S3_BUCKET}" -var="lambda_s3_key=${LAMBDA_S3_KEY}" -out=${TF_PLAN_FILE} - - post_build: - commands: - - echo "Uploading Terraform plan artifact..." - - mv ${TF_PLAN_FILE} $CODEBUILD_SRC_DIR/ - - echo "Terraform plan completed and artifact uploaded." 
- -artifacts: - files: - - 'tfplan' - discard-paths: yes - name: TerraformPlanArtifact \ No newline at end of file diff --git a/frontend/dist/index.html b/frontend/dist/index.html deleted file mode 100644 index d67c570..0000000 --- a/frontend/dist/index.html +++ /dev/null @@ -1,14 +0,0 @@ - - - - - - - Payment Simulator - - - - -
- - diff --git a/terraform/frontend.tf b/terraform/frontend.tf index 2db6373..2f95c7c 100644 --- a/terraform/frontend.tf +++ b/terraform/frontend.tf @@ -73,9 +73,3 @@ resource "aws_s3_bucket_cors_configuration" "frontend_cors" { max_age_seconds = 3000 } } - -#frontend_website_url -output "frontend_website_url_output" { - description = "The URL of the S3 static website" - value = aws_s3_bucket_website_configuration.frontend_website.website_endpoint -} \ No newline at end of file diff --git a/terraform/lambda_api_gateway.tf b/terraform/lambda_api_gateway.tf index 0eb8044..abe4478 100644 --- a/terraform/lambda_api_gateway.tf +++ b/terraform/lambda_api_gateway.tf @@ -20,42 +20,43 @@ resource "aws_iam_role_policy_attachment" "lambda_policy" { policy_arn = "arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole" } -# Data source to get the S3 object details for the Lambda code -data "aws_s3_object" "lambda_code_object" { - bucket = var.lambda_s3_bucket - key = var.lambda_s3_key -} - # AWS Lambda Function Definition # 'publish = true' ensures a new Lambda version is created on every code change resource "aws_lambda_function" "api_lambda" { - function_name = "TransactionSimulatorAPI" - handler = "lambda.handler" - runtime = "nodejs20.x" - timeout = 60 - role = aws_iam_role.lambda_role.arn - # Use S3 bucket and key instead of local filename - s3_bucket = var.lambda_s3_bucket - s3_key = var.lambda_s3_key - source_code_hash = data.aws_s3_object.lambda_code_object.etag # Use ETag from S3 object for source_code_hash + function_name = "TransactionSimulatorAPI" + handler = "lambda.handler" + runtime = "nodejs20.x" + timeout = 60 + role = aws_iam_role.lambda_role.arn + + s3_bucket = aws_s3_bucket.lambda_code_bucket.bucket + s3_key = aws_s3_object.lambda_package.key + source_code_hash = data.archive_file.lambda_zip.output_sha publish = true - + environment { variables = { SUPABASE_URL = var.supabase_url SUPABASE_KEY = var.supabase_key } } + + depends_on = 
[aws_s3_object.lambda_package] } # AWS Lambda Alias for Production Traffic (e.g., 'LIVE') -# This alias will initially point to the current stable Lambda version. -# CodeDeploy will update this alias during a canary deployment. +# This alias will be managed by traffic-shifting automation in the deployment pipeline. resource "aws_lambda_alias" "live_alias" { - name = "LIVE" - function_name = aws_lambda_function.api_lambda.function_name - function_version = aws_lambda_function.api_lambda.version # Initially points to the current published version - description = "Alias for live production traffic. Managed by CodeDeploy for canary deployments." + name = "LIVE" + function_name = aws_lambda_function.api_lambda.function_name + description = "Alias for live production traffic." + + # Set an initial version on create, but allow the pipeline to manage future updates. + function_version = aws_lambda_function.api_lambda.version + + lifecycle { + ignore_changes = [function_version, routing_config] + } } # AWS Lambda Alias for Pre-production/Testing (e.g., 'BETA' or 'GREEN') @@ -63,16 +64,15 @@ resource "aws_lambda_alias" "live_alias" { resource "aws_lambda_alias" "beta_alias" { name = "BETA" function_name = aws_lambda_function.api_lambda.function_name - function_version = aws_lambda_function.api_lambda.version # Initially points to the current published version - description = "Alias for beta/pre-production testing. Can be used for manual verification." + function_version = aws_lambda_function.api_lambda.version + description = "Alias for beta/pre-production testing." 
} # API Gateway Configuration (pointing to the LIVE alias) resource "aws_lambda_permission" "api_gateway" { statement_id = "AllowAPIGatewayInvoke" action = "lambda:InvokeFunction" - # Point API Gateway to the LIVE alias ARN for invocation - function_name = aws_lambda_alias.live_alias.function_name # <--- Reference function name, not ARN for permission + function_name = aws_lambda_alias.live_alias.function_name principal = "apigateway.amazonaws.com" source_arn = "${aws_api_gateway_rest_api.api.execution_arn}/*/*" } @@ -102,12 +102,12 @@ resource "aws_api_gateway_method" "api_login_options" { } resource "aws_api_gateway_integration" "lambda_integration_login_options" { - rest_api_id = aws_api_gateway_rest_api.api.id - resource_id = aws_api_gateway_resource.api_login.id - http_method = aws_api_gateway_method.api_login_options.http_method + rest_api_id = aws_api_gateway_rest_api.api.id + resource_id = aws_api_gateway_resource.api_login.id + http_method = aws_api_gateway_method.api_login_options.http_method integration_http_method = "POST" - type = "AWS_PROXY" - uri = aws_lambda_alias.live_alias.invoke_arn # <--- Point to LIVE alias invoke ARN + type = "AWS_PROXY" + uri = aws_lambda_alias.live_alias.invoke_arn } resource "aws_api_gateway_method" "api_login_post" { @@ -118,12 +118,12 @@ resource "aws_api_gateway_method" "api_login_post" { } resource "aws_api_gateway_integration" "lambda_integration_login_post" { - rest_api_id = aws_api_gateway_rest_api.api.id - resource_id = aws_api_gateway_resource.api_login.id - http_method = aws_api_gateway_method.api_login_post.http_method + rest_api_id = aws_api_gateway_rest_api.api.id + resource_id = aws_api_gateway_resource.api_login.id + http_method = aws_api_gateway_method.api_login_post.http_method integration_http_method = "POST" - type = "AWS_PROXY" - uri = aws_lambda_alias.live_alias.invoke_arn # <--- Point to LIVE alias invoke ARN + type = "AWS_PROXY" + uri = aws_lambda_alias.live_alias.invoke_arn } resource 
"aws_api_gateway_resource" "api_transactions" { @@ -154,21 +154,21 @@ resource "aws_api_gateway_method" "api_process_batch_options" { } resource "aws_api_gateway_integration" "lambda_integration_transactions_options" { - rest_api_id = aws_api_gateway_rest_api.api.id - resource_id = aws_api_gateway_resource.api_transactions.id - http_method = aws_api_gateway_method.api_transactions_options.http_method + rest_api_id = aws_api_gateway_rest_api.api.id + resource_id = aws_api_gateway_resource.api_transactions.id + http_method = aws_api_gateway_method.api_transactions_options.http_method integration_http_method = "POST" - type = "AWS_PROXY" - uri = aws_lambda_alias.live_alias.invoke_arn # <--- Point to LIVE alias invoke ARN + type = "AWS_PROXY" + uri = aws_lambda_alias.live_alias.invoke_arn } resource "aws_api_gateway_integration" "lambda_integration_process_batch_options" { - rest_api_id = aws_api_gateway_rest_api.api.id - resource_id = aws_api_gateway_resource.api_process_batch.id - http_method = aws_api_gateway_method.api_process_batch_options.http_method + rest_api_id = aws_api_gateway_rest_api.api.id + resource_id = aws_api_gateway_resource.api_process_batch.id + http_method = aws_api_gateway_method.api_process_batch_options.http_method integration_http_method = "POST" - type = "AWS_PROXY" - uri = aws_lambda_alias.live_alias.invoke_arn # <--- Point to LIVE alias invoke ARN + type = "AWS_PROXY" + uri = aws_lambda_alias.live_alias.invoke_arn } resource "aws_api_gateway_method" "api_transactions_post" { @@ -186,21 +186,21 @@ resource "aws_api_gateway_method" "api_process_batch_post" { } resource "aws_api_gateway_integration" "lambda_integration_transactions_post" { - rest_api_id = aws_api_gateway_rest_api.api.id - resource_id = aws_api_gateway_resource.api_transactions.id - http_method = aws_api_gateway_method.api_transactions_post.http_method + rest_api_id = aws_api_gateway_rest_api.api.id + resource_id = aws_api_gateway_resource.api_transactions.id + http_method = 
aws_api_gateway_method.api_transactions_post.http_method integration_http_method = "POST" - type = "AWS_PROXY" - uri = aws_lambda_alias.live_alias.invoke_arn # <--- Point to LIVE alias invoke ARN + type = "AWS_PROXY" + uri = aws_lambda_alias.live_alias.invoke_arn } resource "aws_api_gateway_integration" "lambda_integration_process_batch_post" { - rest_api_id = aws_api_gateway_rest_api.api.id - resource_id = aws_api_gateway_resource.api_process_batch.id - http_method = aws_api_gateway_method.api_process_batch_post.http_method + rest_api_id = aws_api_gateway_rest_api.api.id + resource_id = aws_api_gateway_resource.api_process_batch.id + http_method = aws_api_gateway_method.api_process_batch_post.http_method integration_http_method = "POST" - type = "AWS_PROXY" - uri = aws_lambda_alias.live_alias.invoke_arn # <--- Point to LIVE alias invoke ARN + type = "AWS_PROXY" + uri = aws_lambda_alias.live_alias.invoke_arn } resource "aws_api_gateway_method" "api_transactions_get" { @@ -211,12 +211,12 @@ resource "aws_api_gateway_method" "api_transactions_get" { } resource "aws_api_gateway_integration" "lambda_integration_transactions_get" { - rest_api_id = aws_api_gateway_rest_api.api.id - resource_id = aws_api_gateway_resource.api_transactions.id - http_method = aws_api_gateway_method.api_transactions_get.http_method + rest_api_id = aws_api_gateway_rest_api.api.id + resource_id = aws_api_gateway_resource.api_transactions.id + http_method = aws_api_gateway_method.api_transactions_get.http_method integration_http_method = "POST" - type = "AWS_PROXY" - uri = aws_lambda_alias.live_alias.invoke_arn # <--- Point to LIVE alias invoke ARN + type = "AWS_PROXY" + uri = aws_lambda_alias.live_alias.invoke_arn } # Enable CORS for API Gateway @@ -271,8 +271,8 @@ resource "aws_api_gateway_method_response" "process_batch_options_200" { status_code = "200" response_parameters = { - "method.response.header.Access-Control-Allow-Origin" = true - 
"method.response.header.Access-Control-Allow-Methods" = true + "method.response.header.Access-Control-Allow-Origin" = true, + "method.response.header.Access-Control-Allow-Methods" = true, "method.response.header.Access-Control-Allow-Headers" = true } } @@ -302,8 +302,8 @@ resource "aws_api_gateway_integration_response" "process_batch_options_response" status_code = aws_api_gateway_method_response.process_batch_options_200.status_code response_parameters = { - "method.response.header.Access-Control-Allow-Origin" = "'*'" - "method.response.header.Access-Control-Allow-Headers" = "'Content-Type,Authorization,X-Requested-With'" + "method.response.header.Access-Control-Allow-Origin" = "'*'", + "method.response.header.Access-Control-Allow-Headers" = "'Content-Type,Authorization,X-Requested-With'", "method.response.header.Access-Control-Allow-Methods" = "'GET,POST,OPTIONS'" } @@ -414,4 +414,4 @@ output "lambda_live_alias_arn" { output "api_gateway_rest_api_id" { description = "API Gateway REST API ID" value = aws_api_gateway_rest_api.api.id -} \ No newline at end of file +} diff --git a/terraform/outputs.tf b/terraform/outputs.tf index 321c7fb..9abeb5f 100644 --- a/terraform/outputs.tf +++ b/terraform/outputs.tf @@ -19,21 +19,19 @@ output "frontend_bucket_name" { sensitive = false } -# Output the Lambda Function Name for CodePipeline/CodeDeploy reference +# Output the Lambda Function Name for reference output "lambda_function_name" { - description = "The name of the Lambda function for CodeDeploy." 
+ description = "The name of the Lambda function" value = aws_lambda_function.api_lambda.function_name - # This export name should match what CodePipeline expects - # For cross-stack references, this output can be imported as "TransactionSimulatorLambdaFunctionName" } # Ensure these are also outputs if you want them visible/importable - output "lambda_live_alias_arn" { - description = "The ARN of the Lambda LIVE alias" - value = aws_lambda_alias.live_alias.arn - } +output "lambda_live_alias_arn" { + description = "The ARN of the Lambda LIVE alias" + value = aws_lambda_alias.live_alias.arn +} - output "lambda_beta_alias_arn" { - description = "The ARN of the Lambda BETA alias" - value = aws_lambda_alias.beta_alias.arn - } \ No newline at end of file +output "lambda_beta_alias_arn" { + description = "The ARN of the Lambda BETA alias" + value = aws_lambda_alias.beta_alias.arn +} diff --git a/terraform/s3_lambda_code_bucket.tf b/terraform/s3_lambda_code_bucket.tf index 1bfbab4..1b43ef9 100644 --- a/terraform/s3_lambda_code_bucket.tf +++ b/terraform/s3_lambda_code_bucket.tf @@ -5,7 +5,7 @@ data "aws_region" "current" {} # Create or reference the S3 bucket for Lambda code resource "aws_s3_bucket" "lambda_code_bucket" { bucket = "transaction-simulator-lambda-code-${data.aws_caller_identity.current.account_id}-${data.aws_region.current.name}" - + tags = { Name = "TransactionSimulatorLambdaCode" Project = "TransactionSimulator" @@ -37,12 +37,30 @@ resource "aws_s3_bucket_public_access_block" "lambda_code_bucket_pab" { # Bucket versioning for Lambda code bucket resource "aws_s3_bucket_versioning" "lambda_code_bucket_versioning" { bucket = aws_s3_bucket.lambda_code_bucket.id + versioning_configuration { status = "Enabled" } } -# Output the bucket name for use in other resources +# Package the Lambda function code from the local directory +# Ensure the GitHub Actions workflow installs dependencies before running Terraform +# so that node_modules are included in the archive 
when necessary. +data "archive_file" "lambda_zip" { + type = "zip" + source_dir = "${path.module}/../lambda" + output_path = "${path.module}/.terraform/lambda.zip" +} + +# Upload the packaged Lambda artifact to the managed bucket +resource "aws_s3_object" "lambda_package" { + bucket = aws_s3_bucket.lambda_code_bucket.id + key = "lambda-packages/${data.archive_file.lambda_zip.output_sha}.zip" + source = data.archive_file.lambda_zip.output_path + etag = filemd5(data.archive_file.lambda_zip.output_path) +} + +# Output the bucket name and key for reference output "lambda_code_bucket_name" { description = "Name of the S3 bucket storing Lambda deployment packages." value = aws_s3_bucket.lambda_code_bucket.bucket @@ -51,4 +69,9 @@ output "lambda_code_bucket_name" { output "lambda_code_bucket_arn" { description = "ARN of the S3 bucket storing Lambda deployment packages." value = aws_s3_bucket.lambda_code_bucket.arn -} \ No newline at end of file +} + +output "lambda_package_key" { + description = "Key of the Lambda package uploaded to S3." 
+ value = aws_s3_object.lambda_package.key +} diff --git a/terraform/variables.tf b/terraform/variables.tf index 19c4001..b883457 100644 --- a/terraform/variables.tf +++ b/terraform/variables.tf @@ -1,21 +1,11 @@ - variable "supabase_url" { - description = "Supabase project URL" - type = string - sensitive = true - } +variable "supabase_url" { + description = "Supabase project URL" + type = string + sensitive = true +} - variable "supabase_key" { - description = "Supabase anon/public key" - type = string - sensitive = true - } - - variable "lambda_s3_bucket" { - description = "S3 bucket containing the Lambda deployment package" - type = string - } - - variable "lambda_s3_key" { - description = "S3 key (path) to the Lambda deployment package" - type = string - } \ No newline at end of file +variable "supabase_key" { + description = "Supabase anon/public key" + type = string + sensitive = true +} From 988d32fc7d2abd7774eea6f0878addf321b47e15 Mon Sep 17 00:00:00 2001 From: Amin <36745993+4Min4m@users.noreply.github.com> Date: Fri, 7 Nov 2025 19:29:18 +0100 Subject: [PATCH 2/2] Tighten GitHub Actions pipeline and clean Terraform outputs --- .github/workflows/ci-cd.yml | 40 ++++++++++++++++++++++++--------- appspec.yml | 2 +- lambda/lambda.js | 2 +- lambda/package.json | 2 +- terraform/lambda_api_gateway.tf | 25 --------------------- 5 files changed, 32 insertions(+), 39 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index db85b8e..08f257d 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -50,7 +50,12 @@ jobs: - name: Install frontend dependencies working-directory: frontend - run: npm ci + run: | + if [ -f package-lock.json ]; then + npm ci + else + npm install + fi - name: Build frontend working-directory: frontend @@ -58,7 +63,12 @@ jobs: - name: Install Lambda dependencies working-directory: lambda - run: npm ci + run: | + if [ -f package-lock.json ]; then + npm ci + else + npm install + fi - name: 
Terraform init working-directory: terraform @@ -143,7 +153,12 @@ jobs: - name: Install frontend dependencies working-directory: frontend - run: npm ci + run: | + if [ -f package-lock.json ]; then + npm ci + else + npm install + fi - name: Build frontend working-directory: frontend @@ -151,7 +166,12 @@ jobs: - name: Install Lambda dependencies working-directory: lambda - run: npm ci + run: | + if [ -f package-lock.json ]; then + npm ci + else + npm install + fi - name: Terraform init working-directory: terraform @@ -240,11 +260,7 @@ jobs: aws lambda update-alias \ --function-name "$FUNCTION_NAME" \ --name LIVE \ - --function-version "$PREVIOUS_VERSION" - - aws lambda update-alias \ - --function-name "$FUNCTION_NAME" \ - --name LIVE \ + --function-version "$PREVIOUS_VERSION" \ --routing-config "AdditionalVersionWeights={\"$NEW_VERSION\"=$ROUTING_WEIGHT}" smoke-tests: @@ -279,9 +295,11 @@ jobs: exit 1 fi - echo "Testing backend login endpoint at ${API_URL}/login" + LOGIN_ENDPOINT="${API_URL%/}/api/login" + + echo "Testing backend login endpoint at ${LOGIN_ENDPOINT}" API_STATUS=$(curl -sS -o api_response.json -w '%{http_code}' \ - -X POST "$API_URL/login" \ + -X POST "$LOGIN_ENDPOINT" \ -H 'Content-Type: application/json' \ -d '{"username":"admin","password":"password123"}') diff --git a/appspec.yml b/appspec.yml index 83d0454..e412f75 100644 --- a/appspec.yml +++ b/appspec.yml @@ -9,4 +9,4 @@ Resources: TargetVersion: "2" Hooks: - BeforeAllowTraffic: "TransactionSimulatorAPIPreTrafficHook" - - AfterAllowTraffic: "TransactionSimulatorAPIPostTrafficHook" \ No newline at end of file + - AfterAllowTraffic: "TransactionSimulatorAPIPostTrafficHook" diff --git a/lambda/lambda.js b/lambda/lambda.js index f48ae50..4186dcc 100644 --- a/lambda/lambda.js +++ b/lambda/lambda.js @@ -299,4 +299,4 @@ if (path === "/api/transactions" && httpMethod === "POST") { method: httpMethod }) }; -}; \ No newline at end of file +}; diff --git a/lambda/package.json b/lambda/package.json index 
c0f4e4a..8135536 100644 --- a/lambda/package.json +++ b/lambda/package.json @@ -5,4 +5,4 @@ "@supabase/supabase-js": "^2.49.3", "node-fetch": "^3.3.2" } -} \ No newline at end of file +} diff --git a/terraform/lambda_api_gateway.tf b/terraform/lambda_api_gateway.tf index abe4478..3017a7b 100644 --- a/terraform/lambda_api_gateway.tf +++ b/terraform/lambda_api_gateway.tf @@ -390,28 +390,3 @@ resource "aws_cloudwatch_log_group" "api_gateway_logs" { retention_in_days = 7 } -# Outputs -output "api_gateway_invoke_url" { - description = "API Gateway invoke URL" - value = "${aws_api_gateway_stage.prod_stage.invoke_url}/api" -} - -output "lambda_function_name" { - description = "Lambda function name" - value = aws_lambda_function.api_lambda.function_name -} - -output "lambda_function_arn" { - description = "Lambda function ARN" - value = aws_lambda_function.api_lambda.arn -} - -output "lambda_live_alias_arn" { - description = "Lambda LIVE alias ARN" - value = aws_lambda_alias.live_alias.arn -} - -output "api_gateway_rest_api_id" { - description = "API Gateway REST API ID" - value = aws_api_gateway_rest_api.api.id -}