DailyCodeDeploy is built as a modular, scalable system with emphasis on simplicity and security.
┌─────────────────┐    ┌─────────────────┐    ┌─────────────────┐
│    Frontend     │    │   API Gateway   │    │   GitHub API    │
│    (Static)     │───►│  (Express.js)   │───►│    (OAuth)      │
└─────────────────┘    └─────────────────┘    └─────────────────┘
                              │
                              ▼
┌─────────────────┐    ┌─────────────────┐    ┌─────────────────┐
│    Job Queue    │───►│   Core Engine   │───►│   File System   │
│   (Redis opt.)  │    │    (Node.js)    │    │   (Temp/Logs)   │
└─────────────────┘    └─────────────────┘    └─────────────────┘
                              │
                              ▼
                       ┌─────────────────┐
                       │    Pipeline     │
                       │    Executor     │
                       │ (Shell/Docker)  │
                       └─────────────────┘
Technologies: HTML5, CSS3, Vanilla JavaScript
Location: /frontend/
frontend/
├── index.html      # Main page
├── benefits.md     # About project (Jekyll)
├── _config.yml     # Jekyll configuration
└── assets/
    ├── styles.css  # Interface styles
    └── script.js   # Client logic
Features:
- Fully static (GitHub Pages compatible)
- Responsive design
- Progressive enhancement
- Minimal dependencies
Technologies: Node.js, Express.js
Location: /backend/
backend/
├── server.js       # HTTP server and routing
├── queue.js        # Queue system
├── runner.js       # Pipeline executor
├── package.json    # Dependencies
├── .env.example    # Environment template
└── data/
    └── users.json  # Temp user storage
// API Routes
GET /api/repos // Repository list
POST /api/pipeline/run // Run pipeline
GET /api/pipeline/status/:id // Execution status
POST /api/auth/github // GitHub OAuth
GET  /api/logs/:id            // Execution logs

Current State: JSON files. Planned: PostgreSQL/MongoDB
// users.json structure
{
"users": [
{
"id": "user123",
"github_token": "encrypted_token",
"username": "developer",
"repositories": ["repo1", "repo2"],
"last_login": "2025-09-23T10:00:00Z"
}
]
}System: Built-in logging + planned ELK Stack
// Log structure
{
"timestamp": "2025-09-23T10:00:00Z",
"level": "info|warn|error",
"component": "api|queue|runner",
"user_id": "user123",
"action": "pipeline_start",
"metadata": {
"repository": "user/repo",
"pipeline_id": "pip_123"
}
}

GitHub Webhook/Manual → Queue → Executor → Results
        │                │          │          │
        ▼                ▼          ▼          ▼
   Validation       Job Storage  Sandboxed   Logs &
     & Auth        & Scheduling  Execution   Cleanup
-
Request Validation
function validatePipelineRequest(req) {
  // Auth check
  // Input sanitization
  // Resource limits check
  return validation_result;
}
-
Job Queuing
const job = {
  id: generateId(),
  user_id: req.user.id,
  repository: req.body.repo,
  steps: sanitizeSteps(req.body.steps),
  created_at: new Date(),
  status: 'queued'
};
await queue.add(job);
-
Execution Environment
# Sandboxed execution
mkdir -p /tmp/pipeline_${JOB_ID}
cd /tmp/pipeline_${JOB_ID}
# Clone repository
git clone ${REPO_URL} .
# Execute steps
for step in "${STEPS[@]}"; do
  timeout 300s bash -c "$step"
done
-
Results Collection
const result = {
  job_id: job.id,
  status: 'success|failed|timeout',
  output: capturedOutput,
  error: capturedError,
  duration: executionTime,
  completed_at: new Date()
};
# Core settings
NODE_ENV=production
PORT=5000
HOST=0.0.0.0
# GitHub Integration
GITHUB_CLIENT_ID=your_client_id
GITHUB_CLIENT_SECRET=your_secret
GITHUB_REDIRECT_URI=http://localhost:5000/auth/callback
# Security
SESSION_SECRET=random_secure_string
ENCRYPTION_KEY=32_byte_encryption_key
# Optional: Redis Queue
REDIS_URL=redis://localhost:6379
# Execution Limits
MAX_EXECUTION_TIME=300
MAX_CONCURRENT_JOBS=10
MAX_LOG_SIZE=1048576

# Dockerfile
FROM node:18-alpine
WORKDIR /app
# Security: non-root user
RUN addgroup -g 1001 -S nodejs
RUN adduser -S nodejs -u 1001
# Dependencies
COPY package*.json ./
RUN npm ci --only=production
# Application code
COPY --chown=nodejs:nodejs . .
USER nodejs
EXPOSE 5000
CMD ["npm", "start"]

# docker-compose.yml
version: '3.8'
services:
app:
build: .
ports:
- "5000:5000"
environment:
- NODE_ENV=production
volumes:
- ./logs:/app/logs
depends_on:
- redis
restart: unless-stopped
redis:
image: redis:7-alpine
volumes:
- redis_data:/data
restart: unless-stopped
volumes:
  redis_data:

// /api/health endpoint
app.get('/api/health', (req, res) => {
const health = {
status: 'healthy',
timestamp: new Date(),
version: process.env.npm_package_version,
uptime: process.uptime(),
memory: process.memoryUsage(),
load: {
queue_size: queue.size(),
active_jobs: runner.activeJobs(),
cpu_usage: os.loadavg()
}
};
res.json(health);
});

// Prometheus metrics (planned)
const promClient = require('prom-client');
const metrics = {
http_requests: new promClient.Counter({
name: 'http_requests_total',
help: 'Total HTTP requests',
labelNames: ['method', 'status']
}),
pipeline_duration: new promClient.Histogram({
name: 'pipeline_duration_seconds',
help: 'Pipeline execution duration'
}),
active_pipelines: new promClient.Gauge({
name: 'active_pipelines',
help: 'Currently running pipelines'
})
};

User → GitHub OAuth → Access Token → API Requests
  │          │              │              │
  ▼          ▼              ▼              ▼
Browser   GitHub API   Session Store   Protected
          Callback    (Memory/Redis)   Resources
// Permission-based access
const permissions = {
'repo:read': ['public_repo'],
'repo:write': ['repo'],
'admin': ['admin:org']
};
function checkPermission(user, action, resource) {
const required = permissions[action];
const granted = user.github_scopes;
return required.some(scope => granted.includes(scope));
}

// Command sanitization pipeline
function sanitizeCommand(input) {
// 1. Remove dangerous characters
const cleaned = input.replace(/[;&|`$(){}[\]]/g, '');
// 2. Whitelist allowed commands
const allowedCommands = ['npm', 'git', 'echo', 'ls', 'pwd'];
const firstWord = cleaned.split(' ')[0];
if (!allowedCommands.includes(firstWord)) {
throw new Error('Command not allowed');
}
// 3. Length limits
if (cleaned.length > 1000) {
throw new Error('Command too long');
}
return cleaned;
}

# Development setup
git clone https://github.com/NickScherbakov/daily-code-deploy.git
cd daily-code-deploy
# Install dependencies
npm install
cd backend && npm install && cd ..
# Start development server
npm run dev  # Watches for changes

# Production with Docker
docker-compose up -d

# Install PM2
npm install -g pm2
# Start application
pm2 start backend/server.js --name "daily-code-deploy"
pm2 startup
pm2 save

# k8s/deployment.yaml
apiVersion: apps/v1
kind: Deployment
metadata:
name: daily-code-deploy
spec:
replicas: 3
selector:
matchLabels:
app: daily-code-deploy
template:
metadata:
labels:
app: daily-code-deploy
spec:
containers:
- name: app
image: daily-code-deploy:latest
ports:
- containerPort: 5000
env:
- name: NODE_ENV
          value: "production"

Load Balancer (nginx/HAProxy)
        │
        ├── App Instance 1 ──┐
        ├── App Instance 2 ──┤── Shared Redis Queue
        └── App Instance 3 ──┘
        │
  Database Cluster
// Connection pooling
const pool = new Pool({
connectionString: process.env.DATABASE_URL,
max: 20,
idleTimeoutMillis: 30000,
connectionTimeoutMillis: 2000,
});
// Caching layer
const cache = new Redis({
host: process.env.REDIS_HOST,
maxRetriesPerRequest: 3,
retryDelayOnFailover: 100,
});

// Pipeline resource limits
const limits = {
maxConcurrentPipelines: 50,
maxPipelineDuration: 600, // 10 minutes
maxOutputSize: 1024 * 1024, // 1MB
maxCpuUsage: 80, // percent
maxMemoryUsage: 512 * 1024 * 1024 // 512MB
};

src/
├── controllers/  # Request handlers
├── middleware/   # Express middleware
├── services/     # Business logic
├── models/       # Data models
├── utils/        # Helper functions
├── config/       # Configuration
└── tests/        # Test suites
βββ tests/ # Test suites
// Unit tests
describe('Pipeline Controller', () => {
test('should validate input correctly', () => {
// Test implementation
});
});
// Integration tests
describe('API Endpoints', () => {
test('POST /api/pipeline/run', async () => {
// Test full request flow
});
});
// End-to-end tests
describe('Complete Pipeline Flow', () => {
test('should execute pipeline successfully', async () => {
// Test complete user journey
});
});

# Development workflow
git checkout main
git pull origin main
git checkout -b feature/new-feature
# Development work
npm test
npm run lint
npm run build
# Submit changes
git commit -m "feat: add new feature"
git push origin feature/new-feature
# Create Pull Request

Document Version: 1.0
Last Updated: September 23, 2025
Status: Living document - regularly updated π