diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 0000000..732700a
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,64 @@
+# Git
+.git
+.gitignore
+.gitattributes
+
+# Node modules and build outputs
+node_modules
+npm-debug.log
+yarn-error.log
+pnpm-debug.log
+.turbo
+dist
+build
+.next
+out
+
+# Dependencies cache
+.bun
+bun.lockb
+
+# IDE and editors
+.vscode
+.idea
+*.swp
+*.swo
+*~
+.DS_Store
+
+# Environment and config
+.env
+.env.local
+.env.*.local
+.eslintcache
+
+# Documentation
+*.md
+docs/
+README.md
+
+# OS files
+Thumbs.db
+.DS_Store
+
+# CI/CD
+.github
+.gitlab-ci.yml
+
+# Docker
+Dockerfile*
+docker-compose*
+.dockerignore
+
+# Testing
+*.test.ts
+*.spec.ts
+coverage
+.nyc_output
+
+# Misc
+.prettierignore
+.prettierrc
+biome.json
+tsconfig.json
+turbo.json
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 0000000..3f31270
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,67 @@
+name: CI
+
+on:
+ push:
+ branches: ["main", "feat/container-readiness"]
+ pull_request:
+ branches: ["main"]
+
+jobs:
+ build:
+ name: Build & Test
+ runs-on: ubuntu-latest
+ timeout-minutes: 15
+
+ env:
+ # Database URL for 'init-db' step (connecting to exposed docker port)
+ DATABASE_URL: postgresql://postgres:postgres@localhost:5432/base0
+
+ steps:
+ - name: Checkout repo
+ uses: actions/checkout@v4
+
+ - name: Setup Bun
+ uses: oven-sh/setup-bun@v1
+ with:
+ bun-version: latest
+
+ - name: Cache Bun dependencies
+ uses: actions/cache@v4
+ with:
+ path: ~/.bun/install/cache
+ key: ${{ runner.os }}-bun-${{ hashFiles('**/bun.lockb') }}
+ restore-keys: |
+ ${{ runner.os }}-bun-
+
+ - name: Install dependencies
+ run: bun install --frozen-lockfile
+
+ - name: Check Code Quality (Lint & Biome)
+ run: bun run check
+
+ - name: Type Check
+ run: bun run type-check
+
+ - name: Build
+ run: bun run build
+
+ - name: Start Infrastructure
+ run: |
+ docker compose up -d --build postgres api minio
+
+ # Wait for API health check
+ echo "Waiting for API to be healthy..."
+ timeout 60s sh -c 'until curl -s http://localhost:3001/health | grep "OK"; do sleep 2; done'
+ echo "Infrastructure is ready!"
+
+ - name: Init Test Database
+ run: |
+ # The postgres container is exposed on 5432
+ cd packages/db
+ bun run db:push
+
+ - name: Run Tests
+ run: bun run test
+ env:
+ # Ensure tests target the local exposed port which maps to the container
+ API_URL: http://localhost:3001/v1
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000..eae70b9
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,113 @@
+# Contributing to Base0
+
+Thank you for your interest in contributing to Base0. This guide is intended to help you understand the development process, potential areas for contribution, and how to submit successful Pull Requests.
+
+## Understanding the Project
+
+Before contributing, we recommend reviewing the core documentation to understand the project's architecture and future direction:
+
+* **Project Plan**: See [docs/plan.md](docs/plan.md) for the architectural overview and features.
+* **Future Roadmap**: See [docs/roadmap.md](docs/roadmap.md) for planned phases and upcoming features.
+
+## How Can You Contribute?
+
+We welcome contributions in several forms:
+
+1. **Reporting Bugs**: If you encounter unintended behavior, please open an Issue describing the reproduction steps.
+2. **Suggesting Features**: Check the Roadmap first. If your idea isn't listed, propose it via an Issue.
+3. **Codebase Improvements**: You can pick up "Good First Issues" or work on roadmap items (Vector DB, Realtime, Enterprise Auth).
+4. **Documentation**: Improving the clarity of our specific guides or inline comments.
+
+## Development Environment
+
+### Prerequisites
+
+* **Runtime**: Bun (v1.2 or higher)
+* **Container**: Docker (required for the local PostgreSQL instance)
+
+### Setup Instructions
+
+1. **Clone the repository**:
+ ```bash
+ git clone https://github.com/itisrohit/base0.git
+ cd base0
+ ```
+
+2. **Install dependencies**:
+ ```bash
+ bun install
+ ```
+
+3. **Configure Environment**:
+ ```bash
+ cp .env.example .env
+ ```
+
+4. **Start Infrastructure**:
+ ```bash
+ docker-compose up -d db
+ ```
+
+5. **Initialize Database**:
+ ```bash
+ bun run db:push
+ ```
+
+6. **Start Development Server**:
+ This will start both the API and the Dashboard concurrently:
+ ```bash
+ bun run dev
+ ```
+ * **Dashboard**: http://localhost:5173
+ * **API**: http://localhost:3001
+
+## Operations
+
+We use **Turborepo** to manage tasks across the workspace.
+
+* `bun run build` - Build all apps and packages.
+* `bun run lint` - Lint all code using Biome.
+* `bun run check` - Run type-checking and linting verification.
+
+## Project Structure
+
+* `apps/api`: The Hono-based backend core.
+* `apps/dashboard`: The React 19 mission control dashboard.
+* `packages/db`: Shared Drizzle ORM schema and database utilities.
+
+## Pull Request Process
+
+1. **Fork the Repository**: Create a fork of the repository to your own GitHub account.
+2. **Create a Branch**: Create a new branch from `main` with a descriptive name (e.g., `feat/vector-search` or `fix/auth-bug`).
+3. **Implement Changes**: write your code, ensuring it adheres to the project's coding standards.
+4. **Verify**: Run `bun run check` to ensure your code passes all linting and type-safety checks.
+5. **Commit**: Use Conventional Commits for your messages (e.g., `feat: add realtime websocket server`).
+6. **Submit PR**: Push your branch and open a Pull Request against the `main` branch. Provide a clear description of the changes and link any related issues.
+
+### Pull Request Format
+
+Please copy and paste the following template into your PR description:
+
+```markdown
+## Summary
+
+
+## Type of Change
+
+- [ ] Bug fix (non-breaking change which fixes an issue)
+- [ ] New feature (non-breaking change which adds functionality)
+- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
+- [ ] Documentation update
+
+## Checklist
+- [ ] I have performed a self-review of my own code
+- [ ] I have commented my code, particularly in hard-to-understand areas
+- [ ] I have added tests that prove my fix is effective or that my feature works
+- [ ] My changes generate no new linting or build errors
+```
+
+## Coding Standards
+
+* **Strict Typed**: We enforce strict TypeScript configurations. Avoid usage of `any`.
+* **Linter**: We use Biome. Ensure your code is formatted before submission.
+
diff --git a/README.md b/README.md
index 590f2c0..0c8063a 100644
--- a/README.md
+++ b/README.md
@@ -1,96 +1,113 @@
-# Base0
+
+
+
-A high-performance, lightweight Backend-as-a-Service (BaaS) built for the edge-native era.
+
+ A minimal backend platform
+
-Base0 provides essential backend primitives including authentication, collection-based data storage, and blob storage, all with a zero-trust architecture and type-safe developer experience.
+
+
+
+
+
+
+
+
+
+
+
+
+## Overview
+
+A lightweight backend platform providing authentication, document storage, file handling, and RBAC through a type-safe API and React-based control plane.
## Architecture
-Base0 is built as a monorepo leveraging Bun and Turborepo. It focuses on minimalist system design, using Web Standards (Request/Response) to ensure compatibility across various edge runtimes.
+The platform is engineered as a monorepo leveraging Bun and Turborepo. It prioritizes:
+- **Edge-native compatibility**: Designed to run efficiently on modern runtimes.
+- **Type Safety**: End-to-end typing shared between client and server.
+- **Minimalism**: Leveraging standard Web APIs (Request/Response) over heavy framework abstractions.
## Core Features
### Authentication
-- JWT-based session management with access and refresh tokens.
-- Secure password hashing using Argon2id.
-- Passwordless login via Magic Links.
-- Pluggable OAuth2 interface (GitHub/Google).
+- JWT-based session management with automatic access and refresh token rotation.
+- Secure, industry-standard password hashing using Argon2id.
+- Passwordless authentication flows using Magic Links.
+- Extensible OAuth2 interface supporting providers like GitHub and Google.
### Data Engine
-- Dynamic schema definition with runtime Zod validation.
-- Multi-tenant isolation through project-based logical separation.
-- Advanced querying using PostgreSQL JSONB.
+- Dynamic schema definitions utilizing runtime Zod validation.
+- Strict multi-tenant isolation via logical project separation.
+- Advanced querying capabilities leveraging PostgreSQL JSONB.
### Blob Storage
-- Pluggable driver architecture supporting Local and S3 (AWS/MinIO/R2) storage.
-- Secure upload and streaming download.
+- Pluggable driver architecture supporting Local filesystem and S3-compatible providers (AWS, MinIO, R2).
+- Secure, presigned upload mechanisms and streaming downloads.
+- Integrated file metadata tracking and management.
### Access Control
-- Granular Role-Based Access Control (Owner, Admin, Member, Viewer).
-- Project-scoped API keys with surgical scope enforcement.
-- Integrated rate limiting.
+- Granular Role-Based Access Control (RBAC) with Owner, Admin, Member, and Viewer roles.
+- Project-scoped API keys with precise permission enforcement.
+- Native rate limiting to prevent abuse.
## Technology Stack
-- **Server Runtime:** Bun v1.2+
-- **API Framework:** Hono
-- **Database:** PostgreSQL with Drizzle ORM
-- **Dashboard:** React 19, Vite 6, Tailwind CSS v4
-- **Routing:** TanStack Router
-- **State Management:** TanStack Query
-- **Tooling:** Turborepo, Biome
+- **Runtime**: Bun v1.2+
+- **API Framework**: Hono
+- **Database**: PostgreSQL (JSONB) with Drizzle ORM
+- **Dashboard**: React 19, Vite 6, Tailwind CSS v4
+- **Routing**: TanStack Router
+- **State Management**: TanStack Query
+- **Tooling**: Turborepo, Biome
## Getting Started
### Prerequisites
-- Bun v1.2 or higher
-- Docker (for local PostgreSQL)
+- **Bun**: v1.2 or higher
+- **Docker**: Required for local PostgreSQL instance
-### Setup
+### Installation
-1. Clone the repository:
- ```bash
- git clone https://github.com/itisrohit/base0.git
- cd base0
- ```
+1. **Clone the repository**
+ ```bash
+ git clone https://github.com/itisrohit/base0.git
+ cd base0
+ ```
-2. Install dependencies:
- ```bash
- bun install
- ```
+2. **Install dependencies**
+ ```bash
+ bun install
+ ```
-3. Configure environment:
- ```bash
- cp .env.example .env
- ```
+3. **Configure environment**
+ ```bash
+ cp .env.example .env
+ ```
-4. Initialize the database:
- ```bash
- docker compose up -d
- cd packages/db
- bun run db:push
- cd ../..
- ```
+4. **Initialize infrastructure**
+ Start the database container and apply schema migrations:
+ ```bash
+ docker-compose up -d db
+ bun run db:push
+ ```
### Development
-To start the API and Dashboard simultaneously:
+To start the API and Dashboard simultaneously in development mode:
```bash
bun run dev
```
-- API Server: `http://localhost:3001`
-- Dashboard: `http://localhost:3000`
-
-## Repository Structure
+- **API Server**: http://localhost:3001
+- **Dashboard**: http://localhost:3000
-The project is organized into logical workspaces:
+## Contributing
-- `apps/api`: The Hono-based core service.
-- `apps/dashboard`: The React-based administrative interface.
-- `packages/db`: Shared database schema and Drizzle configuration.
+Please review [CONTRIBUTING.md](CONTRIBUTING.md) for details on our code of conduct and the process for submitting pull requests.
## License
diff --git a/apps/api/Dockerfile b/apps/api/Dockerfile
new file mode 100644
index 0000000..bca2154
--- /dev/null
+++ b/apps/api/Dockerfile
@@ -0,0 +1,83 @@
+# Multi-stage build for development environment
+FROM oven/bun:1.2.16-alpine AS base
+# Install dependencies only when needed
+FROM base AS deps
+# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
+RUN apk add --no-cache libc6-compat
+WORKDIR /app
+
+# Install dependencies based on the preferred package manager
+COPY package.json bun.lock ./
+COPY apps/api/package.json ./apps/api/
+COPY apps/dashboard/package.json ./apps/dashboard/
+COPY packages/db/package.json ./packages/db/
+RUN bun install --frozen-lockfile
+
+# Rebuild the source code only when needed
+FROM base AS builder
+WORKDIR /app
+COPY --from=deps /app/node_modules ./node_modules
+
+COPY . .
+# Build the API
+RUN cd apps/api && bun run build
+
+# Development runner image, copy all the files and run bun
+FROM oven/bun:1.2.16-alpine AS runner
+WORKDIR /app
+
+ENV NODE_ENV=development
+# Install curl for health checks
+RUN apk add --no-cache curl
+
+# Add a non-root user for security
+RUN addgroup --system --gid 1001 bun-nodejs && \
+ adduser --system --uid 1001 bun-nodejs
+
+# Create startup script
+COPY < {
diff --git a/apps/api/src/middleware/rate-limit.ts b/apps/api/src/middleware/rate-limit.ts
index 6d803b8..21821b3 100644
--- a/apps/api/src/middleware/rate-limit.ts
+++ b/apps/api/src/middleware/rate-limit.ts
@@ -69,7 +69,7 @@ export const rateLimit = (config: RateLimitConfig) => {
*/
export const standardRateLimit = rateLimit({
windowMs: 60 * 1000,
- max: 500,
+ max: parseInt(process.env.RATE_LIMIT_MAX || '500', 10),
keyGenerator: (c) => {
// Try to limit by API Key or User ID if authenticated, else IP
const variables = c as unknown as { Var: { auth: { keyId?: string; userId?: string } } };
diff --git a/apps/api/tests/advanced.test.ts b/apps/api/tests/advanced.test.ts
index a53f1a3..0d3839e 100644
--- a/apps/api/tests/advanced.test.ts
+++ b/apps/api/tests/advanced.test.ts
@@ -117,10 +117,10 @@ describe('Base0 Advanced Primitives: Rate Limiting & Scopes', () => {
// The limit is 500 per minute. We need to exceed it.
// Batch requests to prevent ECONNRESET
const hammerKey = writeKey;
-
let rateLimited = false;
- const batchSize = 50;
- const totalRequests = 510;
+ const batchSize = 20;
+ // Limit is 100 in docker-compose, so 150 should effectively trigger it
+ const totalRequests = 150;
for (let i = 0; i < totalRequests; i += batchSize) {
const batch = [];
@@ -139,12 +139,19 @@ describe('Base0 Advanced Primitives: Rate Limiting & Scopes', () => {
);
}
const results = await Promise.all(batch);
+
+ // Log first status of batch for debug
+ console.log(`Batch request #${i} status:`, results[0]);
+
if (results.some((status) => status === 429)) {
rateLimited = true;
+ console.log('Rate limit hit at request #', i + batchSize);
break;
}
+ // Small delay to allow server to process
+ await new Promise((r) => setTimeout(r, 10));
}
expect(rateLimited).toBe(true);
- }, 45000);
+ }, 30000);
});
diff --git a/apps/api/tests/integration.test.ts b/apps/api/tests/integration.test.ts
index c4dbaa9..facdc4a 100644
--- a/apps/api/tests/integration.test.ts
+++ b/apps/api/tests/integration.test.ts
@@ -184,6 +184,11 @@ describe('Base0 Integration Tests', () => {
},
body: formData,
});
+
+ if (res.status !== 201) {
+ console.log('Upload failed:', res.status, await res.text());
+ }
+
const data = await res.json();
expect(res.status).toBe(201);
fileId = data.file.id;
diff --git a/apps/api/tests/rbac.test.ts b/apps/api/tests/rbac.test.ts
index a6bc942..1f599d2 100644
--- a/apps/api/tests/rbac.test.ts
+++ b/apps/api/tests/rbac.test.ts
@@ -54,7 +54,7 @@ describe('Base0 RBAC & Collaboration Tests', () => {
});
const aData = await res3.json();
adminToken = aData.accessToken;
- });
+ }, 30000);
test('2. Owner creates project', async () => {
const res = await apiFetch('/projects', {
diff --git a/apps/dashboard/Dockerfile b/apps/dashboard/Dockerfile
new file mode 100644
index 0000000..4b99d25
--- /dev/null
+++ b/apps/dashboard/Dockerfile
@@ -0,0 +1,138 @@
+# Multi-stage build for development environment
+FROM oven/bun:1.2.16-alpine AS base
+# Install dependencies only when needed
+FROM base AS deps
+RUN apk add --no-cache libc6-compat
+WORKDIR /app
+
+# Install dependencies based on the preferred package manager
+COPY package.json bun.lock ./
+COPY apps/api/package.json ./apps/api/
+COPY apps/dashboard/package.json ./apps/dashboard/
+COPY packages/db/package.json ./packages/db/
+RUN bun install --frozen-lockfile
+
+# Rebuild the source code only when needed
+FROM base AS builder
+WORKDIR /app
+COPY --from=deps /app/node_modules ./node_modules
+
+COPY . .
+# Build the dashboard using bun
+RUN cd apps/dashboard && bun run build
+
+# Development runner image - serve static files with nginx
+FROM nginx:1.25-alpine AS runner
+WORKDIR /usr/share/nginx/html
+
+# Install curl for health checks
+RUN apk add --no-cache curl
+
+# Copy the built application
+COPY --from=builder /app/apps/dashboard/dist ./
+
+# Copy nginx configuration
+COPY < users)
-* `config` (jsonb)
-
-#### `api_keys`
-* `id` (uuid)
-* `key_id` (nanoId - public identifier)
-* `key_hash` (argon2 hashed secret)
-* `project_id`
-* `scopes` (text[])
-
-#### `project_members`
-* `id` (uuid)
-* `project_id`
-* `user_id`
-* `role` (owner, admin, member, viewer)
-
-#### `collections`
-* `id`
-* `project_id`
-* `schema_def` (jsonb - stores field types)
-* `permissions` (jsonb)
-
-#### `documents`
-* `id`
-* `collection_id`
-* `data` (jsonb)
-* `vector_embedding` (placeholder for future AI search)
-* `created_at`
-
-#### `buckets`
-* `id` (uuid)
-* `project_id`
-* `name`
-* `config` (jsonb)
-
-#### `files`
-* `id` (uuid)
-* `bucket_id`
-* `name`
-* `path` (storage path)
-* `size`
-* `mime_type`
-
-#### `magic_links`
-* `id` (uuid)
-* `token_hash` (unique index)
-* `user_email`
-* `expires_at`
-* `used` (boolean)
-
-#### `oauth_accounts`
-* `id` (uuid)
-* `provider` (text: 'github', 'google')
-* `provider_user_id`
-* `user_id` (fk -> users)
+
+
+### Database Schema (v1 Summary)
+
+The schema is built on **PostgreSQL 18** and managed via **Drizzle ORM**. It is fully typed and normalized.
+
+* **Auth & IAM**: `users`, `project_members`, `api_keys` (RBAC, Scoped Access).
+* **Data Engine**: `projects` (Tenant root), `collections` (Dynamic Schema), `documents` (JSONB Data).
+* **Storage**: `buckets` (Config), `files` (Metadata & Paths).
+* **Integrations**: `magic_links` (Passwordless), `oauth_accounts` (Social Login).
+
+Full schema definition available in `packages/db/schema.ts`.
---
@@ -212,40 +156,44 @@ This project demonstrates:
* ✅ **Usage Telemetry UI**: Dashboard with API request trends and storage consumption metrics - Implemented
### Phase 5: Refinement & QA (In Progress)
-* 🔄 **OpenAPI / Swagger auto-generation**: Self-documenting API using `@hono/zod-openapi`
* ✅ **Integration Tests**: Comprehensive test suite (33+ tests) covering RBAC, Auth, and Storage - Implemented
-* 🔄 **Production Readiness**: Docker optimization, health checks, and security headers
-* 🔄 **Deployment Guides**: Step-by-step instructions for AWS, Fly.io, and self-hosting
+* ✅ **Container Readiness**: Docker optimization, health checks, and resource management - Implemented
---
-## Technical Notes & Production Path
+## Technical Notes & Deployment Steps
### 1. Usage Telemetry
* **Current implementation**: The `/usage` endpoint returns high-fidelity **mock telemetry data** (generated on the server) to demonstrate the Recharts visualization capabilities in the dashboard.
-* **Production Path**:
+* **Deployment Path**:
* Implement an **Event Collector** middleware (e.g., ClickHouse or TimescaleDB) to record real-time request metrics.
* Integrate with **Prometheus/Grafana** for infrastructure-level monitoring.
* Replace the mock generator in `apps/api/src/routes/usage.ts` with real database aggregations.
### 2. Magic Link Authentication
* **Current implementation**: Generates a secure token and logs the **Login URL to the server console**. This allows instant testing without an SMTP server.
-* **Production Path**:
+* **Deployment Path**:
* Plug in an **Email Provider** (Resend, Postmark, or AWS SES) via the `SMTP` driver.
* Update the `auth/magic-link` route to send real emails instead of console logging.
### 3. GitHub OAuth
* **Current implementation**: Uses the **Arctic** library with placeholders for `GITHUB_CLIENT_ID` and `GITHUB_CLIENT_SECRET`.
-* **Production Path**:
+* **Deployment Path**:
* Register a "GitHub OAuth App" in GitHub Developer Settings.
- * Configure the `.env` file with real production credentials.
- * Enable the callback URL pointing to your production domain.
+ * Configure the `.env` file with real credentials.
+ * Enable the callback URL pointing to your deployment domain.
---
-## Definition of Done
+## Project Status
+
+**Core v1 is completed.** ✅
+
+Base0 now provides a complete, functional backend foundation including Authentication, Database, Storage, and a comprehensive Dashboard.
+
+### Future Roadmap (v2+)
+For details on future implementation phases including Realtime capabilities, Vector Databases, and Enterprise Auth, please see [docs/roadmap.md](./roadmap.md).
+
+---
-* **Deployable**: Single `docker compose up` starts the world.
-* **Performant**: Sub-50ms response times for core reads.
-* **Secure**: Passes standard OWASP validation checks.
-* **Observable**: Structured logging is implemented.
+*Built with ❤️ for the edge-native era*
diff --git a/docs/roadmap.md b/docs/roadmap.md
new file mode 100644
index 0000000..d1c5020
--- /dev/null
+++ b/docs/roadmap.md
@@ -0,0 +1,111 @@
+# Base0 Roadmap (2026+)
+
+This document outlines the future development phases for the Base0 platform, building upon the core v1 foundation.
+
+## Core v1 (Completed) ✅
+* **Authentication**: Email/Password, Magic Links, GitHub OAuth.
+* **Database**: Dynamic JSONB schema, CRUD, Filtering, Multi-tenancy.
+* **Storage**: S3/MinIO compatible object storage.
+* **Access Control**: RBAC, API Keys, Permissions.
+* **Dashboard**: Full management UI.
+
+---
+
+## Phase 2: Advanced Data & AI (Planned)
+*Focus: Leveraging modern AI capabilities and advanced data structures.*
+
+### 2.1 Vector Database Integration
+* **Goal**: Native support for vector embeddings to enable semantic search and AI RAG (Retrieval-Augmented Generation) workflows.
+* **Implementation Strategy**:
+ * Integrate `pgvector` extension for PostgreSQL 18.
+ * Add `vector` type to the JSONB schema definition.
+ * Expose similarity search operators (cosine distance, euclidean, etc.) via the filtering engine.
+ * Auto-generate embeddings using an optional OpenAI/Cloudflare AI Worker integration.
+
+### 2.2 Full-Text Search
+* **Goal**: High-performance, typo-tolerant search across millions of records.
+* **Implementation Strategy**:
+ * **Option A**: Native Postgres Full Text Search (tsvector/tsquery). Good for medium scale.
+ * **Option B**: Integration with **Meilisearch** or **Typesense** (as a sidecar container) for dedicated search workloads.
+ * Sync engine to replicate collection writes to the search index.
+
+### 2.3 Graph Relationships
+* **Goal**: Modeling complex, deep relationships without complex joins.
+* **Implementation**:
+ * Enhanced foreign key referencing in JSONB schemas.
+ * Recursive query support for tree structures (e.g., nested comments, organization hierarchies).
+
+---
+
+## Phase 3: Realtime & Event Systems (Planned)
+*Focus: Enabling collaborative and reactive application experiences.*
+
+### 3.1 Realtime Engine
+* **Goal**: Push updates to clients instantly when data changes.
+* **Implementation Strategy**:
+ * **Architecture**: WebSocket server powered by Bun's native `Bun.serve({ websocket })`.
+ * **Mechanism**: PostgreSQL `NOTIFY/LISTEN` channels to detect database changes (Change Data Capture).
+ * **Client SDK**: Subscription API (e.g., `client.collection('posts').subscribe(...)`).
+ * **Protocol**: Lightweight JSON-based protocol with automatic reconnection and state reconciliation.
+
+### 3.2 Server-Side Functions (Edge Functions)
+* **Goal**: Allow users to run custom logic triggered by events or webhooks.
+* **Implementation**:
+ * V8 Isolate sandboxing (via Deno or simple JS eval contexts) to execute user code safely.
+ * Triggers: `onBeforeCreate`, `onAfterUpdate`, `onSchedule` (Cron).
+
+### 3.3 Event Bus & Webhooks
+* **Goal**: External integrations.
+* **Implementation**:
+ * Reliable webhook delivery system with retries (using a Redis/Postgres backed queue).
+ * Admin UI to view payload logs and redelivery status.
+
+---
+
+## Phase 4: Enterprise Identity & Security (Planned)
+*Focus: Hardening security and standardizing the authentication experience.*
+
+### 4.1 Authentication Experience (DevEx)
+* **Goal**: Provide a "drop-in" authentication solution that rivals commercial providers in ease of use.
+* **Implementation Strategy**:
+ * **Unified Auth SDK (Client-side)**:
+ * Single import library: `import { auth } from '@base0/client'`.
+ * Methods: `auth.signUp()`, `auth.signInWithPassword()`, `auth.signInWithOAuth()`, `auth.signOut()`.
+ * **Automated Session Management**:
+ * SDK automatically handles `access_token` storage (in-memory) and `refresh_token` rotation (httpOnly cookie).
+ * Silent refresh mechanism to keep users logged in across page reloads without exposing tokens.
+ * **Reactive State Hooks**:
+ * Real-time listeners: `auth.onAuthStateChange((event, session) => { ... })`.
+ * Events: `SIGNED_IN`, `SIGNED_OUT`, `TOKEN_REFRESHED`, `USER_UPDATED`.
+ * **Security Standards**:
+ * PKCE (Proof Key for Code Exchange) flow for all OAuth operations.
+ * CSRF protection built-in to all mutation endpoints.
+ * Auto-redirect handling for OAuth callbacks (e.g., successful GitHub login -> redirect to configured app URL).
+
+### 4.2 Advanced Authentication Flows
+* **MFA / 2FA**: TOTP (Authenticator App) integration.
+* **Passkeys (WebAuthn)**: Biometric passwordless login (FaceID/TouchID).
+* **SAML / SSO**: Enterprise login for Okta, Auth0, and Active Directory integration.
+* **Anonymous Login**: Temporary guest sessions that can be "upgraded" to full accounts.
+
+### 4.3 Security Hardening
+* **Audit Logs**: Immutable logs of all admin actions (who changed what schema, who deleted what key).
+* **IP Whitelisting**: Restrict API key usage to specific CIDR blocks.
+* **Advanced Rate Limiting**: Redis-backed sliding window limiters per IP/User/Tenant.
+
+---
+
+## Phase 5: Infrastructure & Scaling (Planned)
+*Focus: Horizontal scalability and observability.*
+
+### 5.1 Observability Stack
+* **Metrics**: Native Prometheus endpoint exporting generic runtime metrics (memory, CPU, DB pool).
+* **Tracing**: OpenTelemetry integration for distributed tracing across services.
+
+### 5.2 Horizontal Scaling
+* **Read Replicas**: Support for separating Read/Write DB connections.
+* **Stateless API**: Ensure API nodes can scale infinitely behind a load balancer (Redis for shared session state).
+
+### 5.3 Multi-Region Replication
+* **Goal**: Bring data closer to global users.
+* **Strategy**: Leveraging distributed SQLite (e.g., Turso/LibSQL) or Postgres logical replication.
diff --git a/package.json b/package.json
index 2b62927..9aed04c 100644
--- a/package.json
+++ b/package.json
@@ -8,8 +8,14 @@
"lint": "turbo lint",
"format": "turbo format",
"type-check": "turbo run type-check",
+ "test": "turbo run test",
"check": "biome check .",
- "check:apply": "biome check --write ."
+ "check:apply": "biome check --write .",
+ "docker:build": "docker-compose build",
+ "docker:up": "docker-compose up -d",
+ "docker:down": "docker-compose down",
+ "docker:logs": "docker-compose logs -f",
+ "docker:clean": "docker-compose down -v && docker system prune -f"
},
"workspaces": [
"apps/*",
diff --git a/turbo.json b/turbo.json
index a6c79ca..e880d87 100644
--- a/turbo.json
+++ b/turbo.json
@@ -17,6 +17,9 @@
},
"type-check": {
"dependsOn": ["^type-check"]
+ },
+ "test": {
+ "dependsOn": []
}
}
}