From 14354ea3f1f0fcdf88720be5dc0c9d73e7a9f27d Mon Sep 17 00:00:00 2001 From: dhuzjak Date: Thu, 18 Dec 2025 09:15:38 +0100 Subject: [PATCH 01/17] Initial project structure Initial project structure for the task Add App layers, tests, config files --- .env.example | 17 ++++ .gitignore | 54 +++++++++++++ PROJECT_STRUCTURE.md | 152 +++++++++++++++++++++++++++++++++++ app/__init__.py | 0 app/api/__init__.py | 0 app/api/health.py | 18 +++++ app/api/users.py | 58 +++++++++++++ app/core/__init__.py | 0 app/core/config.py | 29 +++++++ app/db/__init__.py | 0 app/db/database.py | 99 +++++++++++++++++++++++ app/db/schema.py | 40 +++++++++ app/main.py | 41 ++++++++++ app/models/__init__.py | 0 app/models/user.py | 93 +++++++++++++++++++++ app/schemas/__init__.py | 0 app/schemas/user.py | 49 +++++++++++ app/services/__init__.py | 0 app/services/user_service.py | 94 ++++++++++++++++++++++ pytest.ini | 10 +++ requirements.txt | 8 ++ tests/__init__.py | 0 tests/conftest.py | 59 ++++++++++++++ tests/test_api.py | 119 +++++++++++++++++++++++++++ tests/test_models.py | 145 +++++++++++++++++++++++++++++++++ tests/test_services.py | 127 +++++++++++++++++++++++++++++ 26 files changed, 1212 insertions(+) create mode 100644 .env.example create mode 100644 .gitignore create mode 100644 PROJECT_STRUCTURE.md create mode 100644 app/__init__.py create mode 100644 app/api/__init__.py create mode 100644 app/api/health.py create mode 100644 app/api/users.py create mode 100644 app/core/__init__.py create mode 100644 app/core/config.py create mode 100644 app/db/__init__.py create mode 100644 app/db/database.py create mode 100644 app/db/schema.py create mode 100644 app/main.py create mode 100644 app/models/__init__.py create mode 100644 app/models/user.py create mode 100644 app/schemas/__init__.py create mode 100644 app/schemas/user.py create mode 100644 app/services/__init__.py create mode 100644 app/services/user_service.py create mode 100644 pytest.ini create mode 100644 requirements.txt create mode 100644 tests/__init__.py create mode 100644 tests/conftest.py create mode 100644 tests/test_api.py create mode 100644 tests/test_models.py create mode 100644 tests/test_services.py diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..dd9a468 --- /dev/null +++ b/.env.example @@ -0,0 +1,17 @@ +# Application Settings +PROJECT_NAME=FastAPI Project +VERSION=1.0.0 +DESCRIPTION=FastAPI project with raw SQL database layer + +# Database Settings +# Format: postgresql://user:password@host:port/database +DATABASE_URL=postgresql://postgres:postgres@localhost:5432/app_db + +# Test Database Settings (used by pytest) +TEST_DATABASE_URL=postgresql://postgres:postgres@localhost:5432/test_db + +# CORS Settings (comma-separated list) +ALLOWED_ORIGINS=http://localhost:3000,http://localhost:8000 + +# API Settings +API_V1_PREFIX=/api/v1 diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..5680070 --- /dev/null +++ b/.gitignore @@ -0,0 +1,54 @@ +# Python +__pycache__/ +*.py[cod] +*$py.class +*.so +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg + +# Virtual Environment +venv/ +env/ +ENV/ +env.bak/ +venv.bak/ + +# IDEs +.vscode/ +.idea/ +*.swp +*.swo +*~ + +# Environment +.env +.env.local + +# Database +*.db +*.sqlite +*.sqlite3 + +# Testing +.pytest_cache/ +.coverage +htmlcov/ +.tox/ + +# OS +.DS_Store +Thumbs.db diff --git a/PROJECT_STRUCTURE.md b/PROJECT_STRUCTURE.md new file mode 100644 index 
0000000..8de4813 --- /dev/null +++ b/PROJECT_STRUCTURE.md @@ -0,0 +1,152 @@ +# FastAPI Project Structure + +A FastAPI project with a database layer using raw SQL and comprehensive pytest test coverage. + +## Project Structure + +``` +. +├── app/ +│ ├── api/ # API endpoints/routes +│ │ ├── health.py # Health check endpoints +│ │ └── users.py # User endpoints +│ ├── core/ # Core configuration +│ │ └── config.py # Application settings +│ ├── db/ # Database layer +│ │ ├── database.py # Database connection and queries +│ │ └── schema.py # Database schema definitions +│ ├── models/ # Data access layer +│ │ └── user.py # User model with raw SQL +│ ├── schemas/ # Pydantic schemas +│ │ └── user.py # User schemas +│ ├── services/ # Business logic layer +│ │ └── user_service.py # User service +│ └── main.py # FastAPI application entry point +├── tests/ # Test files +│ ├── conftest.py # Pytest fixtures +│ ├── test_api.py # API endpoint tests +│ ├── test_models.py # Model/database tests +│ └── test_services.py # Service layer tests +├── .env.example # Example environment variables +├── .gitignore # Git ignore file +├── pytest.ini # Pytest configuration +├── requirements.txt # Project dependencies +├── requirements-dev.txt # Development dependencies +└── README.md # Project documentation +``` + +## Features + +- FastAPI web framework +- Raw SQL database layer (SQLite by default, easily adaptable to PostgreSQL) +- Pydantic for data validation +- Comprehensive test coverage with pytest +- Clean architecture with separation of concerns +- Password hashing with bcrypt +- CORS middleware support +- Environment-based configuration + +## Installation + +1. Create a virtual environment: +```bash +python -m venv venv +source venv/bin/activate # On Windows: venv\Scripts\activate +``` + +2. Install dependencies: +```bash +pip install -r requirements-dev.txt +``` + +3. Create environment file: +```bash +cp .env.example .env +``` + +## Running the Application + +Start the development server: +```bash +uvicorn app.main:app --reload +``` + +The API will be available at: +- Main API: http://localhost:8000 +- Interactive docs: http://localhost:8000/docs +- Alternative docs: http://localhost:8000/redoc + +## Running Tests + +Run all tests: +```bash +pytest +``` + +Run with coverage: +```bash +pytest --cov=app --cov-report=html +``` + +Run specific test categories: +```bash +pytest -m unit # Run only unit tests +pytest -m integration # Run only integration tests +``` + +Run specific test file: +```bash +pytest tests/test_api.py +``` + +## API Endpoints + +### Health Check +- `GET /health` - Health check endpoint +- `GET /` - Root endpoint + +### Users (Example) +- `POST /api/v1/users/` - Create a new user +- `GET /api/v1/users/` - Get all users (with pagination) +- `GET /api/v1/users/{user_id}` - Get user by ID +- `PUT /api/v1/users/{user_id}` - Update user +- `DELETE /api/v1/users/{user_id}` - Delete user + +## Database + +This project uses raw SQL queries (no ORM). The database layer provides: +- Connection management with context managers +- Transaction handling (commit/rollback) +- Parameterized queries for security +- Custom query execution methods + +The example uses SQLite, but can easily be adapted to PostgreSQL by: +1. Installing `psycopg2` or `asyncpg` +2. Updating the `DATABASE_URL` in `.env` +3. 
Modifying `app/db/database.py` to use PostgreSQL connection + +## Testing + +The test suite includes: +- **Unit tests**: Test individual components in isolation +- **Integration tests**: Test API endpoints and service integration +- Fixtures for test data and database setup +- Automatic database cleanup between tests + +## Development + +The project follows clean architecture principles: +1. **API Layer** (`app/api/`): HTTP request/response handling +2. **Service Layer** (`app/services/`): Business logic +3. **Model Layer** (`app/models/`): Data access with raw SQL +4. **Schema Layer** (`app/schemas/`): Data validation +5. **Database Layer** (`app/db/`): Database connection and schema + +## Adapting for Other Use Cases + +This structure provides a foundation that can be adapted for various applications: +- Replace `users` with your domain entities (e.g., `servers`, `products`, `orders`) +- Update schemas in `app/schemas/` for your data models +- Modify database schema in `app/db/schema.py` +- Update models in `app/models/` with your SQL queries +- Adjust API endpoints in `app/api/` diff --git a/app/__init__.py b/app/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/api/__init__.py b/app/api/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/api/health.py b/app/api/health.py new file mode 100644 index 0000000..aa8e1c4 --- /dev/null +++ b/app/api/health.py @@ -0,0 +1,18 @@ +""" +Health check endpoints. +""" +from fastapi import APIRouter + +router = APIRouter() + + +@router.get("/health") +async def health_check(): + """Health check endpoint.""" + return {"status": "healthy"} + + +@router.get("/") +async def root(): + """Root endpoint.""" + return {"message": "Welcome to FastAPI with Raw SQL"} diff --git a/app/api/users.py b/app/api/users.py new file mode 100644 index 0000000..480ccd7 --- /dev/null +++ b/app/api/users.py @@ -0,0 +1,58 @@ +""" +User API endpoints. 
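+
+Example request (hypothetical client call; assumes the app is served on
+localhost:8000 and that the httpx package is available):
+
+    import httpx
+
+    resp = httpx.post(
+        "http://localhost:8000/api/v1/users/",
+        json={"email": "user@example.com", "username": "user1",
+              "full_name": "User One", "password": "secret123!"},
+    )
+    assert resp.status_code == 201  # body is a UserResponse, without password fields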
+""" +from typing import List +from fastapi import APIRouter, HTTPException, status + +from app.schemas.user import UserCreate, UserUpdate, UserResponse +from app.services.user_service import UserService + +router = APIRouter() + + +@router.post("/", response_model=UserResponse, status_code=status.HTTP_201_CREATED) +async def create_user(user: UserCreate): + """Create a new user.""" + service = UserService() + try: + return service.create_user(user) + except ValueError as e: + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e)) + + +@router.get("/", response_model=List[UserResponse]) +async def get_users(skip: int = 0, limit: int = 100): + """Get all users with pagination.""" + service = UserService() + return service.get_users(skip=skip, limit=limit) + + +@router.get("/{user_id}", response_model=UserResponse) +async def get_user(user_id: int): + """Get user by ID.""" + service = UserService() + user = service.get_user(user_id) + if not user: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found") + return user + + +@router.put("/{user_id}", response_model=UserResponse) +async def update_user(user_id: int, user_update: UserUpdate): + """Update user.""" + service = UserService() + try: + user = service.update_user(user_id, user_update) + if not user: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found") + return user + except ValueError as e: + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e)) + + +@router.delete("/{user_id}", status_code=status.HTTP_204_NO_CONTENT) +async def delete_user(user_id: int): + """Delete user.""" + service = UserService() + if not service.delete_user(user_id): + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found") diff --git a/app/core/__init__.py b/app/core/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/core/config.py b/app/core/config.py new file mode 100644 index 0000000..5329d88 --- /dev/null +++ b/app/core/config.py @@ -0,0 +1,29 @@ +""" +Application configuration settings. +""" +from typing import List +from pydantic_settings import BaseSettings + + +class Settings(BaseSettings): + """Application settings.""" + + PROJECT_NAME: str = "FastAPI Project" + VERSION: str = "1.0.0" + DESCRIPTION: str = "FastAPI project with raw SQL database layer" + + # Database settings + DATABASE_URL: str = "postgresql://postgres:postgres@localhost:5432/app_db" + + # CORS settings + ALLOWED_ORIGINS: List[str] = ["http://localhost:3000", "http://localhost:8000"] + + # API settings + API_V1_PREFIX: str = "/api/v1" + + class Config: + env_file = ".env" + case_sensitive = True + + +settings = Settings() diff --git a/app/db/__init__.py b/app/db/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/db/database.py b/app/db/database.py new file mode 100644 index 0000000..0ea75bf --- /dev/null +++ b/app/db/database.py @@ -0,0 +1,99 @@ +""" +Database connection and management using raw SQL. 
+""" +import psycopg2 +import psycopg2.extras +from contextlib import contextmanager +from typing import Generator +from urllib.parse import urlparse + +from app.core.config import settings + + +class Database: + """Database connection manager.""" + + def __init__(self, db_url: str): + """Initialize database with connection URL.""" + self.db_url = db_url + self._connection = None + self._parse_connection_params() + + def _parse_connection_params(self): + """Parse database URL into connection parameters.""" + if self.db_url.startswith("postgresql://"): + parsed = urlparse(self.db_url) + self.conn_params = { + "host": parsed.hostname, + "port": parsed.port or 5432, + "database": parsed.path[1:], + "user": parsed.username, + "password": parsed.password, + } + else: + # Fallback for direct parameter format + self.conn_params = {"dsn": self.db_url} + + def connect(self): + """Create database connection.""" + if self._connection is None or self._connection.closed: + self._connection = psycopg2.connect(**self.conn_params) + return self._connection + + def close(self): + """Close database connection.""" + if self._connection and not self._connection.closed: + self._connection.close() + + @contextmanager + def get_cursor(self) -> Generator[psycopg2.extras.RealDictCursor, None, None]: + """Get database cursor with automatic commit/rollback.""" + conn = self.connect() + cursor = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor) + try: + yield cursor + conn.commit() + except Exception as e: + conn.rollback() + raise e + finally: + cursor.close() + + def execute_query(self, query: str, params: tuple = ()): + """Execute a SQL query and return results.""" + with self.get_cursor() as cursor: + cursor.execute(query, params) + return cursor.fetchall() + + def execute_update(self, query: str, params: tuple = ()): + """Execute an update/insert/delete query.""" + with self.get_cursor() as cursor: + cursor.execute(query, params) + return cursor.rowcount + + def execute_insert(self, query: str, params: tuple = ()): + """Execute an insert query and return last inserted ID.""" + with self.get_cursor() as cursor: + cursor.execute(query + " RETURNING id", params) + result = cursor.fetchone() + return result["id"] if result else None + + +# Global database instance +db = Database(settings.DATABASE_URL) + + +def get_db() -> Database: + """Get database instance for dependency injection.""" + return db + + +def init_db(): + """Initialize database and create tables.""" + from app.db.schema import create_tables + create_tables() + + +def close_db(): + """Close database connection.""" + db.close() diff --git a/app/db/schema.py b/app/db/schema.py new file mode 100644 index 0000000..38b9d9e --- /dev/null +++ b/app/db/schema.py @@ -0,0 +1,40 @@ +""" +Database schema definitions using raw SQL. 
+""" +from app.db.database import get_db + + +def create_tables(): + """Create all database tables.""" + db = get_db() + + # Users table + users_table = """ + CREATE TABLE IF NOT EXISTS users ( + id SERIAL PRIMARY KEY, + email VARCHAR(255) UNIQUE NOT NULL, + username VARCHAR(255) UNIQUE NOT NULL, + full_name VARCHAR(255), + hashed_password VARCHAR(255) NOT NULL, + is_active BOOLEAN DEFAULT TRUE, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP + ) + """ + + # Create indexes + email_index = "CREATE INDEX IF NOT EXISTS idx_users_email ON users(email)" + username_index = "CREATE INDEX IF NOT EXISTS idx_users_username ON users(username)" + + with db.get_cursor() as cursor: + cursor.execute(users_table) + cursor.execute(email_index) + cursor.execute(username_index) + + +def drop_tables(): + """Drop all database tables (useful for testing).""" + db = get_db() + + with db.get_cursor() as cursor: + cursor.execute("DROP TABLE IF EXISTS users") diff --git a/app/main.py b/app/main.py new file mode 100644 index 0000000..71606af --- /dev/null +++ b/app/main.py @@ -0,0 +1,41 @@ +""" +Main FastAPI application entry point. +""" +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware + +from app.core.config import settings +from app.api import users, health + +app = FastAPI( + title=settings.PROJECT_NAME, + version=settings.VERSION, + description=settings.DESCRIPTION, +) + +# Configure CORS +app.add_middleware( + CORSMiddleware, + allow_origins=settings.ALLOWED_ORIGINS, + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + +# Include routers +app.include_router(health.router, tags=["health"]) +app.include_router(users.router, prefix="/api/v1/users", tags=["users"]) + + +@app.on_event("startup") +async def startup_event(): + """Run on application startup.""" + from app.db.database import init_db + init_db() + + +@app.on_event("shutdown") +async def shutdown_event(): + """Run on application shutdown.""" + from app.db.database import close_db + close_db() diff --git a/app/models/__init__.py b/app/models/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/models/user.py b/app/models/user.py new file mode 100644 index 0000000..d75f881 --- /dev/null +++ b/app/models/user.py @@ -0,0 +1,93 @@ +""" +User data access layer using raw SQL. +""" +from typing import List, Optional +from datetime import datetime + +from app.db.database import Database +from app.schemas.user import UserCreate, UserUpdate + + +class UserModel: + """User model with raw SQL queries.""" + + def __init__(self, db: Database): + self.db = db + + def create(self, user: UserCreate, hashed_password: str) -> int: + """Create a new user.""" + query = """ + INSERT INTO users (email, username, full_name, hashed_password) + VALUES (?, ?, ?, ?) + """ + params = (user.email, user.username, user.full_name, hashed_password) + return self.db.execute_insert(query, params) + + def get_by_id(self, user_id: int) -> Optional[dict]: + """Get user by ID.""" + query = "SELECT * FROM users WHERE id = ?" + results = self.db.execute_query(query, (user_id,)) + if results: + return dict(results[0]) + return None + + def get_by_email(self, email: str) -> Optional[dict]: + """Get user by email.""" + query = "SELECT * FROM users WHERE email = ?" 
+ results = self.db.execute_query(query, (email,)) + if results: + return dict(results[0]) + return None + + def get_by_username(self, username: str) -> Optional[dict]: + """Get user by username.""" + query = "SELECT * FROM users WHERE username = ?" + results = self.db.execute_query(query, (username,)) + if results: + return dict(results[0]) + return None + + def get_all(self, skip: int = 0, limit: int = 100) -> List[dict]: + """Get all users with pagination.""" + query = "SELECT * FROM users ORDER BY created_at DESC LIMIT ? OFFSET ?" + results = self.db.execute_query(query, (limit, skip)) + return [dict(row) for row in results] + + def update(self, user_id: int, user: UserUpdate, hashed_password: Optional[str] = None) -> int: + """Update user.""" + updates = [] + params = [] + + if user.email is not None: + updates.append("email = ?") + params.append(user.email) + if user.username is not None: + updates.append("username = ?") + params.append(user.username) + if user.full_name is not None: + updates.append("full_name = ?") + params.append(user.full_name) + if hashed_password is not None: + updates.append("hashed_password = ?") + params.append(hashed_password) + + if not updates: + return 0 + + updates.append("updated_at = ?") + params.append(datetime.utcnow().isoformat()) + params.append(user_id) + + query = f"UPDATE users SET {', '.join(updates)} WHERE id = ?" + return self.db.execute_update(query, tuple(params)) + + def delete(self, user_id: int) -> int: + """Delete user.""" + query = "DELETE FROM users WHERE id = ?" + return self.db.execute_update(query, (user_id,)) + + def count(self) -> int: + """Count total users.""" + query = "SELECT COUNT(*) as count FROM users" + result = self.db.execute_query(query) + return result[0]['count'] if result else 0 diff --git a/app/schemas/__init__.py b/app/schemas/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/schemas/user.py b/app/schemas/user.py new file mode 100644 index 0000000..868510c --- /dev/null +++ b/app/schemas/user.py @@ -0,0 +1,49 @@ +""" +User schema definitions using Pydantic. +""" +from typing import Optional +from datetime import datetime +from pydantic import BaseModel, EmailStr, Field + + +class UserBase(BaseModel): + """Base user schema.""" + email: EmailStr + username: str = Field(..., min_length=3, max_length=50) + full_name: Optional[str] = None + + +class UserCreate(UserBase): + """Schema for creating a user.""" + password: str = Field(..., min_length=8) + + +class UserUpdate(BaseModel): + """Schema for updating a user.""" + email: Optional[EmailStr] = None + username: Optional[str] = Field(None, min_length=3, max_length=50) + full_name: Optional[str] = None + password: Optional[str] = Field(None, min_length=8) + + +class UserInDB(UserBase): + """Schema for user in database.""" + id: int + hashed_password: str + is_active: bool + created_at: datetime + updated_at: datetime + + class Config: + from_attributes = True + + +class UserResponse(UserBase): + """Schema for user response.""" + id: int + is_active: bool + created_at: datetime + updated_at: datetime + + class Config: + from_attributes = True diff --git a/app/services/__init__.py b/app/services/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/services/user_service.py b/app/services/user_service.py new file mode 100644 index 0000000..86c26ba --- /dev/null +++ b/app/services/user_service.py @@ -0,0 +1,94 @@ +""" +User service layer with business logic. 
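+
+Example (illustrative; assumes a reachable PostgreSQL instance with tables created):
+
+    from app.schemas.user import UserCreate
+
+    service = UserService()
+    user = service.create_user(
+        UserCreate(email="user@example.com", username="user1", password="secret123!")
+    )
+    fetched = service.get_user(user.id)
+    assert fetched is not None and fetched.email == "user@example.com"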
+""" +from typing import List, Optional +from passlib.context import CryptContext + +from app.db.database import get_db +from app.models.user import UserModel +from app.schemas.user import UserCreate, UserUpdate, UserResponse + + +pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") + + +class UserService: + """User service for business logic.""" + + def __init__(self): + self.db = get_db() + self.user_model = UserModel(self.db) + + @staticmethod + def hash_password(password: str) -> str: + """Hash a password.""" + return pwd_context.hash(password) + + @staticmethod + def verify_password(plain_password: str, hashed_password: str) -> bool: + """Verify a password against a hash.""" + return pwd_context.verify(plain_password, hashed_password) + + def create_user(self, user: UserCreate) -> UserResponse: + """Create a new user.""" + # Check if email already exists + if self.user_model.get_by_email(user.email): + raise ValueError("Email already registered") + + # Check if username already exists + if self.user_model.get_by_username(user.username): + raise ValueError("Username already taken") + + # Hash password and create user + hashed_password = self.hash_password(user.password) + user_id = self.user_model.create(user, hashed_password) + + # Retrieve and return created user + user_dict = self.user_model.get_by_id(user_id) + return UserResponse(**user_dict) + + def get_user(self, user_id: int) -> Optional[UserResponse]: + """Get user by ID.""" + user_dict = self.user_model.get_by_id(user_id) + if user_dict: + return UserResponse(**user_dict) + return None + + def get_users(self, skip: int = 0, limit: int = 100) -> List[UserResponse]: + """Get all users with pagination.""" + users = self.user_model.get_all(skip, limit) + return [UserResponse(**user) for user in users] + + def update_user(self, user_id: int, user_update: UserUpdate) -> Optional[UserResponse]: + """Update user.""" + # Check if user exists + existing_user = self.user_model.get_by_id(user_id) + if not existing_user: + return None + + # Check email uniqueness if being updated + if user_update.email and user_update.email != existing_user['email']: + if self.user_model.get_by_email(user_update.email): + raise ValueError("Email already registered") + + # Check username uniqueness if being updated + if user_update.username and user_update.username != existing_user['username']: + if self.user_model.get_by_username(user_update.username): + raise ValueError("Username already taken") + + # Hash password if being updated + hashed_password = None + if user_update.password: + hashed_password = self.hash_password(user_update.password) + + # Update user + self.user_model.update(user_id, user_update, hashed_password) + + # Retrieve and return updated user + user_dict = self.user_model.get_by_id(user_id) + return UserResponse(**user_dict) + + def delete_user(self, user_id: int) -> bool: + """Delete user.""" + rows_affected = self.user_model.delete(user_id) + return rows_affected > 0 diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 0000000..550075a --- /dev/null +++ b/pytest.ini @@ -0,0 +1,10 @@ +[pytest] +testpaths = tests +python_files = test_*.py +python_classes = Test* +python_functions = test_* +addopts = -v --tb=short --strict-markers +markers = + unit: Unit tests + integration: Integration tests + slow: Slow running tests diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..8348813 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,8 @@ +fastapi==0.109.0 +uvicorn[standard]==0.27.0 
+pydantic==2.5.3 +pydantic-settings==2.1.0 +email-validator==2.1.0 +passlib[bcrypt]==1.7.4 +python-multipart==0.0.6 +psycopg2-binary==2.9.9 diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..26f3112 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,59 @@ +""" +Pytest configuration and fixtures. +""" +import os +import pytest +from fastapi.testclient import TestClient + +from app.main import app +from app.db.database import Database +from app.db.schema import create_tables, drop_tables + + +@pytest.fixture(scope="session") +def test_db(): + """Create a test database.""" + # Use environment variable or default test database + test_db_url = os.getenv( + "TEST_DATABASE_URL", + "postgresql://postgres:postgres@localhost:5432/test_db" + ) + + # Create test database connection + db = Database(test_db_url) + + # Drop and create tables + drop_tables() + create_tables() + + yield db + + # Cleanup + drop_tables() + db.close() + + +@pytest.fixture(scope="function") +def clean_db(test_db): + """Clean database before each test.""" + drop_tables() + create_tables() + yield test_db + + +@pytest.fixture(scope="module") +def client(): + """Create a test client.""" + with TestClient(app) as test_client: + yield test_client + + +@pytest.fixture +def sample_user_data(): + """Sample user data for testing.""" + return { + "email": "test@example.com", + "username": "testuser", + "full_name": "Test User", + "password": "testpassword123" + } diff --git a/tests/test_api.py b/tests/test_api.py new file mode 100644 index 0000000..d22dd82 --- /dev/null +++ b/tests/test_api.py @@ -0,0 +1,119 @@ +""" +API endpoint tests. +""" +import pytest +from fastapi import status + + +@pytest.mark.integration +def test_health_check(client): + """Test health check endpoint.""" + response = client.get("/health") + assert response.status_code == status.HTTP_200_OK + assert response.json() == {"status": "healthy"} + + +@pytest.mark.integration +def test_root_endpoint(client): + """Test root endpoint.""" + response = client.get("/") + assert response.status_code == status.HTTP_200_OK + assert "message" in response.json() + + +@pytest.mark.integration +def test_create_user(client, sample_user_data): + """Test creating a user.""" + response = client.post("/api/v1/users/", json=sample_user_data) + assert response.status_code == status.HTTP_201_CREATED + + data = response.json() + assert data["email"] == sample_user_data["email"] + assert data["username"] == sample_user_data["username"] + assert data["full_name"] == sample_user_data["full_name"] + assert "id" in data + assert "password" not in data + assert "hashed_password" not in data + + +@pytest.mark.integration +def test_create_user_duplicate_email(client, sample_user_data): + """Test creating a user with duplicate email.""" + # Create first user + client.post("/api/v1/users/", json=sample_user_data) + + # Try to create second user with same email + response = client.post("/api/v1/users/", json=sample_user_data) + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert "Email already registered" in response.json()["detail"] + + +@pytest.mark.integration +def test_get_users(client, sample_user_data): + """Test getting all users.""" + # Create a user first + client.post("/api/v1/users/", json=sample_user_data) + + # Get all users + response = client.get("/api/v1/users/") + assert response.status_code == status.HTTP_200_OK + + data = response.json() + 
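    # The endpoint should return a JSON array that includes the user created above. +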
assert isinstance(data, list) + assert len(data) > 0 + + +@pytest.mark.integration +def test_get_user_by_id(client, sample_user_data): + """Test getting a user by ID.""" + # Create a user + create_response = client.post("/api/v1/users/", json=sample_user_data) + user_id = create_response.json()["id"] + + # Get the user + response = client.get(f"/api/v1/users/{user_id}") + assert response.status_code == status.HTTP_200_OK + + data = response.json() + assert data["id"] == user_id + assert data["email"] == sample_user_data["email"] + + +@pytest.mark.integration +def test_get_user_not_found(client): + """Test getting a non-existent user.""" + response = client.get("/api/v1/users/99999") + assert response.status_code == status.HTTP_404_NOT_FOUND + + +@pytest.mark.integration +def test_update_user(client, sample_user_data): + """Test updating a user.""" + # Create a user + create_response = client.post("/api/v1/users/", json=sample_user_data) + user_id = create_response.json()["id"] + + # Update the user + update_data = {"full_name": "Updated Name"} + response = client.put(f"/api/v1/users/{user_id}", json=update_data) + assert response.status_code == status.HTTP_200_OK + + data = response.json() + assert data["full_name"] == "Updated Name" + assert data["email"] == sample_user_data["email"] + + +@pytest.mark.integration +def test_delete_user(client, sample_user_data): + """Test deleting a user.""" + # Create a user + create_response = client.post("/api/v1/users/", json=sample_user_data) + user_id = create_response.json()["id"] + + # Delete the user + response = client.delete(f"/api/v1/users/{user_id}") + assert response.status_code == status.HTTP_204_NO_CONTENT + + # Verify user is deleted + get_response = client.get(f"/api/v1/users/{user_id}") + assert get_response.status_code == status.HTTP_404_NOT_FOUND diff --git a/tests/test_models.py b/tests/test_models.py new file mode 100644 index 0000000..00401b9 --- /dev/null +++ b/tests/test_models.py @@ -0,0 +1,145 @@ +""" +Database model tests. 
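+
+Each test takes the clean_db fixture (tests/conftest.py), which re-creates the schema
+so every test is intended to start from an empty users table, e.g.:
+
+    def test_starts_empty(clean_db):
+        assert UserModel(clean_db).count() == 0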
+""" +import pytest + +from app.models.user import UserModel +from app.schemas.user import UserCreate + + +@pytest.mark.unit +def test_create_user(clean_db): + """Test creating a user in the database.""" + user_model = UserModel(clean_db) + user_data = UserCreate( + email="test@example.com", + username="testuser", + full_name="Test User", + password="testpass123" + ) + + user_id = user_model.create(user_data, "hashed_password") + assert user_id > 0 + + # Verify user was created + user = user_model.get_by_id(user_id) + assert user is not None + assert user["email"] == "test@example.com" + assert user["username"] == "testuser" + + +@pytest.mark.unit +def test_get_user_by_email(clean_db): + """Test getting a user by email.""" + user_model = UserModel(clean_db) + user_data = UserCreate( + email="test@example.com", + username="testuser", + full_name="Test User", + password="testpass123" + ) + + user_model.create(user_data, "hashed_password") + user = user_model.get_by_email("test@example.com") + + assert user is not None + assert user["email"] == "test@example.com" + + +@pytest.mark.unit +def test_get_user_by_username(clean_db): + """Test getting a user by username.""" + user_model = UserModel(clean_db) + user_data = UserCreate( + email="test@example.com", + username="testuser", + full_name="Test User", + password="testpass123" + ) + + user_model.create(user_data, "hashed_password") + user = user_model.get_by_username("testuser") + + assert user is not None + assert user["username"] == "testuser" + + +@pytest.mark.unit +def test_get_all_users(clean_db): + """Test getting all users.""" + user_model = UserModel(clean_db) + + # Create multiple users + for i in range(3): + user_data = UserCreate( + email=f"test{i}@example.com", + username=f"testuser{i}", + full_name=f"Test User {i}", + password="testpass123" + ) + user_model.create(user_data, "hashed_password") + + users = user_model.get_all() + assert len(users) == 3 + + +@pytest.mark.unit +def test_update_user(clean_db): + """Test updating a user.""" + user_model = UserModel(clean_db) + user_data = UserCreate( + email="test@example.com", + username="testuser", + full_name="Test User", + password="testpass123" + ) + + user_id = user_model.create(user_data, "hashed_password") + + from app.schemas.user import UserUpdate + update_data = UserUpdate(full_name="Updated Name") + rows_affected = user_model.update(user_id, update_data) + + assert rows_affected > 0 + + updated_user = user_model.get_by_id(user_id) + assert updated_user["full_name"] == "Updated Name" + + +@pytest.mark.unit +def test_delete_user(clean_db): + """Test deleting a user.""" + user_model = UserModel(clean_db) + user_data = UserCreate( + email="test@example.com", + username="testuser", + full_name="Test User", + password="testpass123" + ) + + user_id = user_model.create(user_data, "hashed_password") + rows_affected = user_model.delete(user_id) + + assert rows_affected > 0 + + deleted_user = user_model.get_by_id(user_id) + assert deleted_user is None + + +@pytest.mark.unit +def test_count_users(clean_db): + """Test counting users.""" + user_model = UserModel(clean_db) + + # Create multiple users + for i in range(5): + user_data = UserCreate( + email=f"test{i}@example.com", + username=f"testuser{i}", + full_name=f"Test User {i}", + password="testpass123" + ) + user_model.create(user_data, "hashed_password") + + count = user_model.count() + assert count == 5 diff --git a/tests/test_services.py b/tests/test_services.py new file mode 100644 index 0000000..626b8c6 --- /dev/null +++ 
b/tests/test_services.py
@@ -0,0 +1,127 @@
+"""
+Service layer tests.
+"""
+import pytest
+
+from app.services.user_service import UserService
+from app.schemas.user import UserCreate, UserUpdate
+
+
+@pytest.mark.unit
+def test_hash_password():
+    """Test password hashing."""
+    password = "testpassword123"
+    hashed = UserService.hash_password(password)
+
+    assert hashed != password
+    assert UserService.verify_password(password, hashed)
+
+
+@pytest.mark.unit
+def test_verify_password_invalid():
+    """Test password verification with wrong password."""
+    password = "testpassword123"
+    hashed = UserService.hash_password(password)
+
+    assert not UserService.verify_password("wrongpassword", hashed)
+
+
+@pytest.mark.integration
+def test_create_user_service(clean_db):
+    """Test creating a user through service."""
+    service = UserService()
+    user_data = UserCreate(
+        email="test@example.com",
+        username="testuser",
+        full_name="Test User",
+        password="testpass123"
+    )
+
+    user = service.create_user(user_data)
+
+    assert user.email == "test@example.com"
+    assert user.username == "testuser"
+    assert user.id > 0
+
+
+@pytest.mark.integration
+def test_create_user_duplicate_email(clean_db):
+    """Test creating user with duplicate email."""
+    service = UserService()
+    user_data = UserCreate(
+        email="test@example.com",
+        username="testuser",
+        full_name="Test User",
+        password="testpass123"
+    )
+
+    service.create_user(user_data)
+
+    # Try to create another user with same email
+    duplicate_data = UserCreate(
+        email="test@example.com",
+        username="anotheruser",
+        full_name="Another User",
+        password="testpass123"
+    )
+
+    with pytest.raises(ValueError, match="Email already registered"):
+        service.create_user(duplicate_data)
+
+
+@pytest.mark.integration
+def test_get_user_service(clean_db):
+    """Test getting a user through service."""
+    service = UserService()
+    user_data = UserCreate(
+        email="test@example.com",
+        username="testuser",
+        full_name="Test User",
+        password="testpass123"
+    )
+
+    created_user = service.create_user(user_data)
+    retrieved_user = service.get_user(created_user.id)
+
+    assert retrieved_user is not None
+    assert retrieved_user.id == created_user.id
+
+
+@pytest.mark.integration
+def test_update_user_service(clean_db):
+    """Test updating a user through service."""
+    service = UserService()
+    user_data = UserCreate(
+        email="test@example.com",
+        username="testuser",
+        full_name="Test User",
+        password="testpass123"
+    )
+
+    created_user = service.create_user(user_data)
+
+    update_data = UserUpdate(full_name="Updated Name")
+    updated_user = service.update_user(created_user.id, update_data)
+
+    assert updated_user is not None
+    assert updated_user.full_name == "Updated Name"
+
+
+@pytest.mark.integration
+def test_delete_user_service(clean_db):
+    """Test deleting a user through service."""
+    service = UserService()
+    user_data = UserCreate(
+        email="test@example.com",
+        username="testuser",
+        full_name="Test User",
+        password="testpass123"
+    )
+
+    created_user = service.create_user(user_data)
+    deleted = service.delete_user(created_user.id)
+
+    assert deleted is True
+
+    retrieved_user = service.get_user(created_user.id)
+    assert retrieved_user is None
From c229fa4a6c359e042e0a3500138bce82575bfc38 Mon Sep 17 00:00:00 2001
From: dhuzjak
Date: Thu, 18 Dec 2025 09:27:01 +0100
Subject: [PATCH 02/17] Switch to Poetry

Switch to Poetry dependency management.
Create dev and prod environments with separate dependencies for each.
Add a Makefile helper to run the app, tests, and other common tasks.

---
 .flake8 | 14 +++
 .gitignore |
6 ++ Makefile | 49 +++++++++++ PROJECT_STRUCTURE.md | 199 ++++++++++++++++++++++++++++++++++++++----- pyproject.toml | 90 +++++++++++++++++++ pytest.ini | 10 --- 6 files changed, 335 insertions(+), 33 deletions(-) create mode 100644 .flake8 create mode 100644 Makefile create mode 100644 pyproject.toml delete mode 100644 pytest.ini diff --git a/.flake8 b/.flake8 new file mode 100644 index 0000000..fa87f42 --- /dev/null +++ b/.flake8 @@ -0,0 +1,14 @@ +[flake8] +max-line-length = 100 +extend-ignore = E203, E266, E501, W503 +exclude = + .git, + __pycache__, + .venv, + venv, + .eggs, + *.egg, + build, + dist, + .tox +max-complexity = 10 diff --git a/.gitignore b/.gitignore index 5680070..a311254 100644 --- a/.gitignore +++ b/.gitignore @@ -26,6 +26,12 @@ env/ ENV/ env.bak/ venv.bak/ +.venv/ + +# Poetry +# Note: poetry.lock should typically be committed for applications +# Uncomment the line below if you want to ignore it +# poetry.lock # IDEs .vscode/ diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..d085693 --- /dev/null +++ b/Makefile @@ -0,0 +1,49 @@ +.PHONY: help install install-prod run test test-cov format lint check clean + +help: + @echo "Available commands:" + @echo " make install - Install all dependencies (dev + prod)" + @echo " make install-prod - Install production dependencies only" + @echo " make run - Run the development server" + @echo " make test - Run tests" + @echo " make test-cov - Run tests with coverage report" + @echo " make format - Format code with black and isort" + @echo " make lint - Run linting with flake8" + @echo " make check - Run all code quality checks" + @echo " make clean - Clean up cache files" + +install: + poetry install + +install-prod: + poetry install --only main + +run: + poetry run uvicorn app.main:app --reload + +test: + poetry run pytest + +test-cov: + poetry run pytest --cov=app --cov-report=html --cov-report=term + +format: + poetry run black app/ tests/ + poetry run isort app/ tests/ + +lint: + poetry run flake8 app/ tests/ + poetry run mypy app/ + +check: format lint test + @echo "All checks passed!" + +clean: + find . -type d -name "__pycache__" -exec rm -rf {} + 2>/dev/null || true + find . -type f -name "*.pyc" -delete + find . -type f -name "*.pyo" -delete + find . 
-type f -name "*.coverage" -delete + rm -rf .pytest_cache + rm -rf htmlcov + rm -rf .mypy_cache + @echo "Cleaned up cache files" diff --git a/PROJECT_STRUCTURE.md b/PROJECT_STRUCTURE.md index 8de4813..547db17 100644 --- a/PROJECT_STRUCTURE.md +++ b/PROJECT_STRUCTURE.md @@ -28,17 +28,18 @@ A FastAPI project with a database layer using raw SQL and comprehensive pytest t │ ├── test_models.py # Model/database tests │ └── test_services.py # Service layer tests ├── .env.example # Example environment variables +├── .flake8 # Flake8 linter configuration ├── .gitignore # Git ignore file -├── pytest.ini # Pytest configuration -├── requirements.txt # Project dependencies -├── requirements-dev.txt # Development dependencies +├── Makefile # Common commands for development +├── pyproject.toml # Poetry configuration and dependencies └── README.md # Project documentation ``` ## Features - FastAPI web framework -- Raw SQL database layer (SQLite by default, easily adaptable to PostgreSQL) +- PostgreSQL database with raw SQL (no ORM) +- Poetry for dependency management - Pydantic for data validation - Comprehensive test coverage with pytest - Clean architecture with separation of concerns @@ -48,26 +49,65 @@ A FastAPI project with a database layer using raw SQL and comprehensive pytest t ## Installation -1. Create a virtual environment: +### Prerequisites +- Python 3.9 or higher +- Poetry ([installation guide](https://python-poetry.org/docs/#installation)) +- PostgreSQL server + +### Setup Steps + +#### For Development Environment + +1. Install all dependencies including dev tools (testing, linting, formatting): ```bash -python -m venv venv -source venv/bin/activate # On Windows: venv\Scripts\activate +poetry install ``` -2. Install dependencies: +#### For Production Environment + +1. Install only production dependencies (no dev tools): ```bash -pip install -r requirements-dev.txt +poetry install --only main ``` -3. Create environment file: +Or exclude dev dependencies: +```bash +poetry install --without dev +``` + +2. Create environment file: ```bash cp .env.example .env ``` +3. Update the `.env` file with your PostgreSQL credentials: +``` +DATABASE_URL=postgresql://username:password@localhost:5432/app_db +TEST_DATABASE_URL=postgresql://username:password@localhost:5432/test_db +``` + +4. 
Create the databases: +```bash +# Using psql or your preferred PostgreSQL client +createdb app_db +createdb test_db +``` + ## Running the Application -Start the development server: +### Using Make (recommended) +```bash +make run +``` + +### Using Poetry directly ```bash +poetry run uvicorn app.main:app --reload +``` + +### Or activate the Poetry shell first +```bash +poetry shell uvicorn app.main:app --reload ``` @@ -78,25 +118,37 @@ The API will be available at: ## Running Tests -Run all tests: +### Using Make (recommended) ```bash -pytest +make test # Run all tests +make test-cov # Run tests with coverage report +``` + +### Using Poetry directly +```bash +poetry run pytest ``` Run with coverage: ```bash -pytest --cov=app --cov-report=html +poetry run pytest --cov=app --cov-report=html ``` Run specific test categories: ```bash -pytest -m unit # Run only unit tests -pytest -m integration # Run only integration tests +poetry run pytest -m unit # Run only unit tests +poetry run pytest -m integration # Run only integration tests ``` Run specific test file: ```bash -pytest tests/test_api.py +poetry run pytest tests/test_api.py +``` + +Or activate the Poetry shell and run pytest directly: +```bash +poetry shell +pytest ``` ## API Endpoints @@ -114,16 +166,23 @@ pytest tests/test_api.py ## Database -This project uses raw SQL queries (no ORM). The database layer provides: +This project uses PostgreSQL with raw SQL queries (no ORM). The database layer provides: - Connection management with context managers - Transaction handling (commit/rollback) -- Parameterized queries for security +- Parameterized queries for security (SQL injection protection) - Custom query execution methods +- PostgreSQL-specific features (SERIAL, RETURNING, etc.) + +### Database Configuration + +Connection string format: +``` +postgresql://username:password@host:port/database +``` -The example uses SQLite, but can easily be adapted to PostgreSQL by: -1. Installing `psycopg2` or `asyncpg` -2. Updating the `DATABASE_URL` in `.env` -3. Modifying `app/db/database.py` to use PostgreSQL connection +Configure in `.env` file: +- `DATABASE_URL` - Main application database +- `TEST_DATABASE_URL` - Separate database for running tests ## Testing @@ -142,6 +201,100 @@ The project follows clean architecture principles: 4. **Schema Layer** (`app/schemas/`): Data validation 5. 
**Database Layer** (`app/db/`): Database connection and schema +## Managing Dependencies with Poetry + +### Installing Dependencies + +Development (includes all tools): +```bash +poetry install +``` + +Production only: +```bash +poetry install --only main +``` + +Without dev dependencies: +```bash +poetry install --without dev +``` + +### Adding Dependencies + +Add a production dependency: +```bash +poetry add package-name +``` + +Add a development dependency: +```bash +poetry add --group dev package-name +``` + +### Other Commands + +Update dependencies: +```bash +poetry update +``` + +Show installed packages: +```bash +poetry show +``` + +Export requirements.txt (if needed for Docker/compatibility): +```bash +# Production only +poetry export -f requirements.txt --output requirements.txt --without-hashes --without dev + +# All dependencies +poetry export -f requirements.txt --output requirements-dev.txt --without-hashes +``` + +## Code Quality Tools (Dev Environment) + +The dev environment includes tools for code quality: + +### Using Make (recommended) +```bash +make format # Format code with black and isort +make lint # Lint with flake8 and type check with mypy +make check # Run format, lint, and tests +make clean # Clean up cache files +``` + +### Using Poetry directly + +Format code with Black: +```bash +poetry run black app/ tests/ +``` + +Sort imports with isort: +```bash +poetry run isort app/ tests/ +``` + +Lint with flake8: +```bash +poetry run flake8 app/ tests/ +``` + +Type check with mypy: +```bash +poetry run mypy app/ +``` + +Run all quality checks: +```bash +poetry run black app/ tests/ && \ +poetry run isort app/ tests/ && \ +poetry run flake8 app/ tests/ && \ +poetry run mypy app/ +``` + ## Adapting for Other Use Cases This structure provides a foundation that can be adapted for various applications: diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..da1309d --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,90 @@ +[tool.poetry] +name = "fastapi-postgres-app" +version = "1.0.0" +description = "FastAPI project with raw SQL database layer using PostgreSQL" +authors = ["Your Name "] +readme = "README.md" +packages = [{include = "app"}] + +# Production dependencies only +[tool.poetry.dependencies] +python = "^3.9" +fastapi = "^0.109.0" +uvicorn = {extras = ["standard"], version = "^0.27.0"} +pydantic = "^2.5.3" +pydantic-settings = "^2.1.0" +email-validator = "^2.1.0" +passlib = {extras = ["bcrypt"], version = "^1.7.4"} +python-multipart = "^0.0.6" +psycopg2-binary = "^2.9.9" + +# Development dependencies (testing, linting, formatting) +[tool.poetry.group.dev] +optional = true + +[tool.poetry.group.dev.dependencies] +# Testing +pytest = "^7.4.4" +pytest-asyncio = "^0.23.3" +pytest-cov = "^4.1.0" +httpx = "^0.26.0" + +# Code quality +black = "^23.12.1" +isort = "^5.13.2" +flake8 = "^7.0.0" +mypy = "^1.8.0" + +# Development tools +ipython = "^8.20.0" +python-dotenv = "^1.0.0" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" + +[tool.pytest.ini_options] +testpaths = ["tests"] +python_files = "test_*.py" +python_classes = "Test*" +python_functions = "test_*" +addopts = "-v --tb=short --strict-markers" +markers = [ + "unit: Unit tests", + "integration: Integration tests", + "slow: Slow running tests", +] + +# Black code formatter configuration +[tool.black] +line-length = 100 +target-version = ['py39', 'py310', 'py311'] +include = '\.pyi?$' +exclude = ''' +/( + \.git + | \.venv + | \.eggs + | \.tox + | build + | dist 
+)/ +''' + +# isort import sorting configuration +[tool.isort] +profile = "black" +line_length = 100 +multi_line_output = 3 +include_trailing_comma = true +force_grid_wrap = 0 +use_parentheses = true +ensure_newline_before_comments = true + +# mypy type checking configuration +[tool.mypy] +python_version = "3.9" +warn_return_any = true +warn_unused_configs = true +disallow_untyped_defs = false +ignore_missing_imports = true diff --git a/pytest.ini b/pytest.ini deleted file mode 100644 index 550075a..0000000 --- a/pytest.ini +++ /dev/null @@ -1,10 +0,0 @@ -[pytest] -testpaths = tests -python_files = test_*.py -python_classes = Test* -python_functions = test_* -addopts = -v --tb=short --strict-markers -markers = - unit: Unit tests - integration: Integration tests - slow: Slow running tests From 3951e3d2964b85769011f2c1d2b70e4fb873860d Mon Sep 17 00:00:00 2001 From: dhuzjak Date: Thu, 18 Dec 2025 10:54:39 +0100 Subject: [PATCH 03/17] Separate environments Setup environment specific config via env variables Use separate settings classes just in case some environments require additional variables --- .env.development | 7 +++ .env.example | 20 +++++- .env.production.example | 8 +++ .env.testing | 7 +++ .gitignore | 1 + Makefile | 19 ++++-- PROJECT_STRUCTURE.md | 132 ++++++++++++++++++++++++++++++++++++++++ app/core/config.py | 126 ++++++++++++++++++++++++++++++++++---- app/main.py | 18 +++--- pyproject.toml | 4 ++ tests/conftest.py | 22 ++++--- 11 files changed, 325 insertions(+), 39 deletions(-) create mode 100644 .env.development create mode 100644 .env.production.example create mode 100644 .env.testing diff --git a/.env.development b/.env.development new file mode 100644 index 0000000..cc395a8 --- /dev/null +++ b/.env.development @@ -0,0 +1,7 @@ +APP_ENV=development +DATABASE_URL=postgresql://postgres:postgres@localhost:5432/app_db +TEST_DATABASE_URL=postgresql://postgres:postgres@localhost:5432/test_db +DEBUG=true +LOG_LEVEL=DEBUG +ALLOWED_ORIGINS=http://localhost:3000,http://localhost:8000,http://localhost:5173 +ENABLE_CORS=true diff --git a/.env.example b/.env.example index dd9a468..783815d 100644 --- a/.env.example +++ b/.env.example @@ -1,3 +1,7 @@ +# Environment Selection +# Options: development, testing, production +APP_ENV=development + # Application Settings PROJECT_NAME=FastAPI Project VERSION=1.0.0 @@ -6,12 +10,22 @@ DESCRIPTION=FastAPI project with raw SQL database layer # Database Settings # Format: postgresql://user:password@host:port/database DATABASE_URL=postgresql://postgres:postgres@localhost:5432/app_db - -# Test Database Settings (used by pytest) TEST_DATABASE_URL=postgresql://postgres:postgres@localhost:5432/test_db -# CORS Settings (comma-separated list) +# CORS Settings (comma-separated) ALLOWED_ORIGINS=http://localhost:3000,http://localhost:8000 +ENABLE_CORS=true + +# Logging +DEBUG=true +LOG_LEVEL=DEBUG # API Settings API_V1_PREFIX=/api/v1 + +# Security +SECRET_KEY=dev-secret-key-change-in-production + +# External Services (optional) +REDIS_URL=redis://localhost:6379/0 +EXTERNAL_API_URL=http://localhost:9000 diff --git a/.env.production.example b/.env.production.example new file mode 100644 index 0000000..de6f818 --- /dev/null +++ b/.env.production.example @@ -0,0 +1,8 @@ +APP_ENV=production +# REQUIRED: Set via environment variables in production +DATABASE_URL=postgresql://user:password@host:5432/prod_db +DEBUG=false +LOG_LEVEL=WARNING +ALLOWED_ORIGINS=https://yourdomain.com,https://www.yourdomain.com +ENABLE_CORS=true +SECRET_KEY= # REQUIRED: Must be set via 
environment variable diff --git a/.env.testing b/.env.testing new file mode 100644 index 0000000..a9c2236 --- /dev/null +++ b/.env.testing @@ -0,0 +1,7 @@ +APP_ENV=testing +DATABASE_URL=postgresql://postgres:postgres@localhost:5432/test_db +TEST_DATABASE_URL=postgresql://postgres:postgres@localhost:5432/test_db +DEBUG=true +LOG_LEVEL=INFO +ALLOWED_ORIGINS=* +ENABLE_CORS=true diff --git a/.gitignore b/.gitignore index a311254..96de303 100644 --- a/.gitignore +++ b/.gitignore @@ -43,6 +43,7 @@ venv.bak/ # Environment .env .env.local +.env.production # Database *.db diff --git a/Makefile b/Makefile index d085693..37b84bb 100644 --- a/Makefile +++ b/Makefile @@ -1,11 +1,13 @@ -.PHONY: help install install-prod run test test-cov format lint check clean +.PHONY: help install install-prod run run-dev run-prod test test-cov format lint check clean help: @echo "Available commands:" @echo " make install - Install all dependencies (dev + prod)" @echo " make install-prod - Install production dependencies only" - @echo " make run - Run the development server" - @echo " make test - Run tests" + @echo " make run - Run the development server (default: development)" + @echo " make run-dev - Run the development server (APP_ENV=development)" + @echo " make run-prod - Run the production server (APP_ENV=production)" + @echo " make test - Run tests (APP_ENV=testing)" @echo " make test-cov - Run tests with coverage report" @echo " make format - Format code with black and isort" @echo " make lint - Run linting with flake8" @@ -18,11 +20,16 @@ install: install-prod: poetry install --only main -run: - poetry run uvicorn app.main:app --reload +run: run-dev + +run-dev: + APP_ENV=development poetry run uvicorn app.main:app --reload + +run-prod: + APP_ENV=production poetry run uvicorn app.main:app test: - poetry run pytest + APP_ENV=testing poetry run pytest test-cov: poetry run pytest --cov=app --cov-report=html --cov-report=term diff --git a/PROJECT_STRUCTURE.md b/PROJECT_STRUCTURE.md index 547db17..aa71e93 100644 --- a/PROJECT_STRUCTURE.md +++ b/PROJECT_STRUCTURE.md @@ -184,6 +184,138 @@ Configure in `.env` file: - `DATABASE_URL` - Main application database - `TEST_DATABASE_URL` - Separate database for running tests +## Environment Configuration + +This project supports environment-specific configuration using the `APP_ENV` environment variable. + +### Supported Environments + +- **development** - Local development with debug enabled, verbose logging +- **testing** - Test environment for running automated tests +- **production** - Production environment with strict validation and security + +### Environment Selection + +Set the `APP_ENV` environment variable to specify which environment to use: + +```bash +# Development (default) +APP_ENV=development poetry run uvicorn app.main:app --reload + +# Testing +APP_ENV=testing poetry run pytest + +# Production +APP_ENV=production poetry run uvicorn app.main:app +``` + +### Environment Files + +The project uses layered environment file loading: + +1. **`.env`** - Base configuration (lowest priority) +2. **`.env.{environment}`** - Environment-specific overrides +3. 
**Environment variables** - Highest priority, overrides all files + +**Available env files:** +- `.env.development` - Development defaults (committed) +- `.env.testing` - Testing configuration (committed) +- `.env.production.example` - Production template (committed as example) +- `.env.production` - Actual production config (NOT committed, gitignored) + +### Configuration by Environment + +#### Development +```bash +# Characteristics +- DEBUG=true +- LOG_LEVEL=DEBUG +- Permissive CORS (multiple localhost origins) +- Default database credentials +- Dev secret keys (insecure, for dev only) +``` + +#### Testing +```bash +# Characteristics +- DEBUG=true +- LOG_LEVEL=INFO +- CORS allows all origins (*) +- Uses test database for isolation +- Mock external services +- Test credentials +``` + +#### Production +```bash +# Characteristics +- DEBUG=false +- LOG_LEVEL=WARNING +- Strict CORS validation +- REQUIRES: DATABASE_URL via environment variable +- REQUIRES: SECRET_KEY via environment variable +- REQUIRES: Explicit ALLOWED_ORIGINS (no wildcards) +- Validates critical fields at startup +``` + +### Production Deployment + +For production, **never** commit `.env.production` with real secrets. Instead: + +**Using Environment Variables (Docker/Kubernetes):** +```bash +export APP_ENV=production +export DATABASE_URL=postgresql://user:pass@host:5432/prod_db +export SECRET_KEY=your-production-secret-key +export ALLOWED_ORIGINS=https://yourdomain.com,https://www.yourdomain.com +poetry run uvicorn app.main:app +``` + +**Docker Compose:** +```yaml +services: + app: + environment: + - APP_ENV=production + - DATABASE_URL=${DATABASE_URL} + - SECRET_KEY=${SECRET_KEY} + - ALLOWED_ORIGINS=https://yourdomain.com +``` + +**Kubernetes:** +```yaml +env: + - name: APP_ENV + value: "production" + - name: DATABASE_URL + valueFrom: + secretKeyRef: + name: db-credentials + key: url + - name: SECRET_KEY + valueFrom: + secretKeyRef: + name: app-secrets + key: secret-key +``` + +### Configuration Variables + +Core configuration variables available across all environments: + +| Variable | Description | Required | +|----------|-------------|----------| +| `APP_ENV` | Environment selection (development/testing/production) | No (default: development) | +| `DATABASE_URL` | PostgreSQL connection string | Yes (production), No (dev/test) | +| `TEST_DATABASE_URL` | Test database connection string | No | +| `DEBUG` | Enable debug mode | No | +| `LOG_LEVEL` | Logging level (DEBUG/INFO/WARNING/ERROR) | No | +| `ALLOWED_ORIGINS` | CORS allowed origins (comma-separated) | Yes (production) | +| `ENABLE_CORS` | Enable/disable CORS middleware | No (default: true) | +| `SECRET_KEY` | Application secret key | Yes (production) | +| `REDIS_URL` | Redis connection string (optional) | No | +| `EXTERNAL_API_URL` | External API endpoint (optional) | No | + ## Testing The test suite includes: diff --git a/app/core/config.py b/app/core/config.py index 5329d88..4e8299e 100644 --- a/app/core/config.py +++ b/app/core/config.py @@ -1,29 +1,129 @@ """ -Application configuration settings. +Application configuration settings with environment-specific support. 
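+
+Settings are resolved once per process through the cached factory defined below,
+e.g. (illustrative):
+
+    from app.core.config import get_settings
+
+    settings = get_settings()  # DevelopmentSettings unless APP_ENV selects another class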
""" +import os +from functools import lru_cache from typing import List -from pydantic_settings import BaseSettings +from pydantic import field_validator +from pydantic_settings import BaseSettings, SettingsConfigDict -class Settings(BaseSettings): - """Application settings.""" +class SettingsBase(BaseSettings): + """Base settings shared across all environments.""" + # App metadata PROJECT_NAME: str = "FastAPI Project" VERSION: str = "1.0.0" DESCRIPTION: str = "FastAPI project with raw SQL database layer" + API_V1_PREFIX: str = "/api/v1" + APP_ENV: str = "development" + + # Database + DATABASE_URL: str + TEST_DATABASE_URL: str = "" + + # CORS + ALLOWED_ORIGINS: List[str] = ["http://localhost:8000"] + ENABLE_CORS: bool = True + + # Logging/Debug + LOG_LEVEL: str = "INFO" + DEBUG: bool = False + + # External services (examples for future use) + REDIS_URL: str = "" + EXTERNAL_API_URL: str = "" + + # Security + SECRET_KEY: str = "" - # Database settings + model_config = SettingsConfigDict( + env_file=(".env", f".env.{os.getenv('APP_ENV', 'development')}"), + env_file_encoding="utf-8", + case_sensitive=True, + extra="ignore" + ) + + @field_validator("ALLOWED_ORIGINS", mode="before") + @classmethod + def parse_cors_origins(cls, v): + """Parse comma-separated CORS origins from env var.""" + if isinstance(v, str): + return [origin.strip() for origin in v.split(",")] + return v + + +class DevelopmentSettings(SettingsBase): + """Development environment settings.""" + + APP_ENV: str = "development" + DEBUG: bool = True + LOG_LEVEL: str = "DEBUG" DATABASE_URL: str = "postgresql://postgres:postgres@localhost:5432/app_db" + TEST_DATABASE_URL: str = "postgresql://postgres:postgres@localhost:5432/test_db" + ALLOWED_ORIGINS: List[str] = [ + "http://localhost:3000", + "http://localhost:8000", + "http://localhost:5173" + ] + SECRET_KEY: str = "dev-secret-key-change-in-production" - # CORS settings - ALLOWED_ORIGINS: List[str] = ["http://localhost:3000", "http://localhost:8000"] - # API settings - API_V1_PREFIX: str = "/api/v1" +class TestingSettings(SettingsBase): + """Testing environment settings.""" + + APP_ENV: str = "testing" + DEBUG: bool = True + LOG_LEVEL: str = "INFO" + DATABASE_URL: str = "postgresql://postgres:postgres@localhost:5432/test_db" + TEST_DATABASE_URL: str = "postgresql://postgres:postgres@localhost:5432/test_db" + ALLOWED_ORIGINS: List[str] = ["*"] + SECRET_KEY: str = "test-secret-key" + + +class ProductionSettings(SettingsBase): + """Production environment settings with strict validation.""" + + APP_ENV: str = "production" + DEBUG: bool = False + LOG_LEVEL: str = "WARNING" + + @field_validator("SECRET_KEY", "DATABASE_URL") + @classmethod + def validate_required_fields(cls, v, info): + """Ensure critical fields are set in production.""" + if not v: + raise ValueError(f"{info.field_name} must be set in production environment") + return v + + @field_validator("ALLOWED_ORIGINS") + @classmethod + def validate_cors_origins(cls, v): + """Ensure CORS origins are explicitly set in production.""" + if not v or v == ["*"]: + raise ValueError("ALLOWED_ORIGINS must be explicitly set in production") + return v + + +@lru_cache() +def get_settings() -> SettingsBase: + """Factory function to get environment-specific settings (cached singleton).""" + env = os.getenv("APP_ENV", "development").lower() + + settings_map = { + "development": DevelopmentSettings, + "testing": TestingSettings, + "production": ProductionSettings, + } + + settings_class = settings_map.get(env) + if settings_class is None: + 
raise ValueError( + f"Invalid APP_ENV: {env}. Must be one of: {', '.join(settings_map.keys())}" + ) - class Config: - env_file = ".env" - case_sensitive = True + return settings_class() -settings = Settings() +# Backwards compatible export +settings = get_settings() diff --git a/app/main.py b/app/main.py index 71606af..c64e7e0 100644 --- a/app/main.py +++ b/app/main.py @@ -11,16 +11,18 @@ title=settings.PROJECT_NAME, version=settings.VERSION, description=settings.DESCRIPTION, + debug=settings.DEBUG, ) -# Configure CORS -app.add_middleware( - CORSMiddleware, - allow_origins=settings.ALLOWED_ORIGINS, - allow_credentials=True, - allow_methods=["*"], - allow_headers=["*"], -) +# Configure CORS (conditional) +if settings.ENABLE_CORS: + app.add_middleware( + CORSMiddleware, + allow_origins=settings.ALLOWED_ORIGINS, + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], + ) # Include routers app.include_router(health.router, tags=["health"]) diff --git a/pyproject.toml b/pyproject.toml index da1309d..350c368 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -27,6 +27,7 @@ optional = true pytest = "^7.4.4" pytest-asyncio = "^0.23.3" pytest-cov = "^4.1.0" +pytest-env = "^1.1.3" httpx = "^0.26.0" # Code quality @@ -54,6 +55,9 @@ markers = [ "integration: Integration tests", "slow: Slow running tests", ] +env = [ + "APP_ENV=testing" +] # Black code formatter configuration [tool.black] diff --git a/tests/conftest.py b/tests/conftest.py index 26f3112..8f181aa 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -5,22 +5,26 @@ import pytest from fastapi.testclient import TestClient +# Set test environment BEFORE importing app +os.environ["APP_ENV"] = "testing" + from app.main import app +from app.core.config import get_settings from app.db.database import Database from app.db.schema import create_tables, drop_tables @pytest.fixture(scope="session") -def test_db(): +def test_settings(): + """Get test settings.""" + return get_settings() + + +@pytest.fixture(scope="session") +def test_db(test_settings): """Create a test database.""" - # Use environment variable or default test database - test_db_url = os.getenv( - "TEST_DATABASE_URL", - "postgresql://postgres:postgres@localhost:5432/test_db" - ) - - # Create test database connection - db = Database(test_db_url) + # Use settings object instead of os.getenv + db = Database(test_settings.TEST_DATABASE_URL) # Drop and create tables drop_tables() From 0ae8df98389448935a832a045e76357e197f1713 Mon Sep 17 00:00:00 2001 From: dhuzjak Date: Thu, 18 Dec 2025 11:07:38 +0100 Subject: [PATCH 04/17] Add init of project to Makefile Setup the project with virtualenv Install poetry if needed --- Makefile | 71 ++++++++++++++++++++++++++++++++++- PROJECT_STRUCTURE.md | 89 +++++++++++++++++++++++++++++++++++--------- 2 files changed, 141 insertions(+), 19 deletions(-) diff --git a/Makefile b/Makefile index 37b84bb..e043ebe 100644 --- a/Makefile +++ b/Makefile @@ -1,9 +1,13 @@ -.PHONY: help install install-prod run run-dev run-prod test test-cov format lint check clean +.PHONY: help init setup check-poetry install-poetry install install-prod setup-db run run-dev run-prod test test-cov format lint check clean help: @echo "Available commands:" + @echo " make init - Initialize project (install Poetry, setup venv, install deps)" + @echo " make setup - Alias for init" + @echo " make check-poetry - Check if Poetry is installed" @echo " make install - Install all dependencies (dev + prod)" @echo " make install-prod - Install production dependencies only" + 
@echo " make setup-db - Create PostgreSQL databases (app_db, test_db)" @echo " make run - Run the development server (default: development)" @echo " make run-dev - Run the development server (APP_ENV=development)" @echo " make run-prod - Run the production server (APP_ENV=production)" @@ -14,6 +18,71 @@ help: @echo " make check - Run all code quality checks" @echo " make clean - Clean up cache files" +# Initialize project (first-time setup) +init: check-poetry + @echo "🔧 Configuring Poetry to use local virtual environment..." + poetry config virtualenvs.in-project true + @echo "📦 Installing dependencies..." + poetry install + @echo "📄 Setting up environment file..." + @if [ ! -f .env ]; then \ + cp .env.example .env; \ + echo "✅ Created .env from .env.example"; \ + else \ + echo "⚠️ .env already exists, skipping..."; \ + fi + @echo "" + @echo "✅ Project initialized successfully!" + @echo "" + @echo "Next steps:" + @echo " 1. Update .env with your database credentials if needed" + @echo " 2. Create databases: make setup-db" + @echo " 3. Run the app: make run" + @echo "" + +# Alias for init +setup: init + +# Check if Poetry is installed +check-poetry: + @echo "🔍 Checking for Poetry installation..." + @if command -v poetry >/dev/null 2>&1; then \ + echo "✅ Poetry is installed (version: $$(poetry --version))"; \ + else \ + echo "❌ Poetry is not installed!"; \ + echo ""; \ + echo "Please install Poetry using one of these methods:"; \ + echo ""; \ + echo "Official installer (recommended):"; \ + echo " curl -sSL https://install.python-poetry.org | python3 -"; \ + echo ""; \ + echo "Or using pipx:"; \ + echo " pipx install poetry"; \ + echo ""; \ + echo "Or using Homebrew (macOS):"; \ + echo " brew install poetry"; \ + echo ""; \ + echo "After installation, restart your terminal and run 'make init' again."; \ + echo ""; \ + echo "For more info: https://python-poetry.org/docs/#installation"; \ + exit 1; \ + fi + +# Create PostgreSQL databases +setup-db: + @echo "🗄️ Creating PostgreSQL databases..." + @if command -v createdb >/dev/null 2>&1; then \ + createdb app_db 2>/dev/null && echo "✅ Created app_db" || echo "⚠️ app_db already exists or failed"; \ + createdb test_db 2>/dev/null && echo "✅ Created test_db" || echo "⚠️ test_db already exists or failed"; \ + else \ + echo "❌ createdb command not found. Please install PostgreSQL."; \ + echo ""; \ + echo "macOS: brew install postgresql"; \ + echo "Ubuntu/Debian: sudo apt-get install postgresql"; \ + echo ""; \ + exit 1; \ + fi + install: poetry install diff --git a/PROJECT_STRUCTURE.md b/PROJECT_STRUCTURE.md index aa71e93..0c4d73a 100644 --- a/PROJECT_STRUCTURE.md +++ b/PROJECT_STRUCTURE.md @@ -51,48 +51,101 @@ A FastAPI project with a database layer using raw SQL and comprehensive pytest t ### Prerequisites - Python 3.9 or higher -- Poetry ([installation guide](https://python-poetry.org/docs/#installation)) -- PostgreSQL server +- PostgreSQL server (for database) +- Poetry will be checked/prompted during setup -### Setup Steps +### Quick Start (First Time Setup) -#### For Development Environment +If you don't have Poetry installed yet, just run: -1. Install all dependencies including dev tools (testing, linting, formatting): ```bash -poetry install +make init ``` -#### For Production Environment +This single command will: +1. ✅ Check if Poetry is installed (and provide installation instructions if not) +2. ✅ Configure Poetry to use local `.venv` folder +3. ✅ Install all dependencies +4. ✅ Create `.env` file from `.env.example` +5. 
✅ Show you next steps + +After Poetry is installed and `make init` completes, create the databases: -1. Install only production dependencies (no dev tools): ```bash -poetry install --only main +make setup-db ``` -Or exclude dev dependencies: +Then start developing: + ```bash -poetry install --without dev +make run +``` + +That's it! 🚀 + +### Manual Setup (Alternative) + +If you prefer to set up manually: + +#### 1. Install Poetry + +**macOS/Linux (recommended):** +```bash +curl -sSL https://install.python-poetry.org | python3 - ``` -2. Create environment file: +**Using Homebrew (macOS):** ```bash -cp .env.example .env +brew install poetry ``` -3. Update the `.env` file with your PostgreSQL credentials: +**Using pipx:** +```bash +pipx install poetry ``` -DATABASE_URL=postgresql://username:password@localhost:5432/app_db -TEST_DATABASE_URL=postgresql://username:password@localhost:5432/test_db + +After installation, restart your terminal. + +#### 2. Configure Poetry for Local Virtual Environment + +```bash +poetry config virtualenvs.in-project true ``` -4. Create the databases: +#### 3. Install Dependencies + +**For Development Environment:** +```bash +poetry install +``` + +**For Production Environment:** +```bash +poetry install --only main +# or +poetry install --without dev +``` + +#### 4. Create Environment File + +```bash +cp .env.example .env +``` + +Update `.env` with your PostgreSQL credentials if needed (defaults should work for local dev). + +#### 5. Create PostgreSQL Databases + ```bash -# Using psql or your preferred PostgreSQL client createdb app_db createdb test_db ``` +Or use the Makefile: +```bash +make setup-db +``` + ## Running the Application ### Using Make (recommended) From f8ed822f2dd02ac901e39bdd0d7541d17a974a34 Mon Sep 17 00:00:00 2001 From: dhuzjak Date: Thu, 18 Dec 2025 11:44:42 +0100 Subject: [PATCH 05/17] Change python version Change python version to 3.12 Add pre-commit checks: - ruff - black - isort --- .gitignore | 4 +++ .pre-commit-config.yaml | 36 +++++++++++++++++++++ Makefile | 52 ++++++++++++++++++++---------- PROJECT_STRUCTURE.md | 71 +++++++++++++++++++++++++++++++++++------ pyproject.toml | 35 ++++++++++++++++++-- 5 files changed, 170 insertions(+), 28 deletions(-) create mode 100644 .pre-commit-config.yaml diff --git a/.gitignore b/.gitignore index 96de303..bb809e1 100644 --- a/.gitignore +++ b/.gitignore @@ -56,6 +56,10 @@ venv.bak/ htmlcov/ .tox/ +# Linting/Formatting +.ruff_cache/ +.mypy_cache/ + # OS .DS_Store Thumbs.db diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..b41ed9a --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,36 @@ +repos: + # Ruff - Fast Python linter and formatter + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.1.11 + hooks: + # Run the linter + - id: ruff + args: [--fix] + # Run the formatter + - id: ruff-format + + # Black - Python code formatter + - repo: https://github.com/psf/black + rev: 23.12.1 + hooks: + - id: black + language_version: python3.12 + + # isort - Import sorting + - repo: https://github.com/pycqa/isort + rev: 5.13.2 + hooks: + - id: isort + args: ["--profile", "black"] + + # Additional helpful hooks + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.5.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml + - id: check-added-large-files + - id: check-merge-conflict + - id: check-toml + - id: detect-private-key diff --git a/Makefile b/Makefile index e043ebe..cd8004d 100644 --- a/Makefile +++ 
b/Makefile @@ -1,22 +1,24 @@ -.PHONY: help init setup check-poetry install-poetry install install-prod setup-db run run-dev run-prod test test-cov format lint check clean +.PHONY: help init setup check-poetry install install-prod setup-db setup-precommit run run-dev run-prod test test-cov format lint ruff check clean help: @echo "Available commands:" - @echo " make init - Initialize project (install Poetry, setup venv, install deps)" - @echo " make setup - Alias for init" - @echo " make check-poetry - Check if Poetry is installed" - @echo " make install - Install all dependencies (dev + prod)" - @echo " make install-prod - Install production dependencies only" - @echo " make setup-db - Create PostgreSQL databases (app_db, test_db)" - @echo " make run - Run the development server (default: development)" - @echo " make run-dev - Run the development server (APP_ENV=development)" - @echo " make run-prod - Run the production server (APP_ENV=production)" - @echo " make test - Run tests (APP_ENV=testing)" - @echo " make test-cov - Run tests with coverage report" - @echo " make format - Format code with black and isort" - @echo " make lint - Run linting with flake8" - @echo " make check - Run all code quality checks" - @echo " make clean - Clean up cache files" + @echo " make init - Initialize project (install Poetry, setup venv, install deps)" + @echo " make setup - Alias for init" + @echo " make check-poetry - Check if Poetry is installed" + @echo " make install - Install all dependencies (dev + prod)" + @echo " make install-prod - Install production dependencies only" + @echo " make setup-db - Create PostgreSQL databases (app_db, test_db)" + @echo " make setup-precommit - Install pre-commit hooks" + @echo " make run - Run the development server (default: development)" + @echo " make run-dev - Run the development server (APP_ENV=development)" + @echo " make run-prod - Run the production server (APP_ENV=production)" + @echo " make test - Run tests (APP_ENV=testing)" + @echo " make test-cov - Run tests with coverage report" + @echo " make format - Format code with ruff, black and isort" + @echo " make lint - Run linting with ruff, flake8 and mypy" + @echo " make ruff - Run ruff linter and formatter" + @echo " make check - Run all code quality checks" + @echo " make clean - Clean up cache files" # Initialize project (first-time setup) init: check-poetry @@ -24,6 +26,8 @@ init: check-poetry poetry config virtualenvs.in-project true @echo "📦 Installing dependencies..." poetry install + @echo "🪝 Setting up pre-commit hooks..." + poetry run pre-commit install @echo "📄 Setting up environment file..." @if [ ! -f .env ]; then \ cp .env.example .env; \ @@ -83,6 +87,15 @@ setup-db: exit 1; \ fi +# Setup pre-commit hooks +setup-precommit: + @echo "🪝 Installing pre-commit hooks..." + poetry run pre-commit install + @echo "✅ Pre-commit hooks installed!" + @echo "" + @echo "Hooks will run automatically on git commit." + @echo "To run manually: pre-commit run --all-files" + install: poetry install @@ -104,13 +117,19 @@ test-cov: poetry run pytest --cov=app --cov-report=html --cov-report=term format: + poetry run ruff format app/ tests/ poetry run black app/ tests/ poetry run isort app/ tests/ lint: + poetry run ruff check app/ tests/ poetry run flake8 app/ tests/ poetry run mypy app/ +ruff: + poetry run ruff check app/ tests/ --fix + poetry run ruff format app/ tests/ + check: format lint test @echo "All checks passed!" 
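+# NOTE: check runs format, lint and test in that order; make stops at the
+# first failing step, so tests only run once formatting and linting pass.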
@@ -122,4 +141,5 @@ clean: rm -rf .pytest_cache rm -rf htmlcov rm -rf .mypy_cache + rm -rf .ruff_cache @echo "Cleaned up cache files" diff --git a/PROJECT_STRUCTURE.md b/PROJECT_STRUCTURE.md index 0c4d73a..b83a1ca 100644 --- a/PROJECT_STRUCTURE.md +++ b/PROJECT_STRUCTURE.md @@ -50,7 +50,7 @@ A FastAPI project with a database layer using raw SQL and comprehensive pytest t ## Installation ### Prerequisites -- Python 3.9 or higher +- Python 3.12 or higher - PostgreSQL server (for database) - Poetry will be checked/prompted during setup @@ -440,46 +440,99 @@ poetry export -f requirements.txt --output requirements-dev.txt --without-hashes ## Code Quality Tools (Dev Environment) -The dev environment includes tools for code quality: +The dev environment includes comprehensive code quality tools: + +- **Ruff** - Fast Python linter and formatter (replaces flake8, isort for speed) +- **Black** - Python code formatter +- **isort** - Import sorting +- **flake8** - Style guide enforcement +- **mypy** - Static type checking +- **pre-commit** - Git hooks for automatic code quality checks + +### Pre-commit Hooks + +Pre-commit hooks run automatically on every `git commit` to ensure code quality: + +**Setup (done automatically by `make init`):** +```bash +make setup-precommit +# or +poetry run pre-commit install +``` + +**Run manually on all files:** +```bash +poetry run pre-commit run --all-files +``` + +**Skip hooks (when needed):** +```bash +git commit --no-verify +``` ### Using Make (recommended) + ```bash -make format # Format code with black and isort -make lint # Lint with flake8 and type check with mypy +make ruff # Run ruff linter and formatter (fast!) +make format # Format with ruff, black, and isort +make lint # Lint with ruff, flake8, and mypy make check # Run format, lint, and tests make clean # Clean up cache files ``` ### Using Poetry directly -Format code with Black: +**Ruff (fastest option):** +```bash +# Lint and auto-fix issues +poetry run ruff check app/ tests/ --fix + +# Format code +poetry run ruff format app/ tests/ +``` + +**Black:** ```bash poetry run black app/ tests/ ``` -Sort imports with isort: +**isort:** ```bash poetry run isort app/ tests/ ``` -Lint with flake8: +**flake8:** ```bash poetry run flake8 app/ tests/ ``` -Type check with mypy: +**mypy:** ```bash poetry run mypy app/ ``` -Run all quality checks: +**Run all quality checks:** ```bash +poetry run ruff check app/ tests/ --fix && \ +poetry run ruff format app/ tests/ && \ poetry run black app/ tests/ && \ poetry run isort app/ tests/ && \ poetry run flake8 app/ tests/ && \ poetry run mypy app/ ``` +### What Runs on Git Commit + +When you run `git commit`, pre-commit automatically runs: + +1. **Ruff** - Linting with auto-fix +2. **Ruff** - Code formatting +3. **Black** - Additional formatting +4. **isort** - Import sorting +5. **pre-commit-hooks** - Trailing whitespace, EOF, YAML/TOML validation, etc. + +If any hook fails, the commit is blocked until you fix the issues. 
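+### Running a Single Hook
+
+A single hook can also be run in isolation by its id from `.pre-commit-config.yaml` (e.g. `ruff`, `black`, `isort`); a quick sketch, assuming hooks were installed via `make init` or `make setup-precommit`:
+
+```bash
+# Run only the ruff lint hook against the whole repository
+poetry run pre-commit run ruff --all-files
+```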
+ ## Adapting for Other Use Cases This structure provides a foundation that can be adapted for various applications: diff --git a/pyproject.toml b/pyproject.toml index 350c368..14e4d8c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,7 +8,7 @@ packages = [{include = "app"}] # Production dependencies only [tool.poetry.dependencies] -python = "^3.9" +python = "^3.12" fastapi = "^0.109.0" uvicorn = {extras = ["standard"], version = "^0.27.0"} pydantic = "^2.5.3" @@ -31,10 +31,12 @@ pytest-env = "^1.1.3" httpx = "^0.26.0" # Code quality +ruff = "^0.1.11" black = "^23.12.1" isort = "^5.13.2" flake8 = "^7.0.0" mypy = "^1.8.0" +pre-commit = "^3.6.0" # Development tools ipython = "^8.20.0" @@ -62,7 +64,7 @@ env = [ # Black code formatter configuration [tool.black] line-length = 100 -target-version = ['py39', 'py310', 'py311'] +target-version = ['py312'] include = '\.pyi?$' exclude = ''' /( @@ -87,8 +89,35 @@ ensure_newline_before_comments = true # mypy type checking configuration [tool.mypy] -python_version = "3.9" +python_version = "3.12" warn_return_any = true warn_unused_configs = true disallow_untyped_defs = false ignore_missing_imports = true + +# Ruff linter and formatter configuration +[tool.ruff] +line-length = 100 +target-version = "py312" + +[tool.ruff.lint] +select = [ + "E", # pycodestyle errors + "W", # pycodestyle warnings + "F", # pyflakes + "I", # isort + "B", # flake8-bugbear + "C4", # flake8-comprehensions + "UP", # pyupgrade +] +ignore = [ + "E501", # line too long (handled by black) + "B008", # do not perform function calls in argument defaults +] + +[tool.ruff.lint.isort] +known-first-party = ["app"] + +[tool.ruff.format] +quote-style = "double" +indent-style = "space" From 2c44000dcc33bde63058a5c9aab090e115353784 Mon Sep 17 00:00:00 2001 From: dhuzjak Date: Thu, 18 Dec 2025 13:57:08 +0100 Subject: [PATCH 06/17] Change API models and DB schemas Change models and schemas from the initial structure to match the requirements.
Use server model Change tests --- .env.example | 6 +- app/api/servers.py | 70 +++++++++++++ app/api/users.py | 58 ---------- app/core/config.py | 39 ++++--- app/db/schema.py | 30 +++--- app/main.py | 4 +- app/models/server.py | 85 +++++++++++++++ app/models/user.py | 93 ----------------- app/schemas/server.py | 69 ++++++++++++ app/schemas/user.py | 49 --------- app/services/server_service.py | 65 ++++++++++++ app/services/user_service.py | 94 ----------------- pyproject.toml | 6 +- tests/conftest.py | 11 -- tests/test_api.py | 119 --------------------- tests/test_models.py | 145 ------------------------- tests/test_server_api.py | 186 +++++++++++++++++++++++++++++++++ tests/test_services.py | 127 ---------------------- 18 files changed, 516 insertions(+), 740 deletions(-) create mode 100644 app/api/servers.py delete mode 100644 app/api/users.py create mode 100644 app/models/server.py delete mode 100644 app/models/user.py create mode 100644 app/schemas/server.py delete mode 100644 app/schemas/user.py create mode 100644 app/services/server_service.py delete mode 100644 app/services/user_service.py delete mode 100644 tests/test_api.py delete mode 100644 tests/test_models.py create mode 100644 tests/test_server_api.py delete mode 100644 tests/test_services.py diff --git a/.env.example b/.env.example index 783815d..5824991 100644 --- a/.env.example +++ b/.env.example @@ -26,6 +26,6 @@ API_V1_PREFIX=/api/v1 # Security SECRET_KEY=dev-secret-key-change-in-production -# External Services (optional) -REDIS_URL=redis://localhost:6379/0 -EXTERNAL_API_URL=http://localhost:9000 +# External Services (optional) - Uncomment and set if needed +# REDIS_URL=redis://localhost:6379/0 +# EXTERNAL_API_URL=http://localhost:9000 diff --git a/app/api/servers.py b/app/api/servers.py new file mode 100644 index 0000000..ef7fc2d --- /dev/null +++ b/app/api/servers.py @@ -0,0 +1,70 @@ +""" +Server API endpoints. 
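+
+Routes: POST / (create), GET / (list with skip/limit pagination),
+GET /{server_id}, PUT /{server_id}, DELETE /{server_id}.
+Duplicate hostnames return 400; missing servers return 404.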
+""" +from typing import List +from fastapi import APIRouter, HTTPException, status, Query + +from app.schemas.server import ServerCreate, ServerUpdate, ServerResponse +from app.services.server_service import ServerService + +router = APIRouter() + + +@router.post("/", response_model=ServerResponse, status_code=status.HTTP_201_CREATED) +async def create_server(server: ServerCreate): + """Create a new server.""" + service = ServerService() + try: + return service.create_server(server) + except ValueError as e: + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e)) + + +@router.get("/", response_model=List[ServerResponse]) +async def list_servers( + skip: int = Query(0, ge=0, description="Number of records to skip"), + limit: int = Query(100, ge=1, le=1000, description="Maximum number of records to return") +): + """List all servers with pagination.""" + service = ServerService() + return service.get_servers(skip=skip, limit=limit) + + +@router.get("/{server_id}", response_model=ServerResponse) +async def get_server(server_id: int): + """Get server by ID.""" + service = ServerService() + server = service.get_server(server_id) + if not server: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=f"Server with id {server_id} not found" + ) + return server + + +@router.put("/{server_id}", response_model=ServerResponse) +async def update_server(server_id: int, server_update: ServerUpdate): + """Update server.""" + service = ServerService() + try: + server = service.update_server(server_id, server_update) + if not server: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=f"Server with id {server_id} not found" + ) + return server + except ValueError as e: + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e)) + + +@router.delete("/{server_id}", status_code=status.HTTP_204_NO_CONTENT) +async def delete_server(server_id: int): + """Delete server.""" + service = ServerService() + if not service.delete_server(server_id): + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=f"Server with id {server_id} not found" + ) diff --git a/app/api/users.py b/app/api/users.py deleted file mode 100644 index 480ccd7..0000000 --- a/app/api/users.py +++ /dev/null @@ -1,58 +0,0 @@ -""" -User API endpoints. 
-""" -from typing import List -from fastapi import APIRouter, HTTPException, status - -from app.schemas.user import UserCreate, UserUpdate, UserResponse -from app.services.user_service import UserService - -router = APIRouter() - - -@router.post("/", response_model=UserResponse, status_code=status.HTTP_201_CREATED) -async def create_user(user: UserCreate): - """Create a new user.""" - service = UserService() - try: - return service.create_user(user) - except ValueError as e: - raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e)) - - -@router.get("/", response_model=List[UserResponse]) -async def get_users(skip: int = 0, limit: int = 100): - """Get all users with pagination.""" - service = UserService() - return service.get_users(skip=skip, limit=limit) - - -@router.get("/{user_id}", response_model=UserResponse) -async def get_user(user_id: int): - """Get user by ID.""" - service = UserService() - user = service.get_user(user_id) - if not user: - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found") - return user - - -@router.put("/{user_id}", response_model=UserResponse) -async def update_user(user_id: int, user_update: UserUpdate): - """Update user.""" - service = UserService() - try: - user = service.update_user(user_id, user_update) - if not user: - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found") - return user - except ValueError as e: - raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e)) - - -@router.delete("/{user_id}", status_code=status.HTTP_204_NO_CONTENT) -async def delete_user(user_id: int): - """Delete user.""" - service = UserService() - if not service.delete_user(user_id): - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found") diff --git a/app/core/config.py b/app/core/config.py index 4e8299e..122d542 100644 --- a/app/core/config.py +++ b/app/core/config.py @@ -3,8 +3,8 @@ """ import os from functools import lru_cache -from typing import List -from pydantic import field_validator +from typing import List, Union +from pydantic import Field, field_validator from pydantic_settings import BaseSettings, SettingsConfigDict @@ -22,8 +22,8 @@ class SettingsBase(BaseSettings): DATABASE_URL: str TEST_DATABASE_URL: str = "" - # CORS - ALLOWED_ORIGINS: List[str] = ["http://localhost:8000"] + # CORS (stored as string, converted to list via property) + allowed_origins_str: str = Field(default="http://localhost:8000", validation_alias="ALLOWED_ORIGINS") ENABLE_CORS: bool = True # Logging/Debug @@ -41,16 +41,17 @@ class SettingsBase(BaseSettings): env_file=(".env", f".env.{os.getenv('APP_ENV', 'development')}"), env_file_encoding="utf-8", case_sensitive=True, - extra="ignore" + extra="ignore", + env_parse_none_str="null", # Treat "null" string as None + populate_by_name=True, ) - @field_validator("ALLOWED_ORIGINS", mode="before") - @classmethod - def parse_cors_origins(cls, v): - """Parse comma-separated CORS origins from env var.""" - if isinstance(v, str): - return [origin.strip() for origin in v.split(",")] - return v + @property + def ALLOWED_ORIGINS(self) -> List[str]: + """Parse and return CORS origins as a list.""" + if not self.allowed_origins_str or self.allowed_origins_str.strip() == "": + return ["http://localhost:8000"] + return [origin.strip() for origin in self.allowed_origins_str.split(",") if origin.strip()] class DevelopmentSettings(SettingsBase): @@ -61,11 +62,7 @@ class DevelopmentSettings(SettingsBase): LOG_LEVEL: str = "DEBUG" 
DATABASE_URL: str = "postgresql://postgres:postgres@localhost:5432/app_db" TEST_DATABASE_URL: str = "postgresql://postgres:postgres@localhost:5432/test_db" - ALLOWED_ORIGINS: List[str] = [ - "http://localhost:3000", - "http://localhost:8000", - "http://localhost:5173" - ] + allowed_origins_str: str = "http://localhost:3000,http://localhost:8000,http://localhost:5173" SECRET_KEY: str = "dev-secret-key-change-in-production" @@ -77,7 +74,7 @@ class TestingSettings(SettingsBase): LOG_LEVEL: str = "INFO" DATABASE_URL: str = "postgresql://postgres:postgres@localhost:5432/test_db" TEST_DATABASE_URL: str = "postgresql://postgres:postgres@localhost:5432/test_db" - ALLOWED_ORIGINS: List[str] = ["*"] + allowed_origins_str: str = "*" SECRET_KEY: str = "test-secret-key" @@ -96,12 +93,12 @@ def validate_required_fields(cls, v, info): raise ValueError(f"{info.field_name} must be set in production environment") return v - @field_validator("ALLOWED_ORIGINS") + @field_validator("allowed_origins_str") @classmethod def validate_cors_origins(cls, v): """Ensure CORS origins are explicitly set in production.""" - if not v or v == ["*"]: - raise ValueError("ALLOWED_ORIGINS must be explicitly set in production") + if not v or v.strip() == "" or v == "*": + raise ValueError("ALLOWED_ORIGINS must be explicitly set in production (not * or empty)") return v diff --git a/app/db/schema.py b/app/db/schema.py index 38b9d9e..1b2f9f6 100644 --- a/app/db/schema.py +++ b/app/db/schema.py @@ -8,28 +8,28 @@ def create_tables(): """Create all database tables.""" db = get_db() - # Users table - users_table = """ - CREATE TABLE IF NOT EXISTS users ( + # Servers table + servers_table = """ + CREATE TABLE IF NOT EXISTS servers ( id SERIAL PRIMARY KEY, - email VARCHAR(255) UNIQUE NOT NULL, - username VARCHAR(255) UNIQUE NOT NULL, - full_name VARCHAR(255), - hashed_password VARCHAR(255) NOT NULL, - is_active BOOLEAN DEFAULT TRUE, + hostname VARCHAR(255) UNIQUE NOT NULL, + ip_address VARCHAR(45) NOT NULL, + name VARCHAR(255), + state VARCHAR(20) NOT NULL DEFAULT 'active', created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP + updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + CONSTRAINT chk_state CHECK (state IN ('active', 'offline', 'retired')) ) """ # Create indexes - email_index = "CREATE INDEX IF NOT EXISTS idx_users_email ON users(email)" - username_index = "CREATE INDEX IF NOT EXISTS idx_users_username ON users(username)" + hostname_index = "CREATE INDEX IF NOT EXISTS idx_servers_hostname ON servers(hostname)" + state_index = "CREATE INDEX IF NOT EXISTS idx_servers_state ON servers(state)" with db.get_cursor() as cursor: - cursor.execute(users_table) - cursor.execute(email_index) - cursor.execute(username_index) + cursor.execute(servers_table) + cursor.execute(hostname_index) + cursor.execute(state_index) def drop_tables(): @@ -37,4 +37,4 @@ def drop_tables(): db = get_db() with db.get_cursor() as cursor: - cursor.execute("DROP TABLE IF EXISTS users") + cursor.execute("DROP TABLE IF EXISTS servers") diff --git a/app/main.py b/app/main.py index c64e7e0..328ee75 100644 --- a/app/main.py +++ b/app/main.py @@ -5,7 +5,7 @@ from fastapi.middleware.cors import CORSMiddleware from app.core.config import settings -from app.api import users, health +from app.api import servers, health app = FastAPI( title=settings.PROJECT_NAME, @@ -26,7 +26,7 @@ # Include routers app.include_router(health.router, tags=["health"]) -app.include_router(users.router, prefix="/api/v1/users", tags=["users"]) 
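+# Servers replace the old users resource and are mounted at /servers,
+# without the API_V1_PREFIX that the users router used.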
+app.include_router(servers.router, prefix="/servers", tags=["servers"]) @app.on_event("startup") diff --git a/app/models/server.py b/app/models/server.py new file mode 100644 index 0000000..3511df8 --- /dev/null +++ b/app/models/server.py @@ -0,0 +1,85 @@ +""" +Server data access layer using raw SQL. +""" +from typing import List, Optional +from datetime import datetime + +from app.db.database import Database +from app.schemas.server import ServerCreate, ServerUpdate + + +class ServerModel: + """Server model with raw SQL queries.""" + + def __init__(self, db: Database): + self.db = db + + def create(self, server: ServerCreate) -> int: + """Create a new server.""" + query = """ + INSERT INTO servers (hostname, ip_address, name, state) + VALUES (%s, %s, %s, %s) + """ + params = (server.hostname, server.ip_address, server.name, server.state.value) + return self.db.execute_insert(query, params) + + def get_by_id(self, server_id: int) -> Optional[dict]: + """Get server by ID.""" + query = "SELECT * FROM servers WHERE id = %s" + results = self.db.execute_query(query, (server_id,)) + if results: + return dict(results[0]) + return None + + def get_by_hostname(self, hostname: str) -> Optional[dict]: + """Get server by hostname.""" + query = "SELECT * FROM servers WHERE hostname = %s" + results = self.db.execute_query(query, (hostname,)) + if results: + return dict(results[0]) + return None + + def get_all(self, skip: int = 0, limit: int = 100) -> List[dict]: + """Get all servers with pagination.""" + query = "SELECT * FROM servers ORDER BY created_at DESC LIMIT %s OFFSET %s" + results = self.db.execute_query(query, (limit, skip)) + return [dict(row) for row in results] + + def update(self, server_id: int, server: ServerUpdate) -> int: + """Update server.""" + updates = [] + params = [] + + if server.hostname is not None: + updates.append("hostname = %s") + params.append(server.hostname) + if server.ip_address is not None: + updates.append("ip_address = %s") + params.append(server.ip_address) + if server.name is not None: + updates.append("name = %s") + params.append(server.name) + if server.state is not None: + updates.append("state = %s") + params.append(server.state.value) + + if not updates: + return 0 + + updates.append("updated_at = %s") + params.append(datetime.utcnow()) + params.append(server_id) + + query = f"UPDATE servers SET {', '.join(updates)} WHERE id = %s" + return self.db.execute_update(query, tuple(params)) + + def delete(self, server_id: int) -> int: + """Delete server.""" + query = "DELETE FROM servers WHERE id = %s" + return self.db.execute_update(query, (server_id,)) + + def count(self) -> int: + """Count total servers.""" + query = "SELECT COUNT(*) as count FROM servers" + result = self.db.execute_query(query) + return result[0]['count'] if result else 0 diff --git a/app/models/user.py b/app/models/user.py deleted file mode 100644 index d75f881..0000000 --- a/app/models/user.py +++ /dev/null @@ -1,93 +0,0 @@ -""" -User data access layer using raw SQL. -""" -from typing import List, Optional -from datetime import datetime - -from app.db.database import Database -from app.schemas.user import UserCreate, UserUpdate - - -class UserModel: - """User model with raw SQL queries.""" - - def __init__(self, db: Database): - self.db = db - - def create(self, user: UserCreate, hashed_password: str) -> int: - """Create a new user.""" - query = """ - INSERT INTO users (email, username, full_name, hashed_password) - VALUES (?, ?, ?, ?) 
- """ - params = (user.email, user.username, user.full_name, hashed_password) - return self.db.execute_insert(query, params) - - def get_by_id(self, user_id: int) -> Optional[dict]: - """Get user by ID.""" - query = "SELECT * FROM users WHERE id = ?" - results = self.db.execute_query(query, (user_id,)) - if results: - return dict(results[0]) - return None - - def get_by_email(self, email: str) -> Optional[dict]: - """Get user by email.""" - query = "SELECT * FROM users WHERE email = ?" - results = self.db.execute_query(query, (email,)) - if results: - return dict(results[0]) - return None - - def get_by_username(self, username: str) -> Optional[dict]: - """Get user by username.""" - query = "SELECT * FROM users WHERE username = ?" - results = self.db.execute_query(query, (username,)) - if results: - return dict(results[0]) - return None - - def get_all(self, skip: int = 0, limit: int = 100) -> List[dict]: - """Get all users with pagination.""" - query = "SELECT * FROM users ORDER BY created_at DESC LIMIT ? OFFSET ?" - results = self.db.execute_query(query, (limit, skip)) - return [dict(row) for row in results] - - def update(self, user_id: int, user: UserUpdate, hashed_password: Optional[str] = None) -> int: - """Update user.""" - updates = [] - params = [] - - if user.email is not None: - updates.append("email = ?") - params.append(user.email) - if user.username is not None: - updates.append("username = ?") - params.append(user.username) - if user.full_name is not None: - updates.append("full_name = ?") - params.append(user.full_name) - if hashed_password is not None: - updates.append("hashed_password = ?") - params.append(hashed_password) - - if not updates: - return 0 - - updates.append("updated_at = ?") - params.append(datetime.utcnow().isoformat()) - params.append(user_id) - - query = f"UPDATE users SET {', '.join(updates)} WHERE id = ?" - return self.db.execute_update(query, tuple(params)) - - def delete(self, user_id: int) -> int: - """Delete user.""" - query = "DELETE FROM users WHERE id = ?" - return self.db.execute_update(query, (user_id,)) - - def count(self) -> int: - """Count total users.""" - query = "SELECT COUNT(*) as count FROM users" - result = self.db.execute_query(query) - return result[0]['count'] if result else 0 diff --git a/app/schemas/server.py b/app/schemas/server.py new file mode 100644 index 0000000..9189621 --- /dev/null +++ b/app/schemas/server.py @@ -0,0 +1,69 @@ +""" +Server schema definitions using Pydantic. 
+""" +from typing import Optional +from datetime import datetime +from enum import Enum +from pydantic import BaseModel, Field, field_validator +import ipaddress + + +class ServerState(str, Enum): + """Server state enumeration.""" + ACTIVE = "active" + OFFLINE = "offline" + RETIRED = "retired" + + +class ServerBase(BaseModel): + """Base server schema.""" + hostname: str = Field(..., min_length=1, max_length=255, description="Unique server hostname") + ip_address: str = Field(..., description="Server IP address") + name: Optional[str] = Field(None, max_length=255, description="Server name/description") + state: ServerState = Field(default=ServerState.ACTIVE, description="Server state") + + @field_validator("ip_address") + @classmethod + def validate_ip_address(cls, v: str) -> str: + """Validate that ip_address is a valid IP address.""" + try: + # This will raise ValueError if invalid + ipaddress.ip_address(v) + return v + except ValueError: + raise ValueError(f"Invalid IP address: {v}") + + +class ServerCreate(ServerBase): + """Schema for creating a server.""" + pass + + +class ServerUpdate(BaseModel): + """Schema for updating a server.""" + hostname: Optional[str] = Field(None, min_length=1, max_length=255) + ip_address: Optional[str] = None + name: Optional[str] = Field(None, max_length=255) + state: Optional[ServerState] = None + + @field_validator("ip_address") + @classmethod + def validate_ip_address(cls, v: Optional[str]) -> Optional[str]: + """Validate that ip_address is a valid IP address if provided.""" + if v is not None: + try: + ipaddress.ip_address(v) + return v + except ValueError: + raise ValueError(f"Invalid IP address: {v}") + return v + + +class ServerResponse(ServerBase): + """Schema for server response.""" + id: int + created_at: datetime + updated_at: datetime + + class Config: + from_attributes = True diff --git a/app/schemas/user.py b/app/schemas/user.py deleted file mode 100644 index 868510c..0000000 --- a/app/schemas/user.py +++ /dev/null @@ -1,49 +0,0 @@ -""" -User schema definitions using Pydantic. -""" -from typing import Optional -from datetime import datetime -from pydantic import BaseModel, EmailStr, Field - - -class UserBase(BaseModel): - """Base user schema.""" - email: EmailStr - username: str = Field(..., min_length=3, max_length=50) - full_name: Optional[str] = None - - -class UserCreate(UserBase): - """Schema for creating a user.""" - password: str = Field(..., min_length=8) - - -class UserUpdate(BaseModel): - """Schema for updating a user.""" - email: Optional[EmailStr] = None - username: Optional[str] = Field(None, min_length=3, max_length=50) - full_name: Optional[str] = None - password: Optional[str] = Field(None, min_length=8) - - -class UserInDB(UserBase): - """Schema for user in database.""" - id: int - hashed_password: str - is_active: bool - created_at: datetime - updated_at: datetime - - class Config: - from_attributes = True - - -class UserResponse(UserBase): - """Schema for user response.""" - id: int - is_active: bool - created_at: datetime - updated_at: datetime - - class Config: - from_attributes = True diff --git a/app/services/server_service.py b/app/services/server_service.py new file mode 100644 index 0000000..a43ff4f --- /dev/null +++ b/app/services/server_service.py @@ -0,0 +1,65 @@ +""" +Server service layer with business logic. 
+""" +from typing import List, Optional + +from app.db.database import get_db +from app.models.server import ServerModel +from app.schemas.server import ServerCreate, ServerUpdate, ServerResponse + + +class ServerService: + """Server service for business logic.""" + + def __init__(self): + self.db = get_db() + self.server_model = ServerModel(self.db) + + def create_server(self, server: ServerCreate) -> ServerResponse: + """Create a new server.""" + # Check if hostname already exists + if self.server_model.get_by_hostname(server.hostname): + raise ValueError(f"Server with hostname '{server.hostname}' already exists") + + # Create server + server_id = self.server_model.create(server) + + # Retrieve and return created server + server_dict = self.server_model.get_by_id(server_id) + return ServerResponse(**server_dict) + + def get_server(self, server_id: int) -> Optional[ServerResponse]: + """Get server by ID.""" + server_dict = self.server_model.get_by_id(server_id) + if server_dict: + return ServerResponse(**server_dict) + return None + + def get_servers(self, skip: int = 0, limit: int = 100) -> List[ServerResponse]: + """Get all servers with pagination.""" + servers = self.server_model.get_all(skip, limit) + return [ServerResponse(**server) for server in servers] + + def update_server(self, server_id: int, server_update: ServerUpdate) -> Optional[ServerResponse]: + """Update server.""" + # Check if server exists + existing_server = self.server_model.get_by_id(server_id) + if not existing_server: + return None + + # Check hostname uniqueness if being updated + if server_update.hostname and server_update.hostname != existing_server['hostname']: + if self.server_model.get_by_hostname(server_update.hostname): + raise ValueError(f"Server with hostname '{server_update.hostname}' already exists") + + # Update server + self.server_model.update(server_id, server_update) + + # Retrieve and return updated server + server_dict = self.server_model.get_by_id(server_id) + return ServerResponse(**server_dict) + + def delete_server(self, server_id: int) -> bool: + """Delete server.""" + rows_affected = self.server_model.delete(server_id) + return rows_affected > 0 diff --git a/app/services/user_service.py b/app/services/user_service.py deleted file mode 100644 index 86c26ba..0000000 --- a/app/services/user_service.py +++ /dev/null @@ -1,94 +0,0 @@ -""" -User service layer with business logic. 
-""" -from typing import List, Optional -from passlib.context import CryptContext - -from app.db.database import get_db -from app.models.user import UserModel -from app.schemas.user import UserCreate, UserUpdate, UserResponse - - -pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") - - -class UserService: - """User service for business logic.""" - - def __init__(self): - self.db = get_db() - self.user_model = UserModel(self.db) - - @staticmethod - def hash_password(password: str) -> str: - """Hash a password.""" - return pwd_context.hash(password) - - @staticmethod - def verify_password(plain_password: str, hashed_password: str) -> bool: - """Verify a password against a hash.""" - return pwd_context.verify(plain_password, hashed_password) - - def create_user(self, user: UserCreate) -> UserResponse: - """Create a new user.""" - # Check if email already exists - if self.user_model.get_by_email(user.email): - raise ValueError("Email already registered") - - # Check if username already exists - if self.user_model.get_by_username(user.username): - raise ValueError("Username already taken") - - # Hash password and create user - hashed_password = self.hash_password(user.password) - user_id = self.user_model.create(user, hashed_password) - - # Retrieve and return created user - user_dict = self.user_model.get_by_id(user_id) - return UserResponse(**user_dict) - - def get_user(self, user_id: int) -> Optional[UserResponse]: - """Get user by ID.""" - user_dict = self.user_model.get_by_id(user_id) - if user_dict: - return UserResponse(**user_dict) - return None - - def get_users(self, skip: int = 0, limit: int = 100) -> List[UserResponse]: - """Get all users with pagination.""" - users = self.user_model.get_all(skip, limit) - return [UserResponse(**user) for user in users] - - def update_user(self, user_id: int, user_update: UserUpdate) -> Optional[UserResponse]: - """Update user.""" - # Check if user exists - existing_user = self.user_model.get_by_id(user_id) - if not existing_user: - return None - - # Check email uniqueness if being updated - if user_update.email and user_update.email != existing_user['email']: - if self.user_model.get_by_email(user_update.email): - raise ValueError("Email already registered") - - # Check username uniqueness if being updated - if user_update.username and user_update.username != existing_user['username']: - if self.user_model.get_by_username(user_update.username): - raise ValueError("Username already taken") - - # Hash password if being updated - hashed_password = None - if user_update.password: - hashed_password = self.hash_password(user_update.password) - - # Update user - self.user_model.update(user_id, user_update, hashed_password) - - # Retrieve and return updated user - user_dict = self.user_model.get_by_id(user_id) - return UserResponse(**user_dict) - - def delete_user(self, user_id: int) -> bool: - """Delete user.""" - rows_affected = self.user_model.delete(user_id) - return rows_affected > 0 diff --git a/pyproject.toml b/pyproject.toml index 14e4d8c..63dd0be 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,8 +1,8 @@ [tool.poetry] -name = "fastapi-postgres-app" +name = "server-inventory-mgmt" version = "1.0.0" -description = "FastAPI project with raw SQL database layer using PostgreSQL" -authors = ["Your Name "] +description = "Server inventory management API with PostgreSQL" +authors = ["Dario Huzjak "] readme = "README.md" packages = [{include = "app"}] diff --git a/tests/conftest.py b/tests/conftest.py index 8f181aa..4b7d071 100644 --- 
a/tests/conftest.py +++ b/tests/conftest.py @@ -50,14 +50,3 @@ def client(): """Create a test client.""" with TestClient(app) as test_client: yield test_client - - -@pytest.fixture -def sample_user_data(): - """Sample user data for testing.""" - return { - "email": "test@example.com", - "username": "testuser", - "full_name": "Test User", - "password": "testpassword123" - } diff --git a/tests/test_api.py b/tests/test_api.py deleted file mode 100644 index d22dd82..0000000 --- a/tests/test_api.py +++ /dev/null @@ -1,119 +0,0 @@ -""" -API endpoint tests. -""" -import pytest -from fastapi import status - - -@pytest.mark.integration -def test_health_check(client): - """Test health check endpoint.""" - response = client.get("/health") - assert response.status_code == status.HTTP_200_OK - assert response.json() == {"status": "healthy"} - - -@pytest.mark.integration -def test_root_endpoint(client): - """Test root endpoint.""" - response = client.get("/") - assert response.status_code == status.HTTP_200_OK - assert "message" in response.json() - - -@pytest.mark.integration -def test_create_user(client, sample_user_data): - """Test creating a user.""" - response = client.post("/api/v1/users/", json=sample_user_data) - assert response.status_code == status.HTTP_201_CREATED - - data = response.json() - assert data["email"] == sample_user_data["email"] - assert data["username"] == sample_user_data["username"] - assert data["full_name"] == sample_user_data["full_name"] - assert "id" in data - assert "password" not in data - assert "hashed_password" not in data - - -@pytest.mark.integration -def test_create_user_duplicate_email(client, sample_user_data): - """Test creating a user with duplicate email.""" - # Create first user - client.post("/api/v1/users/", json=sample_user_data) - - # Try to create second user with same email - response = client.post("/api/v1/users/", json=sample_user_data) - assert response.status_code == status.HTTP_400_BAD_REQUEST - assert "Email already registered" in response.json()["detail"] - - -@pytest.mark.integration -def test_get_users(client, sample_user_data): - """Test getting all users.""" - # Create a user first - client.post("/api/v1/users/", json=sample_user_data) - - # Get all users - response = client.get("/api/v1/users/") - assert response.status_code == status.HTTP_200_OK - - data = response.json() - assert isinstance(data, list) - assert len(data) > 0 - - -@pytest.mark.integration -def test_get_user_by_id(client, sample_user_data): - """Test getting a user by ID.""" - # Create a user - create_response = client.post("/api/v1/users/", json=sample_user_data) - user_id = create_response.json()["id"] - - # Get the user - response = client.get(f"/api/v1/users/{user_id}") - assert response.status_code == status.HTTP_200_OK - - data = response.json() - assert data["id"] == user_id - assert data["email"] == sample_user_data["email"] - - -@pytest.mark.integration -def test_get_user_not_found(client): - """Test getting a non-existent user.""" - response = client.get("/api/v1/users/99999") - assert response.status_code == status.HTTP_404_NOT_FOUND - - -@pytest.mark.integration -def test_update_user(client, sample_user_data): - """Test updating a user.""" - # Create a user - create_response = client.post("/api/v1/users/", json=sample_user_data) - user_id = create_response.json()["id"] - - # Update the user - update_data = {"full_name": "Updated Name"} - response = client.put(f"/api/v1/users/{user_id}", json=update_data) - assert response.status_code == status.HTTP_200_OK - - 
data = response.json() - assert data["full_name"] == "Updated Name" - assert data["email"] == sample_user_data["email"] - - -@pytest.mark.integration -def test_delete_user(client, sample_user_data): - """Test deleting a user.""" - # Create a user - create_response = client.post("/api/v1/users/", json=sample_user_data) - user_id = create_response.json()["id"] - - # Delete the user - response = client.delete(f"/api/v1/users/{user_id}") - assert response.status_code == status.HTTP_204_NO_CONTENT - - # Verify user is deleted - get_response = client.get(f"/api/v1/users/{user_id}") - assert get_response.status_code == status.HTTP_404_NOT_FOUND diff --git a/tests/test_models.py b/tests/test_models.py deleted file mode 100644 index 00401b9..0000000 --- a/tests/test_models.py +++ /dev/null @@ -1,145 +0,0 @@ -""" -Database model tests. -""" -import pytest - -from app.models.user import UserModel -from app.schemas.user import UserCreate - - -@pytest.mark.unit -def test_create_user(clean_db): - """Test creating a user in the database.""" - user_model = UserModel(clean_db) - user_data = UserCreate( - email="test@example.com", - username="testuser", - full_name="Test User", - password="testpass123" - ) - - user_id = user_model.create(user_data, "hashed_password") - assert user_id > 0 - - # Verify user was created - user = user_model.get_by_id(user_id) - assert user is not None - assert user["email"] == "test@example.com" - assert user["username"] == "testuser" - - -@pytest.mark.unit -def test_get_user_by_email(clean_db): - """Test getting a user by email.""" - user_model = UserModel(clean_db) - user_data = UserCreate( - email="test@example.com", - username="testuser", - full_name="Test User", - password="testpass123" - ) - - user_model.create(user_data, "hashed_password") - user = user_model.get_by_email("test@example.com") - - assert user is not None - assert user["email"] == "test@example.com" - - -@pytest.mark.unit -def test_get_user_by_username(clean_db): - """Test getting a user by username.""" - user_model = UserModel(clean_db) - user_data = UserCreate( - email="test@example.com", - username="testuser", - full_name="Test User", - password="testpass123" - ) - - user_model.create(user_data, "hashed_password") - user = user_model.get_by_username("testuser") - - assert user is not None - assert user["username"] == "testuser" - - -@pytest.mark.unit -def test_get_all_users(clean_db): - """Test getting all users.""" - user_model = UserModel(clean_db) - - # Create multiple users - for i in range(3): - user_data = UserCreate( - email=f"test{i}@example.com", - username=f"testuser{i}", - full_name=f"Test User {i}", - password="testpass123" - ) - user_model.create(user_data, "hashed_password") - - users = user_model.get_all() - assert len(users) == 3 - - -@pytest.mark.unit -def test_update_user(clean_db): - """Test updating a user.""" - user_model = UserModel(clean_db) - user_data = UserCreate( - email="test@example.com", - username="testuser", - full_name="Test User", - password="testpass123" - ) - - user_id = user_model.create(user_data, "hashed_password") - - from app.schemas.user import UserUpdate - update_data = UserUpdate(full_name="Updated Name") - rows_affected = user_model.update(user_id, update_data) - - assert rows_affected > 0 - - updated_user = user_model.get_by_id(user_id) - assert updated_user["full_name"] == "Updated Name" - - -@pytest.mark.unit -def test_delete_user(clean_db): - """Test deleting a user.""" - user_model = UserModel(clean_db) - user_data = UserCreate( - 
email="test@example.com", - username="testuser", - full_name="Test User", - password="testpass123" - ) - - user_id = user_model.create(user_data, "hashed_password") - rows_affected = user_model.delete(user_id) - - assert rows_affected > 0 - - deleted_user = user_model.get_by_id(user_id) - assert deleted_user is None - - -@pytest.mark.unit -def test_count_users(clean_db): - """Test counting users.""" - user_model = UserModel(clean_db) - - # Create multiple users - for i in range(5): - user_data = UserCreate( - email=f"test{i}@example.com", - username=f"testuser{i}", - full_name=f"Test User {i}", - password="testpass123" - ) - user_model.create(user_data, "hashed_password") - - count = user_model.count() - assert count == 5 diff --git a/tests/test_server_api.py b/tests/test_server_api.py new file mode 100644 index 0000000..aa0be5a --- /dev/null +++ b/tests/test_server_api.py @@ -0,0 +1,186 @@ +""" +Server API endpoint tests. +""" +import pytest +from fastapi import status + + +@pytest.fixture +def sample_server_data(): + """Sample server data for testing.""" + return { + "hostname": "web-server-01", + "ip_address": "192.168.1.100", + "name": "Web Server 1", + "state": "active" + } + + +@pytest.mark.integration +def test_create_server(client, sample_server_data): + """Test creating a server.""" + response = client.post("/servers/", json=sample_server_data) + assert response.status_code == status.HTTP_201_CREATED + + data = response.json() + assert data["hostname"] == sample_server_data["hostname"] + assert data["ip_address"] == sample_server_data["ip_address"] + assert data["name"] == sample_server_data["name"] + assert data["state"] == sample_server_data["state"] + assert "id" in data + assert "created_at" in data + assert "updated_at" in data + + +@pytest.mark.integration +def test_create_server_duplicate_hostname(client, sample_server_data): + """Test creating a server with duplicate hostname.""" + # Create first server + client.post("/servers/", json=sample_server_data) + + # Try to create second server with same hostname + response = client.post("/servers/", json=sample_server_data) + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert "already exists" in response.json()["detail"] + + +@pytest.mark.integration +def test_create_server_invalid_ip(client): + """Test creating a server with invalid IP address.""" + server_data = { + "hostname": "invalid-server", + "ip_address": "999.999.999.999", # Invalid IP + "state": "active" + } + response = client.post("/servers/", json=server_data) + assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY + + +@pytest.mark.integration +def test_list_servers(client, sample_server_data): + """Test listing all servers.""" + # Create a server first + client.post("/servers/", json=sample_server_data) + + # List all servers + response = client.get("/servers/") + assert response.status_code == status.HTTP_200_OK + + data = response.json() + assert isinstance(data, list) + assert len(data) > 0 + + +@pytest.mark.integration +def test_get_server_by_id(client, sample_server_data): + """Test getting a server by ID.""" + # Create a server + create_response = client.post("/servers/", json=sample_server_data) + server_id = create_response.json()["id"] + + # Get the server + response = client.get(f"/servers/{server_id}") + assert response.status_code == status.HTTP_200_OK + + data = response.json() + assert data["id"] == server_id + assert data["hostname"] == sample_server_data["hostname"] + + +@pytest.mark.integration +def 
test_get_server_not_found(client): + """Test getting a non-existent server.""" + response = client.get("/servers/99999") + assert response.status_code == status.HTTP_404_NOT_FOUND + + +@pytest.mark.integration +def test_update_server(client, sample_server_data): + """Test updating a server.""" + # Create a server + create_response = client.post("/servers/", json=sample_server_data) + server_id = create_response.json()["id"] + + # Update the server + update_data = { + "name": "Updated Web Server", + "state": "offline" + } + response = client.put(f"/servers/{server_id}", json=update_data) + assert response.status_code == status.HTTP_200_OK + + data = response.json() + assert data["name"] == "Updated Web Server" + assert data["state"] == "offline" + assert data["hostname"] == sample_server_data["hostname"] # Should remain unchanged + + +@pytest.mark.integration +def test_update_server_invalid_ip(client, sample_server_data): + """Test updating a server with invalid IP.""" + # Create a server + create_response = client.post("/servers/", json=sample_server_data) + server_id = create_response.json()["id"] + + # Try to update with invalid IP + update_data = {"ip_address": "not-an-ip"} + response = client.put(f"/servers/{server_id}", json=update_data) + assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY + + +@pytest.mark.integration +def test_delete_server(client, sample_server_data): + """Test deleting a server.""" + # Create a server + create_response = client.post("/servers/", json=sample_server_data) + server_id = create_response.json()["id"] + + # Delete the server + response = client.delete(f"/servers/{server_id}") + assert response.status_code == status.HTTP_204_NO_CONTENT + + # Verify server is deleted + get_response = client.get(f"/servers/{server_id}") + assert get_response.status_code == status.HTTP_404_NOT_FOUND + + +@pytest.mark.integration +def test_delete_server_not_found(client): + """Test deleting a non-existent server.""" + response = client.delete("/servers/99999") + assert response.status_code == status.HTTP_404_NOT_FOUND + + +@pytest.mark.integration +def test_server_state_enum(client): + """Test server state enumeration values.""" + valid_states = ["active", "offline", "retired"] + + for state in valid_states: + server_data = { + "hostname": f"server-{state}", + "ip_address": "10.0.0.1", + "state": state + } + response = client.post("/servers/", json=server_data) + assert response.status_code == status.HTTP_201_CREATED + assert response.json()["state"] == state + + +@pytest.mark.integration +def test_server_pagination(client): + """Test server list pagination.""" + # Create multiple servers + for i in range(5): + server_data = { + "hostname": f"server-{i:02d}", + "ip_address": f"192.168.1.{i+1}", + "state": "active" + } + client.post("/servers/", json=server_data) + + # Test pagination + response = client.get("/servers/?skip=2&limit=2") + assert response.status_code == status.HTTP_200_OK + data = response.json() + assert len(data) <= 2 diff --git a/tests/test_services.py b/tests/test_services.py deleted file mode 100644 index 626b8c6..0000000 --- a/tests/test_services.py +++ /dev/null @@ -1,127 +0,0 @@ -""" -Service layer tests. 
-""" -import pytest - -from app.services.user_service import UserService -from app.schemas.user import UserCreate, UserUpdate - - -@pytest.mark.unit -def test_hash_password(): - """Test password hashing.""" - password = "testpassword123" - hashed = UserService.hash_password(password) - - assert hashed != password - assert UserService.verify_password(password, hashed) - - -@pytest.mark.unit -def test_verify_password_invalid(): - """Test password verification with wrong password.""" - password = "testpassword123" - hashed = UserService.hash_password(password) - - assert not UserService.verify_password("wrongpassword", hashed) - - -@pytest.mark.integration -def test_create_user_service(clean_db): - """Test creating a user through service.""" - service = UserService() - user_data = UserCreate( - email="test@example.com", - username="testuser", - full_name="Test User", - password="testpass123" - ) - - user = service.create_user(user_data) - - assert user.email == "test@example.com" - assert user.username == "testuser" - assert user.id > 0 - - -@pytest.mark.integration -def test_create_user_duplicate_email(clean_db): - """Test creating user with duplicate email.""" - service = UserService() - user_data = UserCreate( - email="test@example.com", - username="testuser", - full_name="Test User", - password="testpass123" - ) - - service.create_user(user_data) - - # Try to create another user with same email - duplicate_data = UserCreate( - email="test@example.com", - username="anotheruser", - full_name="Another User", - password="testpass123" - ) - - with pytest.raises(ValueError, match="Email already registered"): - service.create_user(duplicate_data) - - -@pytest.mark.integration -def test_get_user_service(clean_db): - """Test getting a user through service.""" - service = UserService() - user_data = UserCreate( - email="test@example.com", - username="testuser", - full_name="Test User", - password="testpass123" - ) - - created_user = service.create_user(user_data) - retrieved_user = service.get_user(created_user.id) - - assert retrieved_user is not None - assert retrieved_user.id == created_user.id - - -@pytest.mark.integration -def test_update_user_service(clean_db): - """Test updating a user through service.""" - service = UserService() - user_data = UserCreate( - email="test@example.com", - username="testuser", - full_name="Test User", - password="testpass123" - ) - - created_user = service.create_user(user_data) - - update_data = UserUpdate(full_name="Updated Name") - updated_user = service.update_user(created_user.id, update_data) - - assert updated_user is not None - assert updated_user.full_name == "Updated Name" - - -@pytest.mark.integration -def test_delete_user_service(clean_db): - """Test deleting a user through service.""" - service = UserService() - user_data = UserCreate( - email="test@example.com", - username="testuser", - full_name="Test User", - password="testpass123" - ) - - created_user = service.create_user(user_data) - deleted = service.delete_user(created_user.id) - - assert deleted is True - - retrieved_user = service.get_user(created_user.id) - assert retrieved_user is None From 1bb4ce333d34adcd88d127a04c5a7852a9f34750 Mon Sep 17 00:00:00 2001 From: dhuzjak Date: Thu, 18 Dec 2025 14:00:52 +0100 Subject: [PATCH 07/17] Add postgres db for development Add postgres db for testing in dev stage Use docker-compose to bring it up and initial setup --- .dockerignore | 64 ++ DOCKER.md | 172 ++++ Makefile | 50 +- docker-compose.yml | 42 + poetry.lock | 1988 
+++++++++++++++++++++++++++++++++++++++++++ scripts/init-db.sql | 6 + 6 files changed, 2318 insertions(+), 4 deletions(-) create mode 100644 .dockerignore create mode 100644 DOCKER.md create mode 100644 docker-compose.yml create mode 100644 poetry.lock create mode 100644 scripts/init-db.sql diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..c9415c1 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,64 @@ +# Python +__pycache__/ +*.py[cod] +*$py.class +*.so +.Python +env/ +venv/ +.venv/ +ENV/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg + +# Testing +.pytest_cache/ +.coverage +htmlcov/ +.tox/ +.mypy_cache/ +.ruff_cache/ + +# IDE +.vscode/ +.idea/ +*.swp +*.swo +*~ + +# Environment +.env +.env.local +.env.production + +# Git +.git/ +.gitignore + +# Documentation +*.md +docs/ + +# CI/CD +.github/ + +# Docker +docker-compose.yml +Dockerfile + +# Other +.DS_Store +*.log diff --git a/DOCKER.md b/DOCKER.md new file mode 100644 index 0000000..cf36262 --- /dev/null +++ b/DOCKER.md @@ -0,0 +1,172 @@ +# Docker Setup Guide + +This project includes a Docker Compose configuration for running PostgreSQL locally for development and testing. + +## Quick Start + +### Start PostgreSQL Database + +```bash +make docker-up +``` + +This will: +- Pull the PostgreSQL 16 Alpine image +- Create and start the PostgreSQL container +- Create both `app_db` and `test_db` databases +- Expose PostgreSQL on port 5432 + +### Connection Details + +``` +Host: localhost +Port: 5432 +User: postgres +Password: postgres +Databases: app_db, test_db +``` + +### Stop the Database + +```bash +make docker-down +``` + +### View Logs + +```bash +make docker-logs +``` + +Press `Ctrl+C` to exit the logs view. + +### Clean Up (Stop and Remove Volumes) + +```bash +make docker-clean +``` + +**Warning:** This will delete all data in the PostgreSQL databases. + +## Using with the Application + +Once the database is running, you can start the application: + +```bash +# Development mode with auto-reload +make run + +# Or explicitly +make run-dev +``` + +Run tests: + +```bash +make test +``` + +## Database Management + +### Connect to PostgreSQL CLI + +```bash +docker exec -it server-inventory-postgres psql -U postgres -d app_db +``` + +For test database: + +```bash +docker exec -it server-inventory-postgres psql -U postgres -d test_db +``` + +### List Databases + +```bash +docker exec server-inventory-postgres psql -U postgres -c "\l" +``` + +### Backup Database + +```bash +docker exec server-inventory-postgres pg_dump -U postgres app_db > backup.sql +``` + +### Restore Database + +```bash +docker exec -i server-inventory-postgres psql -U postgres -d app_db < backup.sql +``` + +## Environment Variables + +The Docker Compose setup uses the following environment variables (already configured in `.env` files): + +```bash +DATABASE_URL=postgresql://postgres:postgres@localhost:5432/app_db +TEST_DATABASE_URL=postgresql://postgres:postgres@localhost:5432/test_db +``` + +## Troubleshooting + +### Port 5432 Already in Use + +If you have PostgreSQL already running locally, you'll need to either: +1. Stop the local PostgreSQL service +2. Or change the port mapping in `docker-compose.yml`: + ```yaml + ports: + - "5433:5432" # Use port 5433 instead + ``` + Then update your `.env` files to use port 5433. 
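+
+To see which process is already bound to port 5432 before changing the mapping, a quick check (this assumes `lsof` is available; on Linux, `ss` works as well):
+
+```bash
+# Show the process currently listening on TCP port 5432
+lsof -i :5432
+
+# Alternative on Linux using ss
+ss -ltnp | grep 5432
+```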
+
+### Container Won't Start
+
+Check the logs:
+```bash
+docker-compose logs postgres
+```
+
+### Reset Everything
+
+```bash
+make docker-clean
+make docker-up
+```
+
+## Running the Application in Docker (Optional)
+
+The `docker-compose.yml` file includes a commented-out `app` service. To run the entire application stack in Docker:
+
+1. Uncomment the `app` service in `docker-compose.yml`
+2. Create a `Dockerfile` in the project root (see below)
+3. Run: `docker-compose up`
+
+### Sample Dockerfile
+
+```dockerfile
+FROM python:3.12-slim
+
+WORKDIR /app
+
+# Install system dependencies
+RUN apt-get update && apt-get install -y \
+    gcc \
+    postgresql-client \
+    && rm -rf /var/lib/apt/lists/*
+
+# Install Poetry
+RUN pip install poetry
+
+# Copy dependency manifests and install dependencies only;
+# --no-root skips installing the project itself, whose code is copied in below
+COPY pyproject.toml poetry.lock ./
+RUN poetry config virtualenvs.create false \
+    && poetry install --no-interaction --no-ansi --no-root
+
+# Copy application code
+COPY . .
+
+EXPOSE 8000
+
+CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
+```
diff --git a/Makefile b/Makefile
index cd8004d..f842054 100644
--- a/Makefile
+++ b/Makefile
@@ -1,4 +1,4 @@
-.PHONY: help init setup check-poetry install install-prod setup-db setup-precommit run run-dev run-prod test test-cov format lint ruff check clean
+.PHONY: help init setup check-poetry install install-prod setup-db setup-precommit docker-up docker-down docker-logs docker-clean run run-dev run-prod test test-cov format lint ruff check clean

 help:
 	@echo "Available commands:"
@@ -9,11 +9,21 @@ help:
 	@echo " make install-prod - Install production dependencies only"
 	@echo " make setup-db - Create PostgreSQL databases (app_db, test_db)"
 	@echo " make setup-precommit - Install pre-commit hooks"
+	@echo ""
+	@echo "Docker commands:"
+	@echo " make docker-up - Start PostgreSQL database with Docker Compose"
+	@echo " make docker-down - Stop Docker Compose services"
+	@echo " make docker-logs - View Docker Compose logs"
+	@echo " make docker-clean - Stop services and remove volumes"
+	@echo ""
+	@echo "Application commands:"
 	@echo " make run - Run the development server (default: development)"
 	@echo " make run-dev - Run the development server (APP_ENV=development)"
 	@echo " make run-prod - Run the production server (APP_ENV=production)"
 	@echo " make test - Run tests (APP_ENV=testing)"
 	@echo " make test-cov - Run tests with coverage report"
+	@echo ""
+	@echo "Code quality commands:"
 	@echo " make format - Format code with ruff, black and isort"
 	@echo " make lint - Run linting with ruff, flake8 and mypy"
 	@echo " make ruff - Run ruff linter and formatter"
@@ -39,9 +49,11 @@ init: check-poetry
 	@echo "✅ Project initialized successfully!"
 	@echo ""
 	@echo "Next steps:"
-	@echo " 1. Update .env with your database credentials if needed"
-	@echo " 2. Create databases: make setup-db"
-	@echo " 3. Run the app: make run"
+	@echo " 1. Start PostgreSQL database:"
+	@echo " - Using Docker: make docker-up (recommended)"
+	@echo " - Or install PostgreSQL locally and run: make setup-db"
+	@echo " 2. Run the app: make run"
+	@echo " 3. Run tests: make test"
 	@echo ""

 # Alias for init
@@ -98,10 +110,40 @@ setup-precommit:
 install:
 	poetry install
+	@poetry run pre-commit install --install-hooks 2>/dev/null || true

 install-prod:
 	poetry install --only main

+# Docker Compose commands
+docker-up:
+	@echo "🐳 Starting PostgreSQL database..."
+	docker-compose up -d
+	@echo "⏳ Waiting for PostgreSQL to be ready..."
+	@sleep 3
+	@echo "✅ PostgreSQL is running!"
+ @echo "" + @echo "Connection details:" + @echo " Host: localhost" + @echo " Port: 5432" + @echo " User: postgres" + @echo " Password: postgres" + @echo " Databases: app_db, test_db" + @echo "" + +docker-down: + @echo "🛑 Stopping Docker Compose services..." + docker-compose down + @echo "✅ Services stopped!" + +docker-logs: + docker-compose logs -f + +docker-clean: + @echo "🧹 Stopping services and removing volumes..." + docker-compose down -v + @echo "✅ Cleaned up!" + run: run-dev run-dev: diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..0ab2f0e --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,42 @@ +version: '3.8' + +services: + postgres: + image: postgres:16-alpine + container_name: server-inventory-postgres + environment: + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: app_db + ports: + - "5432:5432" + volumes: + - postgres_data:/var/lib/postgresql/data + - ./scripts/init-db.sql:/docker-entrypoint-initdb.d/init-db.sql + healthcheck: + test: ["CMD-SHELL", "pg_isready -U postgres"] + interval: 10s + timeout: 5s + retries: 5 + restart: unless-stopped + + # Uncomment to run the application in Docker + # app: + # build: . + # container_name: server-inventory-app + # environment: + # APP_ENV: development + # DATABASE_URL: postgresql://postgres:postgres@postgres:5432/app_db + # TEST_DATABASE_URL: postgresql://postgres:postgres@postgres:5432/test_db + # ports: + # - "8000:8000" + # volumes: + # - ./app:/app/app + # depends_on: + # postgres: + # condition: service_healthy + # command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload + +volumes: + postgres_data: + driver: local diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..5571733 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,1988 @@ +# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand. 
+ +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "anyio" +version = "4.12.0" +description = "High-level concurrency and networking framework on top of asyncio or Trio" +optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "anyio-4.12.0-py3-none-any.whl", hash = "sha256:dad2376a628f98eeca4881fc56cd06affd18f659b17a747d3ff0307ced94b1bb"}, + {file = "anyio-4.12.0.tar.gz", hash = "sha256:73c693b567b0c55130c104d0b43a9baf3aa6a31fc6110116509f27bf75e21ec0"}, +] + +[package.dependencies] +idna = ">=2.8" +typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} + +[package.extras] +trio = ["trio (>=0.31.0) ; python_version < \"3.10\"", "trio (>=0.32.0) ; python_version >= \"3.10\""] + +[[package]] +name = "asttokens" +version = "3.0.1" +description = "Annotate AST trees with source code positions" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "asttokens-3.0.1-py3-none-any.whl", hash = "sha256:15a3ebc0f43c2d0a50eeafea25e19046c68398e487b9f1f5b517f7c0f40f976a"}, + {file = "asttokens-3.0.1.tar.gz", hash = "sha256:71a4ee5de0bde6a31d64f6b13f2293ac190344478f081c3d1bccfcf5eacb0cb7"}, +] + +[package.extras] +astroid = ["astroid (>=2,<5)"] +test = ["astroid (>=2,<5)", "pytest (<9.0)", "pytest-cov", "pytest-xdist"] + +[[package]] +name = "bcrypt" +version = "5.0.0" +description = "Modern password hashing for your software and your servers" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "bcrypt-5.0.0-cp313-cp313t-macosx_10_12_universal2.whl", hash = "sha256:f3c08197f3039bec79cee59a606d62b96b16669cff3949f21e74796b6e3cd2be"}, + {file = "bcrypt-5.0.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:200af71bc25f22006f4069060c88ed36f8aa4ff7f53e67ff04d2ab3f1e79a5b2"}, + {file = "bcrypt-5.0.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:baade0a5657654c2984468efb7d6c110db87ea63ef5a4b54732e7e337253e44f"}, + {file = "bcrypt-5.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:c58b56cdfb03202b3bcc9fd8daee8e8e9b6d7e3163aa97c631dfcfcc24d36c86"}, + {file = "bcrypt-5.0.0-cp313-cp313t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4bfd2a34de661f34d0bda43c3e4e79df586e4716ef401fe31ea39d69d581ef23"}, + {file = "bcrypt-5.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:ed2e1365e31fc73f1825fa830f1c8f8917ca1b3ca6185773b349c20fd606cec2"}, + {file = "bcrypt-5.0.0-cp313-cp313t-manylinux_2_34_aarch64.whl", hash = "sha256:83e787d7a84dbbfba6f250dd7a5efd689e935f03dd83b0f919d39349e1f23f83"}, + {file = "bcrypt-5.0.0-cp313-cp313t-manylinux_2_34_x86_64.whl", hash = "sha256:137c5156524328a24b9fac1cb5db0ba618bc97d11970b39184c1d87dc4bf1746"}, + {file = "bcrypt-5.0.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:38cac74101777a6a7d3b3e3cfefa57089b5ada650dce2baf0cbdd9d65db22a9e"}, + {file = "bcrypt-5.0.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:d8d65b564ec849643d9f7ea05c6d9f0cd7ca23bdd4ac0c2dbef1104ab504543d"}, + {file = 
"bcrypt-5.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:741449132f64b3524e95cd30e5cd3343006ce146088f074f31ab26b94e6c75ba"}, + {file = "bcrypt-5.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:212139484ab3207b1f0c00633d3be92fef3c5f0af17cad155679d03ff2ee1e41"}, + {file = "bcrypt-5.0.0-cp313-cp313t-win32.whl", hash = "sha256:9d52ed507c2488eddd6a95bccee4e808d3234fa78dd370e24bac65a21212b861"}, + {file = "bcrypt-5.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:f6984a24db30548fd39a44360532898c33528b74aedf81c26cf29c51ee47057e"}, + {file = "bcrypt-5.0.0-cp313-cp313t-win_arm64.whl", hash = "sha256:9fffdb387abe6aa775af36ef16f55e318dcda4194ddbf82007a6f21da29de8f5"}, + {file = "bcrypt-5.0.0-cp314-cp314t-macosx_10_12_universal2.whl", hash = "sha256:4870a52610537037adb382444fefd3706d96d663ac44cbb2f37e3919dca3d7ef"}, + {file = "bcrypt-5.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:48f753100931605686f74e27a7b49238122aa761a9aefe9373265b8b7aa43ea4"}, + {file = "bcrypt-5.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f70aadb7a809305226daedf75d90379c397b094755a710d7014b8b117df1ebbf"}, + {file = "bcrypt-5.0.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:744d3c6b164caa658adcb72cb8cc9ad9b4b75c7db507ab4bc2480474a51989da"}, + {file = "bcrypt-5.0.0-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a28bc05039bdf3289d757f49d616ab3efe8cf40d8e8001ccdd621cd4f98f4fc9"}, + {file = "bcrypt-5.0.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:7f277a4b3390ab4bebe597800a90da0edae882c6196d3038a73adf446c4f969f"}, + {file = "bcrypt-5.0.0-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:79cfa161eda8d2ddf29acad370356b47f02387153b11d46042e93a0a95127493"}, + {file = "bcrypt-5.0.0-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a5393eae5722bcef046a990b84dff02b954904c36a194f6cfc817d7dca6c6f0b"}, + {file = "bcrypt-5.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7f4c94dec1b5ab5d522750cb059bb9409ea8872d4494fd152b53cca99f1ddd8c"}, + {file = "bcrypt-5.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0cae4cb350934dfd74c020525eeae0a5f79257e8a201c0c176f4b84fdbf2a4b4"}, + {file = "bcrypt-5.0.0-cp314-cp314t-win32.whl", hash = "sha256:b17366316c654e1ad0306a6858e189fc835eca39f7eb2cafd6aaca8ce0c40a2e"}, + {file = "bcrypt-5.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:92864f54fb48b4c718fc92a32825d0e42265a627f956bc0361fe869f1adc3e7d"}, + {file = "bcrypt-5.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:dd19cf5184a90c873009244586396a6a884d591a5323f0e8a5922560718d4993"}, + {file = "bcrypt-5.0.0-cp38-abi3-macosx_10_12_universal2.whl", hash = "sha256:fc746432b951e92b58317af8e0ca746efe93e66555f1b40888865ef5bf56446b"}, + {file = "bcrypt-5.0.0-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c2388ca94ffee269b6038d48747f4ce8df0ffbea43f31abfa18ac72f0218effb"}, + {file = "bcrypt-5.0.0-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:560ddb6ec730386e7b3b26b8b4c88197aaed924430e7b74666a586ac997249ef"}, + {file = "bcrypt-5.0.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d79e5c65dcc9af213594d6f7f1fa2c98ad3fc10431e7aa53c176b441943efbdd"}, + {file = "bcrypt-5.0.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2b732e7d388fa22d48920baa267ba5d97cca38070b69c0e2d37087b381c681fd"}, + {file = "bcrypt-5.0.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = 
"sha256:0c8e093ea2532601a6f686edbc2c6b2ec24131ff5c52f7610dd64fa4553b5464"}, + {file = "bcrypt-5.0.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:5b1589f4839a0899c146e8892efe320c0fa096568abd9b95593efac50a87cb75"}, + {file = "bcrypt-5.0.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:89042e61b5e808b67daf24a434d89bab164d4de1746b37a8d173b6b14f3db9ff"}, + {file = "bcrypt-5.0.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:e3cf5b2560c7b5a142286f69bde914494b6d8f901aaa71e453078388a50881c4"}, + {file = "bcrypt-5.0.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f632fd56fc4e61564f78b46a2269153122db34988e78b6be8b32d28507b7eaeb"}, + {file = "bcrypt-5.0.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:801cad5ccb6b87d1b430f183269b94c24f248dddbbc5c1f78b6ed231743e001c"}, + {file = "bcrypt-5.0.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3cf67a804fc66fc217e6914a5635000259fbbbb12e78a99488e4d5ba445a71eb"}, + {file = "bcrypt-5.0.0-cp38-abi3-win32.whl", hash = "sha256:3abeb543874b2c0524ff40c57a4e14e5d3a66ff33fb423529c88f180fd756538"}, + {file = "bcrypt-5.0.0-cp38-abi3-win_amd64.whl", hash = "sha256:35a77ec55b541e5e583eb3436ffbbf53b0ffa1fa16ca6782279daf95d146dcd9"}, + {file = "bcrypt-5.0.0-cp38-abi3-win_arm64.whl", hash = "sha256:cde08734f12c6a4e28dc6755cd11d3bdfea608d93d958fffbe95a7026ebe4980"}, + {file = "bcrypt-5.0.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0c418ca99fd47e9c59a301744d63328f17798b5947b0f791e9af3c1c499c2d0a"}, + {file = "bcrypt-5.0.0-cp39-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddb4e1500f6efdd402218ffe34d040a1196c072e07929b9820f363a1fd1f4191"}, + {file = "bcrypt-5.0.0-cp39-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7aeef54b60ceddb6f30ee3db090351ecf0d40ec6e2abf41430997407a46d2254"}, + {file = "bcrypt-5.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f0ce778135f60799d89c9693b9b398819d15f1921ba15fe719acb3178215a7db"}, + {file = "bcrypt-5.0.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a71f70ee269671460b37a449f5ff26982a6f2ba493b3eabdd687b4bf35f875ac"}, + {file = "bcrypt-5.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8429e1c410b4073944f03bd778a9e066e7fad723564a52ff91841d278dfc822"}, + {file = "bcrypt-5.0.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:edfcdcedd0d0f05850c52ba3127b1fce70b9f89e0fe5ff16517df7e81fa3cbb8"}, + {file = "bcrypt-5.0.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:611f0a17aa4a25a69362dcc299fda5c8a3d4f160e2abb3831041feb77393a14a"}, + {file = "bcrypt-5.0.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:db99dca3b1fdc3db87d7c57eac0c82281242d1eabf19dcb8a6b10eb29a2e72d1"}, + {file = "bcrypt-5.0.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:5feebf85a9cefda32966d8171f5db7e3ba964b77fdfe31919622256f80f9cf42"}, + {file = "bcrypt-5.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3ca8a166b1140436e058298a34d88032ab62f15aae1c598580333dc21d27ef10"}, + {file = "bcrypt-5.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:61afc381250c3182d9078551e3ac3a41da14154fbff647ddf52a769f588c4172"}, + {file = "bcrypt-5.0.0-cp39-abi3-win32.whl", hash = "sha256:64d7ce196203e468c457c37ec22390f1a61c85c6f0b8160fd752940ccfb3a683"}, + {file = "bcrypt-5.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:64ee8434b0da054d830fa8e89e1c8bf30061d539044a39524ff7dec90481e5c2"}, + {file = "bcrypt-5.0.0-cp39-abi3-win_arm64.whl", hash = "sha256:f2347d3534e76bf50bca5500989d6c1d05ed64b440408057a37673282c654927"}, + {file = 
"bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7edda91d5ab52b15636d9c30da87d2cc84f426c72b9dba7a9b4fe142ba11f534"}, + {file = "bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:046ad6db88edb3c5ece4369af997938fb1c19d6a699b9c1b27b0db432faae4c4"}, + {file = "bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:dcd58e2b3a908b5ecc9b9df2f0085592506ac2d5110786018ee5e160f28e0911"}, + {file = "bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:6b8f520b61e8781efee73cba14e3e8c9556ccfb375623f4f97429544734545b4"}, + {file = "bcrypt-5.0.0.tar.gz", hash = "sha256:f748f7c2d6fd375cc93d3fba7ef4a9e3a092421b8dbf34d8d4dc06be9492dfdd"}, +] + +[package.extras] +tests = ["pytest (>=3.2.1,!=3.3.0)"] +typecheck = ["mypy"] + +[[package]] +name = "black" +version = "23.12.1" +description = "The uncompromising code formatter." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, + {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, + {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, + {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, + {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, + {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, + {file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, + {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, + {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, + {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, + {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, + {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, + {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"}, + {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"}, + {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"}, + {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"}, + {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"}, + {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", 
hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"}, + {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"}, + {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"}, + {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, + {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4) ; sys_platform != \"win32\" or implementation_name != \"pypy\"", "aiohttp (>=3.7.4,!=3.9.0) ; sys_platform == \"win32\" and implementation_name == \"pypy\""] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "certifi" +version = "2025.11.12" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b"}, + {file = "certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316"}, +] + +[[package]] +name = "cfgv" +version = "3.5.0" +description = "Validate configuration and produce human readable error messages." +optional = false +python-versions = ">=3.10" +groups = ["dev"] +files = [ + {file = "cfgv-3.5.0-py2.py3-none-any.whl", hash = "sha256:a8dc6b26ad22ff227d2634a65cb388215ce6cc96bbcc5cfde7641ae87e8dacc0"}, + {file = "cfgv-3.5.0.tar.gz", hash = "sha256:d5b1034354820651caa73ede66a6294d6e95c1b00acc5e9b098e917404669132"}, +] + +[[package]] +name = "click" +version = "8.3.1" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.10" +groups = ["main", "dev"] +files = [ + {file = "click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6"}, + {file = "click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev"] +markers = "platform_system == \"Windows\" or sys_platform == \"win32\"" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coverage" +version = "7.13.0" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.10" +groups = ["dev"] +files = [ + {file = "coverage-7.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:02d9fb9eccd48f6843c98a37bd6817462f130b86da8660461e8f5e54d4c06070"}, + {file = "coverage-7.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:367449cf07d33dc216c083f2036bb7d976c6e4903ab31be400ad74ad9f85ce98"}, + {file = "coverage-7.13.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cdb3c9f8fef0a954c632f64328a3935988d33a6604ce4bf67ec3e39670f12ae5"}, + {file = "coverage-7.13.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d10fd186aac2316f9bbb46ef91977f9d394ded67050ad6d84d94ed6ea2e8e54e"}, + {file = "coverage-7.13.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f88ae3e69df2ab62fb0bc5219a597cb890ba5c438190ffa87490b315190bb33"}, + {file = "coverage-7.13.0-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c4be718e51e86f553bcf515305a158a1cd180d23b72f07ae76d6017c3cc5d791"}, + {file = "coverage-7.13.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a00d3a393207ae12f7c49bb1c113190883b500f48979abb118d8b72b8c95c032"}, + {file = "coverage-7.13.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a7b1cd820e1b6116f92c6128f1188e7afe421c7e1b35fa9836b11444e53ebd9"}, + {file = "coverage-7.13.0-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:37eee4e552a65866f15dedd917d5e5f3d59805994260720821e2c1b51ac3248f"}, + {file = "coverage-7.13.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:62d7c4f13102148c78d7353c6052af6d899a7f6df66a32bddcc0c0eb7c5326f8"}, + {file = "coverage-7.13.0-cp310-cp310-win32.whl", hash = "sha256:24e4e56304fdb56f96f80eabf840eab043b3afea9348b88be680ec5986780a0f"}, + {file = "coverage-7.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:74c136e4093627cf04b26a35dab8cbfc9b37c647f0502fc313376e11726ba303"}, + {file = "coverage-7.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0dfa3855031070058add1a59fdfda0192fd3e8f97e7c81de0596c145dea51820"}, + {file = "coverage-7.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4fdb6f54f38e334db97f72fa0c701e66d8479af0bc3f9bfb5b90f1c30f54500f"}, + {file = "coverage-7.13.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7e442c013447d1d8d195be62852270b78b6e255b79b8675bad8479641e21fd96"}, + {file = "coverage-7.13.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1ed5630d946859de835a85e9a43b721123a8a44ec26e2830b296d478c7fd4259"}, + {file = "coverage-7.13.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f15a931a668e58087bc39d05d2b4bf4b14ff2875b49c994bbdb1c2217a8daeb"}, + {file = "coverage-7.13.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:30a3a201a127ea57f7e14ba43c93c9c4be8b7d17a26e03bb49e6966d019eede9"}, + {file = "coverage-7.13.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7a485ff48fbd231efa32d58f479befce52dcb6bfb2a88bb7bf9a0b89b1bc8030"}, + {file = "coverage-7.13.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:22486cdafba4f9e471c816a2a5745337742a617fef68e890d8baf9f3036d7833"}, + {file = "coverage-7.13.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:263c3dbccc78e2e331e59e90115941b5f53e85cfcc6b3b2fbff1fd4e3d2c6ea8"}, + {file = "coverage-7.13.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e5330fa0cc1f5c3c4c3bb8e101b742025933e7848989370a1d4c8c5e401ea753"}, + {file = "coverage-7.13.0-cp311-cp311-win32.whl", hash = "sha256:0f4872f5d6c54419c94c25dd6ae1d015deeb337d06e448cd890a1e89a8ee7f3b"}, + {file = "coverage-7.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51a202e0f80f241ccb68e3e26e19ab5b3bf0f813314f2c967642f13ebcf1ddfe"}, + {file = "coverage-7.13.0-cp311-cp311-win_arm64.whl", hash = "sha256:d2a9d7f1c11487b1c69367ab3ac2d81b9b3721f097aa409a3191c3e90f8f3dd7"}, + {file = "coverage-7.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0b3d67d31383c4c68e19a88e28fc4c2e29517580f1b0ebec4a069d502ce1e0bf"}, + {file = "coverage-7.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:581f086833d24a22c89ae0fe2142cfaa1c92c930adf637ddf122d55083fb5a0f"}, + {file = "coverage-7.13.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0a3a30f0e257df382f5f9534d4ce3d4cf06eafaf5192beb1a7bd066cb10e78fb"}, + {file = "coverage-7.13.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:583221913fbc8f53b88c42e8dbb8fca1d0f2e597cb190ce45916662b8b9d9621"}, + {file = "coverage-7.13.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f5d9bd30756fff3e7216491a0d6d520c448d5124d3d8e8f56446d6412499e74"}, + {file = "coverage-7.13.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a23e5a1f8b982d56fa64f8e442e037f6ce29322f1f9e6c2344cd9e9f4407ee57"}, + {file = "coverage-7.13.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9b01c22bc74a7fb44066aaf765224c0d933ddf1f5047d6cdfe4795504a4493f8"}, + {file = "coverage-7.13.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:898cce66d0836973f48dda4e3514d863d70142bdf6dfab932b9b6a90ea5b222d"}, + {file = "coverage-7.13.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:3ab483ea0e251b5790c2aac03acde31bff0c736bf8a86829b89382b407cd1c3b"}, + {file = "coverage-7.13.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1d84e91521c5e4cb6602fe11ece3e1de03b2760e14ae4fcf1a4b56fa3c801fcd"}, + {file = "coverage-7.13.0-cp312-cp312-win32.whl", hash = "sha256:193c3887285eec1dbdb3f2bd7fbc351d570ca9c02ca756c3afbc71b3c98af6ef"}, + {file = "coverage-7.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:4f3e223b2b2db5e0db0c2b97286aba0036ca000f06aca9b12112eaa9af3d92ae"}, + {file = "coverage-7.13.0-cp312-cp312-win_arm64.whl", hash = "sha256:086cede306d96202e15a4b77ace8472e39d9f4e5f9fd92dd4fecdfb2313b2080"}, + {file = "coverage-7.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:28ee1c96109974af104028a8ef57cec21447d42d0e937c0275329272e370ebcf"}, + {file = "coverage-7.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d1e97353dcc5587b85986cda4ff3ec98081d7e84dd95e8b2a6d59820f0545f8a"}, + {file = "coverage-7.13.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = 
"sha256:99acd4dfdfeb58e1937629eb1ab6ab0899b131f183ee5f23e0b5da5cba2fec74"}, + {file = "coverage-7.13.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:ff45e0cd8451e293b63ced93161e189780baf444119391b3e7d25315060368a6"}, + {file = "coverage-7.13.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f4f72a85316d8e13234cafe0a9f81b40418ad7a082792fa4165bd7d45d96066b"}, + {file = "coverage-7.13.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:11c21557d0e0a5a38632cbbaca5f008723b26a89d70db6315523df6df77d6232"}, + {file = "coverage-7.13.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:76541dc8d53715fb4f7a3a06b34b0dc6846e3c69bc6204c55653a85dd6220971"}, + {file = "coverage-7.13.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:6e9e451dee940a86789134b6b0ffbe31c454ade3b849bb8a9d2cca2541a8e91d"}, + {file = "coverage-7.13.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:5c67dace46f361125e6b9cace8fe0b729ed8479f47e70c89b838d319375c8137"}, + {file = "coverage-7.13.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f59883c643cb19630500f57016f76cfdcd6845ca8c5b5ea1f6e17f74c8e5f511"}, + {file = "coverage-7.13.0-cp313-cp313-win32.whl", hash = "sha256:58632b187be6f0be500f553be41e277712baa278147ecb7559983c6d9faf7ae1"}, + {file = "coverage-7.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:73419b89f812f498aca53f757dd834919b48ce4799f9d5cad33ca0ae442bdb1a"}, + {file = "coverage-7.13.0-cp313-cp313-win_arm64.whl", hash = "sha256:eb76670874fdd6091eedcc856128ee48c41a9bbbb9c3f1c7c3cf169290e3ffd6"}, + {file = "coverage-7.13.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:6e63ccc6e0ad8986386461c3c4b737540f20426e7ec932f42e030320896c311a"}, + {file = "coverage-7.13.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:494f5459ffa1bd45e18558cd98710c36c0b8fbfa82a5eabcbe671d80ecffbfe8"}, + {file = "coverage-7.13.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:06cac81bf10f74034e055e903f5f946e3e26fc51c09fc9f584e4a1605d977053"}, + {file = "coverage-7.13.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f2ffc92b46ed6e6760f1d47a71e56b5664781bc68986dbd1836b2b70c0ce2071"}, + {file = "coverage-7.13.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0602f701057c6823e5db1b74530ce85f17c3c5be5c85fc042ac939cbd909426e"}, + {file = "coverage-7.13.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:25dc33618d45456ccb1d37bce44bc78cf269909aa14c4db2e03d63146a8a1493"}, + {file = "coverage-7.13.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:71936a8b3b977ddd0b694c28c6a34f4fff2e9dd201969a4ff5d5fc7742d614b0"}, + {file = "coverage-7.13.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:936bc20503ce24770c71938d1369461f0c5320830800933bc3956e2a4ded930e"}, + {file = "coverage-7.13.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:af0a583efaacc52ae2521f8d7910aff65cdb093091d76291ac5820d5e947fc1c"}, + {file = "coverage-7.13.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f1c23e24a7000da892a312fb17e33c5f94f8b001de44b7cf8ba2e36fbd15859e"}, + {file = "coverage-7.13.0-cp313-cp313t-win32.whl", hash = "sha256:5f8a0297355e652001015e93be345ee54393e45dc3050af4a0475c5a2b767d46"}, + {file = "coverage-7.13.0-cp313-cp313t-win_amd64.whl", hash = 
"sha256:6abb3a4c52f05e08460bd9acf04fec027f8718ecaa0d09c40ffbc3fbd70ecc39"}, + {file = "coverage-7.13.0-cp313-cp313t-win_arm64.whl", hash = "sha256:3ad968d1e3aa6ce5be295ab5fe3ae1bf5bb4769d0f98a80a0252d543a2ef2e9e"}, + {file = "coverage-7.13.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:453b7ec753cf5e4356e14fe858064e5520c460d3bbbcb9c35e55c0d21155c256"}, + {file = "coverage-7.13.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:af827b7cbb303e1befa6c4f94fd2bf72f108089cfa0f8abab8f4ca553cf5ca5a"}, + {file = "coverage-7.13.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:9987a9e4f8197a1000280f7cc089e3ea2c8b3c0a64d750537809879a7b4ceaf9"}, + {file = "coverage-7.13.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:3188936845cd0cb114fa6a51842a304cdbac2958145d03be2377ec41eb285d19"}, + {file = "coverage-7.13.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a2bdb3babb74079f021696cb46b8bb5f5661165c385d3a238712b031a12355be"}, + {file = "coverage-7.13.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7464663eaca6adba4175f6c19354feea61ebbdd735563a03d1e472c7072d27bb"}, + {file = "coverage-7.13.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8069e831f205d2ff1f3d355e82f511eb7c5522d7d413f5db5756b772ec8697f8"}, + {file = "coverage-7.13.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:6fb2d5d272341565f08e962cce14cdf843a08ac43bd621783527adb06b089c4b"}, + {file = "coverage-7.13.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:5e70f92ef89bac1ac8a99b3324923b4749f008fdbd7aa9cb35e01d7a284a04f9"}, + {file = "coverage-7.13.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:4b5de7d4583e60d5fd246dd57fcd3a8aa23c6e118a8c72b38adf666ba8e7e927"}, + {file = "coverage-7.13.0-cp314-cp314-win32.whl", hash = "sha256:a6c6e16b663be828a8f0b6c5027d36471d4a9f90d28444aa4ced4d48d7d6ae8f"}, + {file = "coverage-7.13.0-cp314-cp314-win_amd64.whl", hash = "sha256:0900872f2fdb3ee5646b557918d02279dc3af3dfb39029ac4e945458b13f73bc"}, + {file = "coverage-7.13.0-cp314-cp314-win_arm64.whl", hash = "sha256:3a10260e6a152e5f03f26db4a407c4c62d3830b9af9b7c0450b183615f05d43b"}, + {file = "coverage-7.13.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:9097818b6cc1cfb5f174e3263eba4a62a17683bcfe5c4b5d07f4c97fa51fbf28"}, + {file = "coverage-7.13.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0018f73dfb4301a89292c73be6ba5f58722ff79f51593352759c1790ded1cabe"}, + {file = "coverage-7.13.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:166ad2a22ee770f5656e1257703139d3533b4a0b6909af67c6b4a3adc1c98657"}, + {file = "coverage-7.13.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f6aaef16d65d1787280943f1c8718dc32e9cf141014e4634d64446702d26e0ff"}, + {file = "coverage-7.13.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e999e2dcc094002d6e2c7bbc1fb85b58ba4f465a760a8014d97619330cdbbbf3"}, + {file = "coverage-7.13.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:00c3d22cf6fb1cf3bf662aaaa4e563be8243a5ed2630339069799835a9cc7f9b"}, + {file = "coverage-7.13.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22ccfe8d9bb0d6134892cbe1262493a8c70d736b9df930f3f3afae0fe3ac924d"}, + {file = "coverage-7.13.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = 
"sha256:9372dff5ea15930fea0445eaf37bbbafbc771a49e70c0aeed8b4e2c2614cc00e"}, + {file = "coverage-7.13.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:69ac2c492918c2461bc6ace42d0479638e60719f2a4ef3f0815fa2df88e9f940"}, + {file = "coverage-7.13.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:739c6c051a7540608d097b8e13c76cfa85263ced467168dc6b477bae3df7d0e2"}, + {file = "coverage-7.13.0-cp314-cp314t-win32.whl", hash = "sha256:fe81055d8c6c9de76d60c94ddea73c290b416e061d40d542b24a5871bad498b7"}, + {file = "coverage-7.13.0-cp314-cp314t-win_amd64.whl", hash = "sha256:445badb539005283825959ac9fa4a28f712c214b65af3a2c464f1adc90f5fcbc"}, + {file = "coverage-7.13.0-cp314-cp314t-win_arm64.whl", hash = "sha256:de7f6748b890708578fc4b7bb967d810aeb6fcc9bff4bb77dbca77dab2f9df6a"}, + {file = "coverage-7.13.0-py3-none-any.whl", hash = "sha256:850d2998f380b1e266459ca5b47bc9e7daf9af1d070f66317972f382d46f1904"}, + {file = "coverage-7.13.0.tar.gz", hash = "sha256:a394aa27f2d7ff9bc04cf703817773a59ad6dfbd577032e690f961d2460ee936"}, +] + +[package.extras] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] + +[[package]] +name = "decorator" +version = "5.2.1" +description = "Decorators for Humans" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a"}, + {file = "decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360"}, +] + +[[package]] +name = "distlib" +version = "0.4.0" +description = "Distribution utilities" +optional = false +python-versions = "*" +groups = ["dev"] +files = [ + {file = "distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16"}, + {file = "distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d"}, +] + +[[package]] +name = "dnspython" +version = "2.8.0" +description = "DNS toolkit" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "dnspython-2.8.0-py3-none-any.whl", hash = "sha256:01d9bbc4a2d76bf0db7c1f729812ded6d912bd318d3b1cf81d30c0f845dbf3af"}, + {file = "dnspython-2.8.0.tar.gz", hash = "sha256:181d3c6996452cb1189c4046c61599b84a5a86e099562ffde77d26984ff26d0f"}, +] + +[package.extras] +dev = ["black (>=25.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "hypercorn (>=0.17.0)", "mypy (>=1.17)", "pylint (>=3)", "pytest (>=8.4)", "pytest-cov (>=6.2.0)", "quart-trio (>=0.12.0)", "sphinx (>=8.2.0)", "sphinx-rtd-theme (>=3.0.0)", "twine (>=6.1.0)", "wheel (>=0.45.0)"] +dnssec = ["cryptography (>=45)"] +doh = ["h2 (>=4.2.0)", "httpcore (>=1.0.0)", "httpx (>=0.28.0)"] +doq = ["aioquic (>=1.2.0)"] +idna = ["idna (>=3.10)"] +trio = ["trio (>=0.30)"] +wmi = ["wmi (>=1.5.1) ; platform_system == \"Windows\""] + +[[package]] +name = "email-validator" +version = "2.3.0" +description = "A robust email address syntax and deliverability validation library." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "email_validator-2.3.0-py3-none-any.whl", hash = "sha256:80f13f623413e6b197ae73bb10bf4eb0908faf509ad8362c5edeb0be7fd450b4"}, + {file = "email_validator-2.3.0.tar.gz", hash = "sha256:9fc05c37f2f6cf439ff414f8fc46d917929974a82244c20eb10231ba60c54426"}, +] + +[package.dependencies] +dnspython = ">=2.0.0" +idna = ">=2.0.0" + +[[package]] +name = "executing" +version = "2.2.1" +description = "Get the currently executing AST node of a frame, and other information" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "executing-2.2.1-py2.py3-none-any.whl", hash = "sha256:760643d3452b4d777d295bb167ccc74c64a81df23fb5e08eff250c425a4b2017"}, + {file = "executing-2.2.1.tar.gz", hash = "sha256:3632cc370565f6648cc328b32435bd120a1e4ebb20c77e3fdde9a13cd1e533c4"}, +] + +[package.extras] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich ; python_version >= \"3.11\""] + +[[package]] +name = "fastapi" +version = "0.109.2" +description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "fastapi-0.109.2-py3-none-any.whl", hash = "sha256:2c9bab24667293b501cad8dd388c05240c850b58ec5876ee3283c47d6e1e3a4d"}, + {file = "fastapi-0.109.2.tar.gz", hash = "sha256:f3817eac96fe4f65a2ebb4baa000f394e55f5fccdaf7f75250804bc58f354f73"}, +] + +[package.dependencies] +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" +starlette = ">=0.36.3,<0.37.0" +typing-extensions = ">=4.8.0" + +[package.extras] +all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] + +[[package]] +name = "filelock" +version = "3.20.1" +description = "A platform independent file lock." 
+optional = false +python-versions = ">=3.10" +groups = ["dev"] +files = [ + {file = "filelock-3.20.1-py3-none-any.whl", hash = "sha256:15d9e9a67306188a44baa72f569d2bfd803076269365fdea0934385da4dc361a"}, + {file = "filelock-3.20.1.tar.gz", hash = "sha256:b8360948b351b80f420878d8516519a2204b07aefcdcfd24912a5d33127f188c"}, +] + +[[package]] +name = "flake8" +version = "7.3.0" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "flake8-7.3.0-py2.py3-none-any.whl", hash = "sha256:b9696257b9ce8beb888cdbe31cf885c90d31928fe202be0889a7cdafad32f01e"}, + {file = "flake8-7.3.0.tar.gz", hash = "sha256:fe044858146b9fc69b551a4b490d69cf960fcb78ad1edcb84e7fbb1b4a8e3872"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.14.0,<2.15.0" +pyflakes = ">=3.4.0,<3.5.0" + +[[package]] +name = "h11" +version = "0.16.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"}, + {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"}, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"}, + {file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.16" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<1.0)"] + +[[package]] +name = "httptools" +version = "0.7.1" +description = "A collection of framework independent HTTP protocol utils." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "httptools-0.7.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:11d01b0ff1fe02c4c32d60af61a4d613b74fad069e47e06e9067758c01e9ac78"}, + {file = "httptools-0.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:84d86c1e5afdc479a6fdabf570be0d3eb791df0ae727e8dbc0259ed1249998d4"}, + {file = "httptools-0.7.1-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c8c751014e13d88d2be5f5f14fc8b89612fcfa92a9cc480f2bc1598357a23a05"}, + {file = "httptools-0.7.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:654968cb6b6c77e37b832a9be3d3ecabb243bbe7a0b8f65fbc5b6b04c8fcabed"}, + {file = "httptools-0.7.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b580968316348b474b020edf3988eecd5d6eec4634ee6561e72ae3a2a0e00a8a"}, + {file = "httptools-0.7.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d496e2f5245319da9d764296e86c5bb6fcf0cf7a8806d3d000717a889c8c0b7b"}, + {file = "httptools-0.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:cbf8317bfccf0fed3b5680c559d3459cccf1abe9039bfa159e62e391c7270568"}, + {file = "httptools-0.7.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:474d3b7ab469fefcca3697a10d11a32ee2b9573250206ba1e50d5980910da657"}, + {file = "httptools-0.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3c3b7366bb6c7b96bd72d0dbe7f7d5eead261361f013be5f6d9590465ea1c70"}, + {file = "httptools-0.7.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:379b479408b8747f47f3b253326183d7c009a3936518cdb70db58cffd369d9df"}, + {file = "httptools-0.7.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cad6b591a682dcc6cf1397c3900527f9affef1e55a06c4547264796bbd17cf5e"}, + {file = "httptools-0.7.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:eb844698d11433d2139bbeeb56499102143beb582bd6c194e3ba69c22f25c274"}, + {file = "httptools-0.7.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f65744d7a8bdb4bda5e1fa23e4ba16832860606fcc09d674d56e425e991539ec"}, + {file = "httptools-0.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:135fbe974b3718eada677229312e97f3b31f8a9c8ffa3ae6f565bf808d5b6bcb"}, + {file = "httptools-0.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:38e0c83a2ea9746ebbd643bdfb521b9aa4a91703e2cd705c20443405d2fd16a5"}, + {file = "httptools-0.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f25bbaf1235e27704f1a7b86cd3304eabc04f569c828101d94a0e605ef7205a5"}, + {file = "httptools-0.7.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c15f37ef679ab9ecc06bfc4e6e8628c32a8e4b305459de7cf6785acd57e4d03"}, + {file = "httptools-0.7.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7fe6e96090df46b36ccfaf746f03034e5ab723162bc51b0a4cf58305324036f2"}, + {file = "httptools-0.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f72fdbae2dbc6e68b8239defb48e6a5937b12218e6ffc2c7846cc37befa84362"}, + {file = "httptools-0.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e99c7b90a29fd82fea9ef57943d501a16f3404d7b9ee81799d41639bdaae412c"}, + {file = "httptools-0.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:3e14f530fefa7499334a79b0cf7e7cd2992870eb893526fb097d51b4f2d0f321"}, + {file = "httptools-0.7.1-cp313-cp313-macosx_10_13_universal2.whl", hash = 
"sha256:6babce6cfa2a99545c60bfef8bee0cc0545413cb0018f617c8059a30ad985de3"}, + {file = "httptools-0.7.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:601b7628de7504077dd3dcb3791c6b8694bbd967148a6d1f01806509254fb1ca"}, + {file = "httptools-0.7.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:04c6c0e6c5fb0739c5b8a9eb046d298650a0ff38cf42537fc372b28dc7e4472c"}, + {file = "httptools-0.7.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:69d4f9705c405ae3ee83d6a12283dc9feba8cc6aaec671b412917e644ab4fa66"}, + {file = "httptools-0.7.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:44c8f4347d4b31269c8a9205d8a5ee2df5322b09bbbd30f8f862185bb6b05346"}, + {file = "httptools-0.7.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:465275d76db4d554918aba40bf1cbebe324670f3dfc979eaffaa5d108e2ed650"}, + {file = "httptools-0.7.1-cp313-cp313-win_amd64.whl", hash = "sha256:322d00c2068d125bd570f7bf78b2d367dad02b919d8581d7476d8b75b294e3e6"}, + {file = "httptools-0.7.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:c08fe65728b8d70b6923ce31e3956f859d5e1e8548e6f22ec520a962c6757270"}, + {file = "httptools-0.7.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:7aea2e3c3953521c3c51106ee11487a910d45586e351202474d45472db7d72d3"}, + {file = "httptools-0.7.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0e68b8582f4ea9166be62926077a3334064d422cf08ab87d8b74664f8e9058e1"}, + {file = "httptools-0.7.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:df091cf961a3be783d6aebae963cc9b71e00d57fa6f149025075217bc6a55a7b"}, + {file = "httptools-0.7.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f084813239e1eb403ddacd06a30de3d3e09a9b76e7894dcda2b22f8a726e9c60"}, + {file = "httptools-0.7.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:7347714368fb2b335e9063bc2b96f2f87a9ceffcd9758ac295f8bbcd3ffbc0ca"}, + {file = "httptools-0.7.1-cp314-cp314-win_amd64.whl", hash = "sha256:cfabda2a5bb85aa2a904ce06d974a3f30fb36cc63d7feaddec05d2050acede96"}, + {file = "httptools-0.7.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ac50afa68945df63ec7a2707c506bd02239272288add34539a2ef527254626a4"}, + {file = "httptools-0.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:de987bb4e7ac95b99b805b99e0aae0ad51ae61df4263459d36e07cf4052d8b3a"}, + {file = "httptools-0.7.1-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d169162803a24425eb5e4d51d79cbf429fd7a491b9e570a55f495ea55b26f0bf"}, + {file = "httptools-0.7.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:49794f9250188a57fa73c706b46cb21a313edb00d337ca4ce1a011fe3c760b28"}, + {file = "httptools-0.7.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aeefa0648362bb97a7d6b5ff770bfb774930a327d7f65f8208394856862de517"}, + {file = "httptools-0.7.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0d92b10dbf0b3da4823cde6a96d18e6ae358a9daa741c71448975f6a2c339cad"}, + {file = "httptools-0.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:5ddbd045cfcb073db2449563dd479057f2c2b681ebc232380e63ef15edc9c023"}, + {file = "httptools-0.7.1.tar.gz", hash = "sha256:abd72556974f8e7c74a259655924a717a2365b236c882c3f6f8a45fe94703ac9"}, +] + +[[package]] +name = "httpx" +version = "0.26.0" +description = "The next generation HTTP client." 
+optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "httpx-0.26.0-py3-none-any.whl", hash = "sha256:8915f5a3627c4d47b73e8202457cb28f1266982d1159bd5779d86a80c0eab1cd"}, + {file = "httpx-0.26.0.tar.gz", hash = "sha256:451b55c30d5185ea6b23c2c793abf9bb237d2a7dfb901ced6ff69ad37ec1dfaf"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] + +[[package]] +name = "identify" +version = "2.6.15" +description = "File identification library for Python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "identify-2.6.15-py2.py3-none-any.whl", hash = "sha256:1181ef7608e00704db228516541eb83a88a9f94433a8c80bb9b5bd54b1d81757"}, + {file = "identify-2.6.15.tar.gz", hash = "sha256:e4f4864b96c6557ef2a1e1c951771838f4edc9df3a72ec7118b338801b11c7bf"}, +] + +[package.extras] +license = ["ukkonen"] + +[[package]] +name = "idna" +version = "3.11" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea"}, + {file = "idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "iniconfig" +version = "2.3.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.10" +groups = ["dev"] +files = [ + {file = "iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12"}, + {file = "iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730"}, +] + +[[package]] +name = "ipython" +version = "8.37.0" +description = "IPython: Productive Interactive Computing" +optional = false +python-versions = ">=3.10" +groups = ["dev"] +files = [ + {file = "ipython-8.37.0-py3-none-any.whl", hash = "sha256:ed87326596b878932dbcb171e3e698845434d8c61b8d8cd474bf663041a9dcf2"}, + {file = "ipython-8.37.0.tar.gz", hash = "sha256:ca815841e1a41a1e6b73a0b08f3038af9b2252564d01fc405356d34033012216"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +decorator = "*" +jedi = ">=0.16" +matplotlib-inline = "*" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""} +prompt_toolkit = ">=3.0.41,<3.1.0" +pygments = ">=2.4.0" +stack_data = "*" +traitlets = ">=5.13.0" + +[package.extras] +all = ["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"] +black = ["black"] +doc = ["docrepr", "exceptiongroup", "intersphinx_registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli ; python_version < \"3.11\"", "typing_extensions"] +kernel = ["ipykernel"] +matplotlib = ["matplotlib"] +nbconvert = ["nbconvert"] +nbformat = ["nbformat"] +notebook = ["ipywidgets", "notebook"] +parallel = ["ipyparallel"] +qtconsole = ["qtconsole"] 
+test = ["packaging", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"] +test-extra = ["curio", "ipython[test]", "jupyter_ai", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"] + +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." +optional = false +python-versions = ">=3.8.0" +groups = ["dev"] +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + +[[package]] +name = "jedi" +version = "0.19.2" +description = "An autocompletion tool for Python that can be used for text editors." +optional = false +python-versions = ">=3.6" +groups = ["dev"] +files = [ + {file = "jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9"}, + {file = "jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0"}, +] + +[package.dependencies] +parso = ">=0.8.4,<0.9.0" + +[package.extras] +docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["Django", "attrs", "colorama", "docopt", "pytest (<9.0.0)"] + +[[package]] +name = "librt" +version = "0.7.4" +description = "Mypyc runtime library" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +markers = "platform_python_implementation != \"PyPy\"" +files = [ + {file = "librt-0.7.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dc300cb5a5a01947b1ee8099233156fdccd5001739e5f596ecfbc0dab07b5a3b"}, + {file = "librt-0.7.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ee8d3323d921e0f6919918a97f9b5445a7dfe647270b2629ec1008aa676c0bc0"}, + {file = "librt-0.7.4-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:95cb80854a355b284c55f79674f6187cc9574df4dc362524e0cce98c89ee8331"}, + {file = "librt-0.7.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ca1caedf8331d8ad6027f93b52d68ed8f8009f5c420c246a46fe9d3be06be0f"}, + {file = "librt-0.7.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c2a6f1236151e6fe1da289351b5b5bce49651c91554ecc7b70a947bced6fe212"}, + {file = "librt-0.7.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7766b57aeebaf3f1dac14fdd4a75c9a61f2ed56d8ebeefe4189db1cb9d2a3783"}, + {file = "librt-0.7.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:1c4c89fb01157dd0a3bfe9e75cd6253b0a1678922befcd664eca0772a4c6c979"}, + {file = "librt-0.7.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f7fa8beef580091c02b4fd26542de046b2abfe0aaefa02e8bcf68acb7618f2b3"}, + {file = "librt-0.7.4-cp310-cp310-win32.whl", hash = 
"sha256:543c42fa242faae0466fe72d297976f3c710a357a219b1efde3a0539a68a6997"}, + {file = "librt-0.7.4-cp310-cp310-win_amd64.whl", hash = "sha256:25cc40d8eb63f0a7ea4c8f49f524989b9df901969cb860a2bc0e4bad4b8cb8a8"}, + {file = "librt-0.7.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3485b9bb7dfa66167d5500ffdafdc35415b45f0da06c75eb7df131f3357b174a"}, + {file = "librt-0.7.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:188b4b1a770f7f95ea035d5bbb9d7367248fc9d12321deef78a269ebf46a5729"}, + {file = "librt-0.7.4-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1b668b1c840183e4e38ed5a99f62fac44c3a3eef16870f7f17cfdfb8b47550ed"}, + {file = "librt-0.7.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0e8f864b521f6cfedb314d171630f827efee08f5c3462bcbc2244ab8e1768cd6"}, + {file = "librt-0.7.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4df7c9def4fc619a9c2ab402d73a0c5b53899abe090e0100323b13ccb5a3dd82"}, + {file = "librt-0.7.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f79bc3595b6ed159a1bf0cdc70ed6ebec393a874565cab7088a219cca14da727"}, + {file = "librt-0.7.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:77772a4b8b5f77d47d883846928c36d730b6e612a6388c74cba33ad9eb149c11"}, + {file = "librt-0.7.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:064a286e6ab0b4c900e228ab4fa9cb3811b4b83d3e0cc5cd816b2d0f548cb61c"}, + {file = "librt-0.7.4-cp311-cp311-win32.whl", hash = "sha256:42da201c47c77b6cc91fc17e0e2b330154428d35d6024f3278aa2683e7e2daf2"}, + {file = "librt-0.7.4-cp311-cp311-win_amd64.whl", hash = "sha256:d31acb5886c16ae1711741f22504195af46edec8315fe69b77e477682a87a83e"}, + {file = "librt-0.7.4-cp311-cp311-win_arm64.whl", hash = "sha256:114722f35093da080a333b3834fff04ef43147577ed99dd4db574b03a5f7d170"}, + {file = "librt-0.7.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7dd3b5c37e0fb6666c27cf4e2c88ae43da904f2155c4cfc1e5a2fdce3b9fcf92"}, + {file = "librt-0.7.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a9c5de1928c486201b23ed0cc4ac92e6e07be5cd7f3abc57c88a9cf4f0f32108"}, + {file = "librt-0.7.4-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:078ae52ffb3f036396cc4aed558e5b61faedd504a3c1f62b8ae34bf95ae39d94"}, + {file = "librt-0.7.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce58420e25097b2fc201aef9b9f6d65df1eb8438e51154e1a7feb8847e4a55ab"}, + {file = "librt-0.7.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b719c8730c02a606dc0e8413287e8e94ac2d32a51153b300baf1f62347858fba"}, + {file = "librt-0.7.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3749ef74c170809e6dee68addec9d2458700a8de703de081c888e92a8b015cf9"}, + {file = "librt-0.7.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b35c63f557653c05b5b1b6559a074dbabe0afee28ee2a05b6c9ba21ad0d16a74"}, + {file = "librt-0.7.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1ef704e01cb6ad39ad7af668d51677557ca7e5d377663286f0ee1b6b27c28e5f"}, + {file = "librt-0.7.4-cp312-cp312-win32.whl", hash = "sha256:c66c2b245926ec15188aead25d395091cb5c9df008d3b3207268cd65557d6286"}, + {file = "librt-0.7.4-cp312-cp312-win_amd64.whl", hash = "sha256:71a56f4671f7ff723451f26a6131754d7c1809e04e22ebfbac1db8c9e6767a20"}, + {file = "librt-0.7.4-cp312-cp312-win_arm64.whl", hash = 
"sha256:419eea245e7ec0fe664eb7e85e7ff97dcdb2513ca4f6b45a8ec4a3346904f95a"}, + {file = "librt-0.7.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d44a1b1ba44cbd2fc3cb77992bef6d6fdb1028849824e1dd5e4d746e1f7f7f0b"}, + {file = "librt-0.7.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c9cab4b3de1f55e6c30a84c8cee20e4d3b2476f4d547256694a1b0163da4fe32"}, + {file = "librt-0.7.4-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:2857c875f1edd1feef3c371fbf830a61b632fb4d1e57160bb1e6a3206e6abe67"}, + {file = "librt-0.7.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b370a77be0a16e1ad0270822c12c21462dc40496e891d3b0caf1617c8cc57e20"}, + {file = "librt-0.7.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d05acd46b9a52087bfc50c59dfdf96a2c480a601e8898a44821c7fd676598f74"}, + {file = "librt-0.7.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:70969229cb23d9c1a80e14225838d56e464dc71fa34c8342c954fc50e7516dee"}, + {file = "librt-0.7.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4450c354b89dbb266730893862dbff06006c9ed5b06b6016d529b2bf644fc681"}, + {file = "librt-0.7.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:adefe0d48ad35b90b6f361f6ff5a1bd95af80c17d18619c093c60a20e7a5b60c"}, + {file = "librt-0.7.4-cp313-cp313-win32.whl", hash = "sha256:21ea710e96c1e050635700695095962a22ea420d4b3755a25e4909f2172b4ff2"}, + {file = "librt-0.7.4-cp313-cp313-win_amd64.whl", hash = "sha256:772e18696cf5a64afee908662fbcb1f907460ddc851336ee3a848ef7684c8e1e"}, + {file = "librt-0.7.4-cp313-cp313-win_arm64.whl", hash = "sha256:52e34c6af84e12921748c8354aa6acf1912ca98ba60cdaa6920e34793f1a0788"}, + {file = "librt-0.7.4-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:4f1ee004942eaaed6e06c087d93ebc1c67e9a293e5f6b9b5da558df6bf23dc5d"}, + {file = "librt-0.7.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:d854c6dc0f689bad7ed452d2a3ecff58029d80612d336a45b62c35e917f42d23"}, + {file = "librt-0.7.4-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a4f7339d9e445280f23d63dea842c0c77379c4a47471c538fc8feedab9d8d063"}, + {file = "librt-0.7.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:39003fc73f925e684f8521b2dbf34f61a5deb8a20a15dcf53e0d823190ce8848"}, + {file = "librt-0.7.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6bb15ee29d95875ad697d449fe6071b67f730f15a6961913a2b0205015ca0843"}, + {file = "librt-0.7.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:02a69369862099e37d00765583052a99d6a68af7e19b887e1b78fee0146b755a"}, + {file = "librt-0.7.4-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:ec72342cc4d62f38b25a94e28b9efefce41839aecdecf5e9627473ed04b7be16"}, + {file = "librt-0.7.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:776dbb9bfa0fc5ce64234b446995d8d9f04badf64f544ca036bd6cff6f0732ce"}, + {file = "librt-0.7.4-cp314-cp314-win32.whl", hash = "sha256:0f8cac84196d0ffcadf8469d9ded4d4e3a8b1c666095c2a291e22bf58e1e8a9f"}, + {file = "librt-0.7.4-cp314-cp314-win_amd64.whl", hash = "sha256:037f5cb6fe5abe23f1dc058054d50e9699fcc90d0677eee4e4f74a8677636a1a"}, + {file = "librt-0.7.4-cp314-cp314-win_arm64.whl", hash = "sha256:a5deebb53d7a4d7e2e758a96befcd8edaaca0633ae71857995a0f16033289e44"}, + {file = "librt-0.7.4-cp314-cp314t-macosx_10_13_x86_64.whl", hash = 
"sha256:b4c25312c7f4e6ab35ab16211bdf819e6e4eddcba3b2ea632fb51c9a2a97e105"}, + {file = "librt-0.7.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:618b7459bb392bdf373f2327e477597fff8f9e6a1878fffc1b711c013d1b0da4"}, + {file = "librt-0.7.4-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1437c3f72a30c7047f16fd3e972ea58b90172c3c6ca309645c1c68984f05526a"}, + {file = "librt-0.7.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c96cb76f055b33308f6858b9b594618f1b46e147a4d03a4d7f0c449e304b9b95"}, + {file = "librt-0.7.4-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:28f990e6821204f516d09dc39966ef8b84556ffd648d5926c9a3f681e8de8906"}, + {file = "librt-0.7.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:bc4aebecc79781a1b77d7d4e7d9fe080385a439e198d993b557b60f9117addaf"}, + {file = "librt-0.7.4-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:022cc673e69283a42621dd453e2407cf1647e77f8bd857d7ad7499901e62376f"}, + {file = "librt-0.7.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:2b3ca211ae8ea540569e9c513da052699b7b06928dcda61247cb4f318122bdb5"}, + {file = "librt-0.7.4-cp314-cp314t-win32.whl", hash = "sha256:8a461f6456981d8c8e971ff5a55f2e34f4e60871e665d2f5fde23ee74dea4eeb"}, + {file = "librt-0.7.4-cp314-cp314t-win_amd64.whl", hash = "sha256:721a7b125a817d60bf4924e1eec2a7867bfcf64cfc333045de1df7a0629e4481"}, + {file = "librt-0.7.4-cp314-cp314t-win_arm64.whl", hash = "sha256:76b2ba71265c0102d11458879b4d53ccd0b32b0164d14deb8d2b598a018e502f"}, + {file = "librt-0.7.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6fc4aa67fedd827a601f97f0e61cc72711d0a9165f2c518e9a7c38fc1568b9ad"}, + {file = "librt-0.7.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e710c983d29d9cc4da29113b323647db286eaf384746344f4a233708cca1a82c"}, + {file = "librt-0.7.4-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:43a2515a33f2bc17b15f7fb49ff6426e49cb1d5b2539bc7f8126b9c5c7f37164"}, + {file = "librt-0.7.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0fd766bb9ace3498f6b93d32f30c0e7c8ce6b727fecbc84d28160e217bb66254"}, + {file = "librt-0.7.4-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ce1b44091355b68cffd16e2abac07c1cafa953fa935852d3a4dd8975044ca3bf"}, + {file = "librt-0.7.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5a72b905420c4bb2c10c87b5c09fe6faf4a76d64730e3802feef255e43dfbf5a"}, + {file = "librt-0.7.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:07c4d7c9305e75a0edd3427b79c7bd1d019cd7eddaa7c89dbb10e0c7946bffbb"}, + {file = "librt-0.7.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2e734c2c54423c6dcc77f58a8585ba83b9f72e422f9edf09cab1096d4a4bdc82"}, + {file = "librt-0.7.4-cp39-cp39-win32.whl", hash = "sha256:a34ae11315d4e26326aaf04e21ccd8d9b7de983635fba38d73e203a9c8e3fe3d"}, + {file = "librt-0.7.4-cp39-cp39-win_amd64.whl", hash = "sha256:7e4b5ffa1614ad4f32237d739699be444be28de95071bfa4e66a8da9fa777798"}, + {file = "librt-0.7.4.tar.gz", hash = "sha256:3871af56c59864d5fd21d1ac001eb2fb3b140d52ba0454720f2e4a19812404ba"}, +] + +[[package]] +name = "matplotlib-inline" +version = "0.2.1" +description = "Inline Matplotlib backend for Jupyter" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "matplotlib_inline-0.2.1-py3-none-any.whl", hash = 
"sha256:d56ce5156ba6085e00a9d54fead6ed29a9c47e215cd1bba2e976ef39f5710a76"}, + {file = "matplotlib_inline-0.2.1.tar.gz", hash = "sha256:e1ee949c340d771fc39e241ea75683deb94762c8fa5f2927ec57c83c4dffa9fe"}, +] + +[package.dependencies] +traitlets = "*" + +[package.extras] +test = ["flake8", "nbdime", "nbval", "notebook", "pytest"] + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +groups = ["dev"] +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "mypy" +version = "1.19.1" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "mypy-1.19.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5f05aa3d375b385734388e844bc01733bd33c644ab48e9684faa54e5389775ec"}, + {file = "mypy-1.19.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:022ea7279374af1a5d78dfcab853fe6a536eebfda4b59deab53cd21f6cd9f00b"}, + {file = "mypy-1.19.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee4c11e460685c3e0c64a4c5de82ae143622410950d6be863303a1c4ba0e36d6"}, + {file = "mypy-1.19.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de759aafbae8763283b2ee5869c7255391fbc4de3ff171f8f030b5ec48381b74"}, + {file = "mypy-1.19.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ab43590f9cd5108f41aacf9fca31841142c786827a74ab7cc8a2eacb634e09a1"}, + {file = "mypy-1.19.1-cp310-cp310-win_amd64.whl", hash = "sha256:2899753e2f61e571b3971747e302d5f420c3fd09650e1951e99f823bc3089dac"}, + {file = "mypy-1.19.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d8dfc6ab58ca7dda47d9237349157500468e404b17213d44fc1cb77bce532288"}, + {file = "mypy-1.19.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e3f276d8493c3c97930e354b2595a44a21348b320d859fb4a2b9f66da9ed27ab"}, + {file = "mypy-1.19.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2abb24cf3f17864770d18d673c85235ba52456b36a06b6afc1e07c1fdcd3d0e6"}, + {file = "mypy-1.19.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a009ffa5a621762d0c926a078c2d639104becab69e79538a494bcccb62cc0331"}, + {file = "mypy-1.19.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f7cee03c9a2e2ee26ec07479f38ea9c884e301d42c6d43a19d20fb014e3ba925"}, + {file = "mypy-1.19.1-cp311-cp311-win_amd64.whl", hash = "sha256:4b84a7a18f41e167f7995200a1d07a4a6810e89d29859df936f1c3923d263042"}, + {file = "mypy-1.19.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a8174a03289288c1f6c46d55cef02379b478bfbc8e358e02047487cad44c6ca1"}, + {file = "mypy-1.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ffcebe56eb09ff0c0885e750036a095e23793ba6c2e894e7e63f6d89ad51f22e"}, + {file = "mypy-1.19.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b64d987153888790bcdb03a6473d321820597ab8dd9243b27a92153c4fa50fd2"}, + {file = "mypy-1.19.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c35d298c2c4bba75feb2195655dfea8124d855dfd7343bf8b8c055421eaf0cf8"}, + {file = "mypy-1.19.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:34c81968774648ab5ac09c29a375fdede03ba253f8f8287847bd480782f73a6a"}, + {file = "mypy-1.19.1-cp312-cp312-win_amd64.whl", hash = "sha256:b10e7c2cd7870ba4ad9b2d8a6102eb5ffc1f16ca35e3de6bfa390c1113029d13"}, + {file = "mypy-1.19.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e3157c7594ff2ef1634ee058aafc56a82db665c9438fd41b390f3bde1ab12250"}, + {file = "mypy-1.19.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdb12f69bcc02700c2b47e070238f42cb87f18c0bc1fc4cdb4fb2bc5fd7a3b8b"}, + {file = "mypy-1.19.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f859fb09d9583a985be9a493d5cfc5515b56b08f7447759a0c5deaf68d80506e"}, + {file = "mypy-1.19.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9a6538e0415310aad77cb94004ca6482330fece18036b5f360b62c45814c4ef"}, + {file = "mypy-1.19.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:da4869fc5e7f62a88f3fe0b5c919d1d9f7ea3cef92d3689de2823fd27e40aa75"}, + {file = "mypy-1.19.1-cp313-cp313-win_amd64.whl", hash = "sha256:016f2246209095e8eda7538944daa1d60e1e8134d98983b9fc1e92c1fc0cb8dd"}, + {file = "mypy-1.19.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:06e6170bd5836770e8104c8fdd58e5e725cfeb309f0a6c681a811f557e97eac1"}, + {file = "mypy-1.19.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:804bd67b8054a85447c8954215a906d6eff9cabeabe493fb6334b24f4bfff718"}, + {file = "mypy-1.19.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:21761006a7f497cb0d4de3d8ef4ca70532256688b0523eee02baf9eec895e27b"}, + {file = "mypy-1.19.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:28902ee51f12e0f19e1e16fbe2f8f06b6637f482c459dd393efddd0ec7f82045"}, + {file = "mypy-1.19.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:481daf36a4c443332e2ae9c137dfee878fcea781a2e3f895d54bd3002a900957"}, + {file = "mypy-1.19.1-cp314-cp314-win_amd64.whl", hash = "sha256:8bb5c6f6d043655e055be9b542aa5f3bdd30e4f3589163e85f93f3640060509f"}, + {file = "mypy-1.19.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7bcfc336a03a1aaa26dfce9fff3e287a3ba99872a157561cbfcebe67c13308e3"}, + {file = "mypy-1.19.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b7951a701c07ea584c4fe327834b92a30825514c868b1f69c30445093fdd9d5a"}, + {file = "mypy-1.19.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b13cfdd6c87fc3efb69ea4ec18ef79c74c3f98b4e5498ca9b85ab3b2c2329a67"}, + {file = "mypy-1.19.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f28f99c824ecebcdaa2e55d82953e38ff60ee5ec938476796636b86afa3956e"}, + {file = "mypy-1.19.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c608937067d2fc5a4dd1a5ce92fd9e1398691b8c5d012d66e1ddd430e9244376"}, + {file = "mypy-1.19.1-cp39-cp39-win_amd64.whl", hash = "sha256:409088884802d511ee52ca067707b90c883426bd95514e8cfda8281dc2effe24"}, + {file = "mypy-1.19.1-py3-none-any.whl", hash = "sha256:f1235f5ea01b7db5468d53ece6aaddf1ad0b88d9e7462b86ef96fe04995d7247"}, + {file = "mypy-1.19.1.tar.gz", hash = "sha256:19d88bb05303fe63f71dd2c6270daca27cb9401c4ca8255fe50d1d920e0eb9ba"}, +] + +[package.dependencies] +librt = {version = ">=0.6.2", markers = "platform_python_implementation != \"PyPy\""} +mypy_extensions = ">=1.0.0" +pathspec = ">=0.9.0" +typing_extensions = ">=4.6.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +faster-cache = ["orjson"] 
+install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, +] + +[[package]] +name = "nodeenv" +version = "1.9.1" +description = "Node.js virtual environment builder" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["dev"] +files = [ + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, +] + +[[package]] +name = "packaging" +version = "25.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, +] + +[[package]] +name = "parso" +version = "0.8.5" +description = "A Python Parser" +optional = false +python-versions = ">=3.6" +groups = ["dev"] +files = [ + {file = "parso-0.8.5-py2.py3-none-any.whl", hash = "sha256:646204b5ee239c396d040b90f9e272e9a8017c630092bf59980beb62fd033887"}, + {file = "parso-0.8.5.tar.gz", hash = "sha256:034d7354a9a018bdce352f48b2a8a450f05e9d6ee85db84764e9b6bd96dafe5a"}, +] + +[package.extras] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["docopt", "pytest"] + +[[package]] +name = "passlib" +version = "1.7.4" +description = "comprehensive password hashing framework supporting over 30 schemes" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "passlib-1.7.4-py2.py3-none-any.whl", hash = "sha256:aa6bca462b8d8bda89c70b382f0c298a20b5560af6cbfa2dce410c0a2fb669f1"}, + {file = "passlib-1.7.4.tar.gz", hash = "sha256:defd50f72b65c5402ab2c573830a6978e5f202ad0d984793c8dde2c4152ebe04"}, +] + +[package.dependencies] +bcrypt = {version = ">=3.1.0", optional = true, markers = "extra == \"bcrypt\""} + +[package.extras] +argon2 = ["argon2-cffi (>=18.2.0)"] +bcrypt = ["bcrypt (>=3.1.0)"] +build-docs = ["cloud-sptheme (>=1.10.1)", "sphinx (>=1.6)", "sphinxcontrib-fulltoc (>=1.2.0)"] +totp = ["cryptography"] + +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "pexpect" +version = "4.9.0" +description = "Pexpect allows easy control of interactive console applications." 
+optional = false +python-versions = "*" +groups = ["dev"] +markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\"" +files = [ + {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, + {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, +] + +[package.dependencies] +ptyprocess = ">=0.5" + +[[package]] +name = "platformdirs" +version = "4.5.1" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." +optional = false +python-versions = ">=3.10" +groups = ["dev"] +files = [ + {file = "platformdirs-4.5.1-py3-none-any.whl", hash = "sha256:d03afa3963c806a9bed9d5125c8f4cb2fdaf74a55ab60e5d59b3fde758104d31"}, + {file = "platformdirs-4.5.1.tar.gz", hash = "sha256:61d5cdcc6065745cdd94f0f878977f8de9437be93de97c1c12f853c9c0cdcbda"}, +] + +[package.extras] +docs = ["furo (>=2025.9.25)", "proselint (>=0.14)", "sphinx (>=8.2.3)", "sphinx-autodoc-typehints (>=3.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.4.2)", "pytest-cov (>=7)", "pytest-mock (>=3.15.1)"] +type = ["mypy (>=1.18.2)"] + +[[package]] +name = "pluggy" +version = "1.6.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, + {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["coverage", "pytest", "pytest-benchmark"] + +[[package]] +name = "pre-commit" +version = "3.8.0" +description = "A framework for managing and maintaining multi-language pre-commit hooks." 
+optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pre_commit-3.8.0-py2.py3-none-any.whl", hash = "sha256:9a90a53bf82fdd8778d58085faf8d83df56e40dfe18f45b19446e26bf1b3a63f"}, + {file = "pre_commit-3.8.0.tar.gz", hash = "sha256:8bb6494d4a20423842e198980c9ecf9f96607a07ea29549e180eef9ae80fe7af"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + +[[package]] +name = "prompt-toolkit" +version = "3.0.52" +description = "Library for building powerful interactive command lines in Python" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "prompt_toolkit-3.0.52-py3-none-any.whl", hash = "sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955"}, + {file = "prompt_toolkit-3.0.52.tar.gz", hash = "sha256:28cde192929c8e7321de85de1ddbe736f1375148b02f2e17edd840042b1be855"}, +] + +[package.dependencies] +wcwidth = "*" + +[[package]] +name = "psycopg2-binary" +version = "2.9.11" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "psycopg2-binary-2.9.11.tar.gz", hash = "sha256:b6aed9e096bf63f9e75edf2581aa9a7e7186d97ab5c177aa6c87797cd591236c"}, + {file = "psycopg2_binary-2.9.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d6fe6b47d0b42ce1c9f1fa3e35bb365011ca22e39db37074458f27921dca40f2"}, + {file = "psycopg2_binary-2.9.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a6c0e4262e089516603a09474ee13eabf09cb65c332277e39af68f6233911087"}, + {file = "psycopg2_binary-2.9.11-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c47676e5b485393f069b4d7a811267d3168ce46f988fa602658b8bb901e9e64d"}, + {file = "psycopg2_binary-2.9.11-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:a28d8c01a7b27a1e3265b11250ba7557e5f72b5ee9e5f3a2fa8d2949c29bf5d2"}, + {file = "psycopg2_binary-2.9.11-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5f3f2732cf504a1aa9e9609d02f79bea1067d99edf844ab92c247bbca143303b"}, + {file = "psycopg2_binary-2.9.11-cp310-cp310-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:865f9945ed1b3950d968ec4690ce68c55019d79e4497366d36e090327ce7db14"}, + {file = "psycopg2_binary-2.9.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:91537a8df2bde69b1c1db01d6d944c831ca793952e4f57892600e96cee95f2cd"}, + {file = "psycopg2_binary-2.9.11-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4dca1f356a67ecb68c81a7bc7809f1569ad9e152ce7fd02c2f2036862ca9f66b"}, + {file = "psycopg2_binary-2.9.11-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:0da4de5c1ac69d94ed4364b6cbe7190c1a70d325f112ba783d83f8440285f152"}, + {file = "psycopg2_binary-2.9.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37d8412565a7267f7d79e29ab66876e55cb5e8e7b3bbf94f8206f6795f8f7e7e"}, + {file = "psycopg2_binary-2.9.11-cp310-cp310-win_amd64.whl", hash = "sha256:c665f01ec8ab273a61c62beeb8cce3014c214429ced8a308ca1fc410ecac3a39"}, + {file = "psycopg2_binary-2.9.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0e8480afd62362d0a6a27dd09e4ca2def6fa50ed3a4e7c09165266106b2ffa10"}, + {file = "psycopg2_binary-2.9.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:763c93ef1df3da6d1a90f86ea7f3f806dc06b21c198fa87c3c25504abec9404a"}, + {file = "psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:2e164359396576a3cc701ba8af4751ae68a07235d7a380c631184a611220d9a4"}, + {file = "psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:d57c9c387660b8893093459738b6abddbb30a7eab058b77b0d0d1c7d521ddfd7"}, + {file = "psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2c226ef95eb2250974bf6fa7a842082b31f68385c4f3268370e3f3870e7859ee"}, + {file = "psycopg2_binary-2.9.11-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a311f1edc9967723d3511ea7d2708e2c3592e3405677bf53d5c7246753591fbb"}, + {file = "psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ebb415404821b6d1c47353ebe9c8645967a5235e6d88f914147e7fd411419e6f"}, + {file = "psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f07c9c4a5093258a03b28fab9b4f151aa376989e7f35f855088234e656ee6a94"}, + {file = "psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:00ce1830d971f43b667abe4a56e42c1e2d594b32da4802e44a73bacacb25535f"}, + {file = "psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cffe9d7697ae7456649617e8bb8d7a45afb71cd13f7ab22af3e5c61f04840908"}, + {file = "psycopg2_binary-2.9.11-cp311-cp311-win_amd64.whl", hash = "sha256:304fd7b7f97eef30e91b8f7e720b3db75fee010b520e434ea35ed1ff22501d03"}, + {file = "psycopg2_binary-2.9.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:be9b840ac0525a283a96b556616f5b4820e0526addb8dcf6525a0fa162730be4"}, + {file = "psycopg2_binary-2.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f090b7ddd13ca842ebfe301cd587a76a4cf0913b1e429eb92c1be5dbeb1a19bc"}, + {file = "psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ab8905b5dcb05bf3fb22e0cf90e10f469563486ffb6a96569e51f897c750a76a"}, + {file = "psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:bf940cd7e7fec19181fdbc29d76911741153d51cab52e5c21165f3262125685e"}, + {file = "psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fa0f693d3c68ae925966f0b14b8edda71696608039f4ed61b1fe9ffa468d16db"}, + {file = "psycopg2_binary-2.9.11-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a1cf393f1cdaf6a9b57c0a719a1068ba1069f022a59b8b1fe44b006745b59757"}, + {file = "psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ef7a6beb4beaa62f88592ccc65df20328029d721db309cb3250b0aae0fa146c3"}, + {file = "psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:31b32c457a6025e74d233957cc9736742ac5a6cb196c6b68499f6bb51390bd6a"}, + {file = "psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:edcb3aeb11cb4bf13a2af3c53a15b3d612edeb6409047ea0b5d6a21a9d744b34"}, + {file = "psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b6d93d7c0b61a1dd6197d208ab613eb7dcfdcca0a49c42ceb082257991de9d"}, + {file = "psycopg2_binary-2.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:b33fabeb1fde21180479b2d4667e994de7bbf0eec22832ba5d9b5e4cf65b6c6d"}, + {file = "psycopg2_binary-2.9.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b8fb3db325435d34235b044b199e56cdf9ff41223a4b9752e8576465170bb38c"}, + {file = "psycopg2_binary-2.9.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:366df99e710a2acd90efed3764bb1e28df6c675d33a7fb40df9b7281694432ee"}, + {file = 
"psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8c55b385daa2f92cb64b12ec4536c66954ac53654c7f15a203578da4e78105c0"}, + {file = "psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:c0377174bf1dd416993d16edc15357f6eb17ac998244cca19bc67cdc0e2e5766"}, + {file = "psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5c6ff3335ce08c75afaed19e08699e8aacf95d4a260b495a4a8545244fe2ceb3"}, + {file = "psycopg2_binary-2.9.11-cp313-cp313-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:84011ba3109e06ac412f95399b704d3d6950e386b7994475b231cf61eec2fc1f"}, + {file = "psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ba34475ceb08cccbdd98f6b46916917ae6eeb92b5ae111df10b544c3a4621dc4"}, + {file = "psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b31e90fdd0f968c2de3b26ab014314fe814225b6c324f770952f7d38abf17e3c"}, + {file = "psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:d526864e0f67f74937a8fce859bd56c979f5e2ec57ca7c627f5f1071ef7fee60"}, + {file = "psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04195548662fa544626c8ea0f06561eb6203f1984ba5b4562764fbeb4c3d14b1"}, + {file = "psycopg2_binary-2.9.11-cp313-cp313-win_amd64.whl", hash = "sha256:efff12b432179443f54e230fdf60de1f6cc726b6c832db8701227d089310e8aa"}, + {file = "psycopg2_binary-2.9.11-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:92e3b669236327083a2e33ccfa0d320dd01b9803b3e14dd986a4fc54aa00f4e1"}, + {file = "psycopg2_binary-2.9.11-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:e0deeb03da539fa3577fcb0b3f2554a97f7e5477c246098dbb18091a4a01c16f"}, + {file = "psycopg2_binary-2.9.11-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9b52a3f9bb540a3e4ec0f6ba6d31339727b2950c9772850d6545b7eae0b9d7c5"}, + {file = "psycopg2_binary-2.9.11-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:db4fd476874ccfdbb630a54426964959e58da4c61c9feba73e6094d51303d7d8"}, + {file = "psycopg2_binary-2.9.11-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:47f212c1d3be608a12937cc131bd85502954398aaa1320cb4c14421a0ffccf4c"}, + {file = "psycopg2_binary-2.9.11-cp314-cp314-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e35b7abae2b0adab776add56111df1735ccc71406e56203515e228a8dc07089f"}, + {file = "psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:fcf21be3ce5f5659daefd2b3b3b6e4727b028221ddc94e6c1523425579664747"}, + {file = "psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:9bd81e64e8de111237737b29d68039b9c813bdf520156af36d26819c9a979e5f"}, + {file = "psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:32770a4d666fbdafab017086655bcddab791d7cb260a16679cc5a7338b64343b"}, + {file = "psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c3cb3a676873d7506825221045bd70e0427c905b9c8ee8d6acd70cfcbd6e576d"}, + {file = "psycopg2_binary-2.9.11-cp314-cp314-win_amd64.whl", hash = "sha256:4012c9c954dfaccd28f94e84ab9f94e12df76b4afb22331b1f0d3154893a6316"}, + {file = "psycopg2_binary-2.9.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:20e7fb94e20b03dcc783f76c0865f9da39559dcc0c28dd1a3fce0d01902a6b9c"}, + {file = "psycopg2_binary-2.9.11-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:4bdab48575b6f870f465b397c38f1b415520e9879fdf10a53ee4f49dcbdf8a21"}, + {file = "psycopg2_binary-2.9.11-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9d3a9edcfbe77a3ed4bc72836d466dfce4174beb79eda79ea155cc77237ed9e8"}, + {file = "psycopg2_binary-2.9.11-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:44fc5c2b8fa871ce7f0023f619f1349a0aa03a0857f2c96fbc01c657dcbbdb49"}, + {file = "psycopg2_binary-2.9.11-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9c55460033867b4622cda1b6872edf445809535144152e5d14941ef591980edf"}, + {file = "psycopg2_binary-2.9.11-cp39-cp39-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:2d11098a83cca92deaeaed3d58cfd150d49b3b06ee0d0852be466bf87596899e"}, + {file = "psycopg2_binary-2.9.11-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:691c807d94aecfbc76a14e1408847d59ff5b5906a04a23e12a89007672b9e819"}, + {file = "psycopg2_binary-2.9.11-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:8b81627b691f29c4c30a8f322546ad039c40c328373b11dff7490a3e1b517855"}, + {file = "psycopg2_binary-2.9.11-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:b637d6d941209e8d96a072d7977238eea128046effbf37d1d8b2c0764750017d"}, + {file = "psycopg2_binary-2.9.11-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:41360b01c140c2a03d346cec3280cf8a71aa07d94f3b1509fa0161c366af66b4"}, + {file = "psycopg2_binary-2.9.11-cp39-cp39-win_amd64.whl", hash = "sha256:875039274f8a2361e5207857899706da840768e2a775bf8c65e82f60b197df02"}, +] + +[[package]] +name = "ptyprocess" +version = "0.7.0" +description = "Run a subprocess in a pseudo terminal" +optional = false +python-versions = "*" +groups = ["dev"] +markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\"" +files = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] + +[[package]] +name = "pure-eval" +version = "0.2.3" +description = "Safely evaluate AST nodes without side effects" +optional = false +python-versions = "*" +groups = ["dev"] +files = [ + {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"}, + {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"}, +] + +[package.extras] +tests = ["pytest"] + +[[package]] +name = "pycodestyle" +version = "2.14.0" +description = "Python style guide checker" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pycodestyle-2.14.0-py2.py3-none-any.whl", hash = "sha256:dd6bf7cb4ee77f8e016f9c8e74a35ddd9f67e1d5fd4184d86c3b98e07099f42d"}, + {file = "pycodestyle-2.14.0.tar.gz", hash = "sha256:c4b5b517d278089ff9d0abdec919cd97262a3367449ea1c8b49b91529167b783"}, +] + +[[package]] +name = "pydantic" +version = "2.12.5" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d"}, + {file = "pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +pydantic-core = "2.41.5" +typing-extensions = ">=4.14.1" +typing-inspection = 
">=0.4.2" + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] + +[[package]] +name = "pydantic-core" +version = "2.41.5" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic_core-2.41.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:77b63866ca88d804225eaa4af3e664c5faf3568cea95360d21f4725ab6e07146"}, + {file = "pydantic_core-2.41.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dfa8a0c812ac681395907e71e1274819dec685fec28273a28905df579ef137e2"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5921a4d3ca3aee735d9fd163808f5e8dd6c6972101e4adbda9a4667908849b97"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25c479382d26a2a41b7ebea1043564a937db462816ea07afa8a44c0866d52f9"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f547144f2966e1e16ae626d8ce72b4cfa0caedc7fa28052001c94fb2fcaa1c52"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f52298fbd394f9ed112d56f3d11aabd0d5bd27beb3084cc3d8ad069483b8941"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:100baa204bb412b74fe285fb0f3a385256dad1d1879f0a5cb1499ed2e83d132a"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05a2c8852530ad2812cb7914dc61a1125dc4e06252ee98e5638a12da6cc6fb6c"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:29452c56df2ed968d18d7e21f4ab0ac55e71dc59524872f6fc57dcf4a3249ed2"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:d5160812ea7a8a2ffbe233d8da666880cad0cbaf5d4de74ae15c313213d62556"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:df3959765b553b9440adfd3c795617c352154e497a4eaf3752555cfb5da8fc49"}, + {file = "pydantic_core-2.41.5-cp310-cp310-win32.whl", hash = "sha256:1f8d33a7f4d5a7889e60dc39856d76d09333d8a6ed0f5f1190635cbec70ec4ba"}, + {file = "pydantic_core-2.41.5-cp310-cp310-win_amd64.whl", hash = "sha256:62de39db01b8d593e45871af2af9e497295db8d73b085f6bfd0b18c83c70a8f9"}, + {file = "pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6"}, + {file = "pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1"}, + {file = 
"pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f"}, + {file = "pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7"}, + {file = "pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3"}, + {file = "pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = 
"sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9"}, + {file = "pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd"}, + {file = "pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a"}, + {file = "pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553"}, + {file = 
"pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008"}, + {file = "pydantic_core-2.41.5-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:8bfeaf8735be79f225f3fefab7f941c712aaca36f1128c9d7e2352ee1aa87bdf"}, + {file = "pydantic_core-2.41.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:346285d28e4c8017da95144c7f3acd42740d637ff41946af5ce6e5e420502dd5"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a75dafbf87d6276ddc5b2bf6fae5254e3d0876b626eb24969a574fff9149ee5d"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7b93a4d08587e2b7e7882de461e82b6ed76d9026ce91ca7915e740ecc7855f60"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8465ab91a4bd96d36dde3263f06caa6a8a6019e4113f24dc753d79a8b3a3f82"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:299e0a22e7ae2b85c1a57f104538b2656e8ab1873511fd718a1c1c6f149b77b5"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:707625ef0983fcfb461acfaf14de2067c5942c6bb0f3b4c99158bed6fedd3cf3"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f41eb9797986d6ebac5e8edff36d5cef9de40def462311b3eb3eeded1431e425"}, + {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0384e2e1021894b1ff5a786dbf94771e2986ebe2869533874d7e43bc79c6f504"}, + {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:f0cd744688278965817fd0839c4a4116add48d23890d468bc436f78beb28abf5"}, + {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:753e230374206729bf0a807954bcc6c150d3743928a73faffee51ac6557a03c3"}, + {file = "pydantic_core-2.41.5-cp39-cp39-win32.whl", hash = "sha256:873e0d5b4fb9b89ef7c2d2a963ea7d02879d9da0da8d9d4933dee8ee86a8b460"}, + {file = "pydantic_core-2.41.5-cp39-cp39-win_amd64.whl", hash = "sha256:e4f4a984405e91527a0d62649ee21138f8e3d0ef103be488c1dc11a80d7f184b"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b5819cd790dbf0c5eb9f82c73c16b39a65dd6dd4d1439dcdea7816ec9adddab8"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5a4e67afbc95fa5c34cf27d9089bca7fcab4e51e57278d710320a70b956d1b9a"}, + {file = 
"pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ece5c59f0ce7d001e017643d8d24da587ea1f74f6993467d85ae8a5ef9d4f42b"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16f80f7abe3351f8ea6858914ddc8c77e02578544a0ebc15b4c2e1a0e813b0b2"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:33cb885e759a705b426baada1fe68cbb0a2e68e34c5d0d0289a364cf01709093"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:c8d8b4eb992936023be7dee581270af5c6e0697a8559895f527f5b7105ecd36a"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:242a206cd0318f95cd21bdacff3fcc3aab23e79bba5cac3db5a841c9ef9c6963"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d3a978c4f57a597908b7e697229d996d77a6d3c94901e9edee593adada95ce1a"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51"}, + {file = "pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e"}, +] + +[package.dependencies] +typing-extensions = ">=4.14.1" + +[[package]] +name = "pydantic-settings" +version = "2.12.0" +description = "Settings management using Pydantic" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "pydantic_settings-2.12.0-py3-none-any.whl", hash = "sha256:fddb9fd99a5b18da837b29710391e945b1e30c135477f484084ee513adb93809"}, + {file = "pydantic_settings-2.12.0.tar.gz", hash = "sha256:005538ef951e3c2a68e1c08b292b5f2e71490def8589d4221b95dab00dafcfd0"}, +] + +[package.dependencies] +pydantic = ">=2.7.0" +python-dotenv = ">=0.21.0" +typing-inspection = ">=0.4.0" + +[package.extras] +aws-secrets-manager = ["boto3 (>=1.35.0)", "boto3-stubs[secretsmanager]"] +azure-key-vault = ["azure-identity (>=1.16.0)", "azure-keyvault-secrets (>=4.8.0)"] +gcp-secret-manager = ["google-cloud-secret-manager (>=2.23.1)"] +toml = ["tomli (>=2.0.1)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "pyflakes" +version = "3.4.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = 
"pyflakes-3.4.0-py2.py3-none-any.whl", hash = "sha256:f742a7dbd0d9cb9ea41e9a24a918996e8170c799fa528688d40dd582c8265f4f"}, + {file = "pyflakes-3.4.0.tar.gz", hash = "sha256:b24f96fafb7d2ab0ec5075b7350b3d2d2218eab42003821c06344973d3ea2f58"}, +] + +[[package]] +name = "pygments" +version = "2.19.2" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"}, + {file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pytest" +version = "7.4.4" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-asyncio" +version = "0.23.8" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pytest_asyncio-0.23.8-py3-none-any.whl", hash = "sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2"}, + {file = "pytest_asyncio-0.23.8.tar.gz", hash = "sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3"}, +] + +[package.dependencies] +pytest = ">=7.0.0,<9" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] + +[[package]] +name = "pytest-cov" +version = "4.1.0" +description = "Pytest plugin for measuring coverage." +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + +[[package]] +name = "pytest-env" +version = "1.1.3" +description = "pytest plugin that allows you to add environment variables." 
+optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pytest_env-1.1.3-py3-none-any.whl", hash = "sha256:aada77e6d09fcfb04540a6e462c58533c37df35fa853da78707b17ec04d17dfc"}, + {file = "pytest_env-1.1.3.tar.gz", hash = "sha256:fcd7dc23bb71efd3d35632bde1bbe5ee8c8dc4489d6617fb010674880d96216b"}, +] + +[package.dependencies] +pytest = ">=7.4.3" + +[package.extras] +test = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "pytest-mock (>=3.12)"] + +[[package]] +name = "python-dotenv" +version = "1.2.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61"}, + {file = "python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "python-multipart" +version = "0.0.6" +description = "A streaming multipart parser for Python" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "python_multipart-0.0.6-py3-none-any.whl", hash = "sha256:ee698bab5ef148b0a760751c261902cd096e57e10558e11aca17646b74ee1c18"}, + {file = "python_multipart-0.0.6.tar.gz", hash = "sha256:e9925a80bb668529f1b67c7fdb0a5dacdd7cbfc6fb0bff3ea443fe22bdd62132"}, +] + +[package.extras] +dev = ["atomicwrites (==1.2.1)", "attrs (==19.2.0)", "coverage (==6.5.0)", "hatch", "invoke (==1.7.3)", "more-itertools (==4.3.0)", "pbr (==4.3.0)", "pluggy (==1.0.0)", "py (==1.11.0)", "pytest (==7.2.0)", "pytest-cov (==4.0.0)", "pytest-timeout (==2.1.0)", "pyyaml (==5.1)"] + +[[package]] +name = "pyyaml" +version = "6.0.3" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "PyYAML-6.0.3-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:c2514fceb77bc5e7a2f7adfaa1feb2fb311607c9cb518dbc378688ec73d8292f"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c57bb8c96f6d1808c030b1687b9b5fb476abaa47f0db9c0101f5e9f394e97f4"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efd7b85f94a6f21e4932043973a7ba2613b059c4a000551892ac9f1d11f5baf3"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22ba7cfcad58ef3ecddc7ed1db3409af68d023b7f940da23c6c2a1890976eda6"}, + {file = "PyYAML-6.0.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6344df0d5755a2c9a276d4473ae6b90647e216ab4757f8426893b5dd2ac3f369"}, + {file = "PyYAML-6.0.3-cp38-cp38-win32.whl", hash = "sha256:3ff07ec89bae51176c0549bc4c63aa6202991da2d9a6129d7aef7f1407d3f295"}, + {file = "PyYAML-6.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:5cf4e27da7e3fbed4d6c3d8e797387aaad68102272f8f9752883bc32d61cb87b"}, + {file = "pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b"}, + {file = "pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8"}, + {file = 
"pyyaml-6.0.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b"}, + {file = "pyyaml-6.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0"}, + {file = "pyyaml-6.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69"}, + {file = "pyyaml-6.0.3-cp310-cp310-win32.whl", hash = "sha256:28c8d926f98f432f88adc23edf2e6d4921ac26fb084b028c733d01868d19007e"}, + {file = "pyyaml-6.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:bdb2c67c6c1390b63c6ff89f210c8fd09d9a1217a465701eac7316313c915e4c"}, + {file = "pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e"}, + {file = "pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d"}, + {file = "pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a"}, + {file = "pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4"}, + {file = "pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b"}, + {file = "pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf"}, + {file = "pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196"}, + {file = "pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc"}, + {file = "pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e"}, + {file = "pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea"}, + {file = "pyyaml-6.0.3-cp312-cp312-win32.whl", hash = 
"sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5"}, + {file = "pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b"}, + {file = "pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd"}, + {file = "pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8"}, + {file = "pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6"}, + {file = "pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6"}, + {file = "pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be"}, + {file = "pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26"}, + {file = "pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c"}, + {file = "pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb"}, + {file = "pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac"}, + {file = "pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5"}, + {file = "pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764"}, + {file = "pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35"}, + {file = "pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac"}, + {file = "pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3"}, + {file = "pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3"}, + {file = "pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = 
"sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c"}, + {file = "pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065"}, + {file = "pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65"}, + {file = "pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9"}, + {file = "pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b"}, + {file = "pyyaml-6.0.3-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:b865addae83924361678b652338317d1bd7e79b1f4596f96b96c77a5a34b34da"}, + {file = "pyyaml-6.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c3355370a2c156cffb25e876646f149d5d68f5e0a3ce86a5084dd0b64a994917"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3c5677e12444c15717b902a5798264fa7909e41153cdf9ef7ad571b704a63dd9"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5ed875a24292240029e4483f9d4a4b8a1ae08843b9c54f43fcc11e404532a8a5"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0150219816b6a1fa26fb4699fb7daa9caf09eb1999f3b70fb6e786805e80375a"}, + {file = "pyyaml-6.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fa160448684b4e94d80416c0fa4aac48967a969efe22931448d853ada8baf926"}, + {file = "pyyaml-6.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:27c0abcb4a5dac13684a37f76e701e054692a9b2d3064b70f5e4eb54810553d7"}, + {file = "pyyaml-6.0.3-cp39-cp39-win32.whl", hash = "sha256:1ebe39cb5fc479422b83de611d14e2c0d3bb2a18bbcb01f229ab3cfbd8fee7a0"}, + {file = "pyyaml-6.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:2e71d11abed7344e42a8849600193d15b6def118602c4c176f748e4583246007"}, + {file = "pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f"}, +] + +[[package]] +name = "ruff" +version = "0.1.15" +description = "An extremely fast Python linter and code formatter, written in Rust." 
+optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5fe8d54df166ecc24106db7dd6a68d44852d14eb0729ea4672bb4d96c320b7df"}, + {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6f0bfbb53c4b4de117ac4d6ddfd33aa5fc31beeaa21d23c45c6dd249faf9126f"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0d432aec35bfc0d800d4f70eba26e23a352386be3a6cf157083d18f6f5881c8"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9405fa9ac0e97f35aaddf185a1be194a589424b8713e3b97b762336ec79ff807"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66ec24fe36841636e814b8f90f572a8c0cb0e54d8b5c2d0e300d28a0d7bffec"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6f8ad828f01e8dd32cc58bc28375150171d198491fc901f6f98d2a39ba8e3ff5"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86811954eec63e9ea162af0ffa9f8d09088bab51b7438e8b6488b9401863c25e"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd4025ac5e87d9b80e1f300207eb2fd099ff8200fa2320d7dc066a3f4622dc6b"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b17b93c02cdb6aeb696effecea1095ac93f3884a49a554a9afa76bb125c114c1"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ddb87643be40f034e97e97f5bc2ef7ce39de20e34608f3f829db727a93fb82c5"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:abf4822129ed3a5ce54383d5f0e964e7fef74a41e48eb1dfad404151efc130a2"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6c629cf64bacfd136c07c78ac10a54578ec9d1bd2a9d395efbee0935868bf852"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1bab866aafb53da39c2cadfb8e1c4550ac5340bb40300083eb8967ba25481447"}, + {file = "ruff-0.1.15-py3-none-win32.whl", hash = "sha256:2417e1cb6e2068389b07e6fa74c306b2810fe3ee3476d5b8a96616633f40d14f"}, + {file = "ruff-0.1.15-py3-none-win_amd64.whl", hash = "sha256:3837ac73d869efc4182d9036b1405ef4c73d9b1f88da2413875e34e0d6919587"}, + {file = "ruff-0.1.15-py3-none-win_arm64.whl", hash = "sha256:9a933dfb1c14ec7a33cceb1e49ec4a16b51ce3c20fd42663198746efc0427360"}, + {file = "ruff-0.1.15.tar.gz", hash = "sha256:f6dfa8c1b21c913c326919056c390966648b680966febcb796cc9d1aaab8564e"}, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "stack-data" +version = "0.6.3" +description = "Extract data from python stack frames and tracebacks for informative displays" +optional = false +python-versions = "*" +groups = ["dev"] +files = [ + {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, + {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, +] + 
+[package.dependencies] +asttokens = ">=2.1.0" +executing = ">=1.2.0" +pure-eval = "*" + +[package.extras] +tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] + +[[package]] +name = "starlette" +version = "0.36.3" +description = "The little ASGI library that shines." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "starlette-0.36.3-py3-none-any.whl", hash = "sha256:13d429aa93a61dc40bf503e8c801db1f1bca3dc706b10ef2434a36123568f044"}, + {file = "starlette-0.36.3.tar.gz", hash = "sha256:90a671733cfb35771d8cc605e0b679d23b992f8dcfad48cc60b38cb29aeb7080"}, +] + +[package.dependencies] +anyio = ">=3.4.0,<5" + +[package.extras] +full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] + +[[package]] +name = "traitlets" +version = "5.14.3" +description = "Traitlets Python configuration system" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, + {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, +] + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.2)", "pytest-mock", "pytest-mypy-testing"] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +description = "Backported and Experimental Type Hints for Python 3.9+" +optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, + {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, +] + +[[package]] +name = "typing-inspection" +version = "0.4.2" +description = "Runtime typing introspection tools" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7"}, + {file = "typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464"}, +] + +[package.dependencies] +typing-extensions = ">=4.12.0" + +[[package]] +name = "uvicorn" +version = "0.27.1" +description = "The lightning-fast ASGI server." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "uvicorn-0.27.1-py3-none-any.whl", hash = "sha256:5c89da2f3895767472a35556e539fd59f7edbe9b1e9c0e1c99eebeadc61838e4"}, + {file = "uvicorn-0.27.1.tar.gz", hash = "sha256:3d9a267296243532db80c83a959a3400502165ade2c1338dea4e67915fd4745a"}, +] + +[package.dependencies] +click = ">=7.0" +colorama = {version = ">=0.4", optional = true, markers = "sys_platform == \"win32\" and extra == \"standard\""} +h11 = ">=0.8" +httptools = {version = ">=0.5.0", optional = true, markers = "extra == \"standard\""} +python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} +pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""} +uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\" and extra == \"standard\""} +watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} +websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""} + +[package.extras] +standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1) ; sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"", "watchfiles (>=0.13)", "websockets (>=10.4)"] + +[[package]] +name = "uvloop" +version = "0.22.1" +description = "Fast implementation of asyncio event loop on top of libuv" +optional = false +python-versions = ">=3.8.1" +groups = ["main"] +markers = "sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"" +files = [ + {file = "uvloop-0.22.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ef6f0d4cc8a9fa1f6a910230cd53545d9a14479311e87e3cb225495952eb672c"}, + {file = "uvloop-0.22.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7cd375a12b71d33d46af85a3343b35d98e8116134ba404bd657b3b1d15988792"}, + {file = "uvloop-0.22.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ac33ed96229b7790eb729702751c0e93ac5bc3bcf52ae9eccbff30da09194b86"}, + {file = "uvloop-0.22.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:481c990a7abe2c6f4fc3d98781cc9426ebd7f03a9aaa7eb03d3bfc68ac2a46bd"}, + {file = "uvloop-0.22.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a592b043a47ad17911add5fbd087c76716d7c9ccc1d64ec9249ceafd735f03c2"}, + {file = "uvloop-0.22.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1489cf791aa7b6e8c8be1c5a080bae3a672791fcb4e9e12249b05862a2ca9cec"}, + {file = "uvloop-0.22.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c60ebcd36f7b240b30788554b6f0782454826a0ed765d8430652621b5de674b9"}, + {file = "uvloop-0.22.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b7f102bf3cb1995cfeaee9321105e8f5da76fdb104cdad8986f85461a1b7b77"}, + {file = "uvloop-0.22.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53c85520781d84a4b8b230e24a5af5b0778efdb39142b424990ff1ef7c48ba21"}, + {file = "uvloop-0.22.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:56a2d1fae65fd82197cb8c53c367310b3eabe1bbb9fb5a04d28e3e3520e4f702"}, + {file = "uvloop-0.22.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:40631b049d5972c6755b06d0bfe8233b1bd9a8a6392d9d1c45c10b6f9e9b2733"}, + {file = "uvloop-0.22.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:535cc37b3a04f6cd2c1ef65fa1d370c9a35b6695df735fcff5427323f2cd5473"}, + {file = "uvloop-0.22.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:fe94b4564e865d968414598eea1a6de60adba0c040ba4ed05ac1300de402cd42"}, + {file = "uvloop-0.22.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:51eb9bd88391483410daad430813d982010f9c9c89512321f5b60e2cddbdddd6"}, + {file = "uvloop-0.22.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:700e674a166ca5778255e0e1dc4e9d79ab2acc57b9171b79e65feba7184b3370"}, + {file = "uvloop-0.22.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b5b1ac819a3f946d3b2ee07f09149578ae76066d70b44df3fa990add49a82e4"}, + {file = "uvloop-0.22.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e047cc068570bac9866237739607d1313b9253c3051ad84738cbb095be0537b2"}, + {file = "uvloop-0.22.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:512fec6815e2dd45161054592441ef76c830eddaad55c8aa30952e6fe1ed07c0"}, + {file = "uvloop-0.22.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:561577354eb94200d75aca23fbde86ee11be36b00e52a4eaf8f50fb0c86b7705"}, + {file = "uvloop-0.22.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cdf5192ab3e674ca26da2eada35b288d2fa49fdd0f357a19f0e7c4e7d5077c8"}, + {file = "uvloop-0.22.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e2ea3d6190a2968f4a14a23019d3b16870dd2190cd69c8180f7c632d21de68d"}, + {file = "uvloop-0.22.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0530a5fbad9c9e4ee3f2b33b148c6a64d47bbad8000ea63704fa8260f4cf728e"}, + {file = "uvloop-0.22.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bc5ef13bbc10b5335792360623cc378d52d7e62c2de64660616478c32cd0598e"}, + {file = "uvloop-0.22.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1f38ec5e3f18c8a10ded09742f7fb8de0108796eb673f30ce7762ce1b8550cad"}, + {file = "uvloop-0.22.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3879b88423ec7e97cd4eba2a443aa26ed4e59b45e6b76aabf13fe2f27023a142"}, + {file = "uvloop-0.22.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:4baa86acedf1d62115c1dc6ad1e17134476688f08c6efd8a2ab076e815665c74"}, + {file = "uvloop-0.22.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:297c27d8003520596236bdb2335e6b3f649480bd09e00d1e3a99144b691d2a35"}, + {file = "uvloop-0.22.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c1955d5a1dd43198244d47664a5858082a3239766a839b2102a269aaff7a4e25"}, + {file = "uvloop-0.22.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b31dc2fccbd42adc73bc4e7cdbae4fc5086cf378979e53ca5d0301838c5682c6"}, + {file = "uvloop-0.22.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:93f617675b2d03af4e72a5333ef89450dfaa5321303ede6e67ba9c9d26878079"}, + {file = "uvloop-0.22.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:37554f70528f60cad66945b885eb01f1bb514f132d92b6eeed1c90fd54ed6289"}, + {file = "uvloop-0.22.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:b76324e2dc033a0b2f435f33eb88ff9913c156ef78e153fb210e03c13da746b3"}, + {file = 
"uvloop-0.22.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:badb4d8e58ee08dad957002027830d5c3b06aea446a6a3744483c2b3b745345c"}, + {file = "uvloop-0.22.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b91328c72635f6f9e0282e4a57da7470c7350ab1c9f48546c0f2866205349d21"}, + {file = "uvloop-0.22.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:daf620c2995d193449393d6c62131b3fbd40a63bf7b307a1527856ace637fe88"}, + {file = "uvloop-0.22.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6cde23eeda1a25c75b2e07d39970f3374105d5eafbaab2a4482be82f272d5a5e"}, + {file = "uvloop-0.22.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:80eee091fe128e425177fbd82f8635769e2f32ec9daf6468286ec57ec0313efa"}, + {file = "uvloop-0.22.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:017bd46f9e7b78e81606329d07141d3da446f8798c6baeec124260e22c262772"}, + {file = "uvloop-0.22.1-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c3e5c6727a57cb6558592a95019e504f605d1c54eb86463ee9f7a2dbd411c820"}, + {file = "uvloop-0.22.1-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:57df59d8b48feb0e613d9b1f5e57b7532e97cbaf0d61f7aa9aa32221e84bc4b6"}, + {file = "uvloop-0.22.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:55502bc2c653ed2e9692e8c55cb95b397d33f9f2911e929dc97c4d6b26d04242"}, + {file = "uvloop-0.22.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4a968a72422a097b09042d5fa2c5c590251ad484acf910a651b4b620acd7f193"}, + {file = "uvloop-0.22.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b45649628d816c030dba3c80f8e2689bab1c89518ed10d426036cdc47874dfc4"}, + {file = "uvloop-0.22.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ea721dd3203b809039fcc2983f14608dae82b212288b346e0bfe46ec2fab0b7c"}, + {file = "uvloop-0.22.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ae676de143db2b2f60a9696d7eca5bb9d0dd6cc3ac3dad59a8ae7e95f9e1b54"}, + {file = "uvloop-0.22.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:17d4e97258b0172dfa107b89aa1eeba3016f4b1974ce85ca3ef6a66b35cbf659"}, + {file = "uvloop-0.22.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:05e4b5f86e621cf3927631789999e697e58f0d2d32675b67d9ca9eb0bca55743"}, + {file = "uvloop-0.22.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:286322a90bea1f9422a470d5d2ad82d38080be0a29c4dd9b3e6384320a4d11e7"}, + {file = "uvloop-0.22.1.tar.gz", hash = "sha256:6c84bae345b9147082b17371e3dd5d42775bddce91f885499017f4607fdaf39f"}, +] + +[package.extras] +dev = ["Cython (>=3.0,<4.0)", "setuptools (>=60)"] +docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx_rtd_theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] +test = ["aiohttp (>=3.10.5)", "flake8 (>=6.1,<7.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=25.3.0,<25.4.0)", "pycodestyle (>=2.11.0,<2.12.0)"] + +[[package]] +name = "virtualenv" +version = "20.35.4" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "virtualenv-20.35.4-py3-none-any.whl", hash = "sha256:c21c9cede36c9753eeade68ba7d523529f228a403463376cf821eaae2b650f1b"}, + {file = "virtualenv-20.35.4.tar.gz", hash = "sha256:643d3914d73d3eeb0c552cbb12d7e82adf0e504dbf86a3182f8771a153a1971c"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = 
">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8) ; platform_python_implementation == \"PyPy\" or platform_python_implementation == \"GraalVM\" or platform_python_implementation == \"CPython\" and sys_platform == \"win32\" and python_version >= \"3.13\"", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10) ; platform_python_implementation == \"CPython\""] + +[[package]] +name = "watchfiles" +version = "1.1.1" +description = "Simple, modern and high performance file watching and code reload in python." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "watchfiles-1.1.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:eef58232d32daf2ac67f42dea51a2c80f0d03379075d44a587051e63cc2e368c"}, + {file = "watchfiles-1.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:03fa0f5237118a0c5e496185cafa92878568b652a2e9a9382a5151b1a0380a43"}, + {file = "watchfiles-1.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ca65483439f9c791897f7db49202301deb6e15fe9f8fe2fed555bf986d10c31"}, + {file = "watchfiles-1.1.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f0ab1c1af0cb38e3f598244c17919fb1a84d1629cc08355b0074b6d7f53138ac"}, + {file = "watchfiles-1.1.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bc570d6c01c206c46deb6e935a260be44f186a2f05179f52f7fcd2be086a94d"}, + {file = "watchfiles-1.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e84087b432b6ac94778de547e08611266f1f8ffad28c0ee4c82e028b0fc5966d"}, + {file = "watchfiles-1.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:620bae625f4cb18427b1bb1a2d9426dc0dd5a5ba74c7c2cdb9de405f7b129863"}, + {file = "watchfiles-1.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:544364b2b51a9b0c7000a4b4b02f90e9423d97fbbf7e06689236443ebcad81ab"}, + {file = "watchfiles-1.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bbe1ef33d45bc71cf21364df962af171f96ecaeca06bd9e3d0b583efb12aec82"}, + {file = "watchfiles-1.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1a0bb430adb19ef49389e1ad368450193a90038b5b752f4ac089ec6942c4dff4"}, + {file = "watchfiles-1.1.1-cp310-cp310-win32.whl", hash = "sha256:3f6d37644155fb5beca5378feb8c1708d5783145f2a0f1c4d5a061a210254844"}, + {file = "watchfiles-1.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:a36d8efe0f290835fd0f33da35042a1bb5dc0e83cbc092dcf69bce442579e88e"}, + {file = "watchfiles-1.1.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f57b396167a2565a4e8b5e56a5a1c537571733992b226f4f1197d79e94cf0ae5"}, + {file = "watchfiles-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:421e29339983e1bebc281fab40d812742268ad057db4aee8c4d2bce0af43b741"}, + {file = "watchfiles-1.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e43d39a741e972bab5d8100b5cdacf69db64e34eb19b6e9af162bccf63c5cc6"}, + {file = "watchfiles-1.1.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:f537afb3276d12814082a2e9b242bdcf416c2e8fd9f799a737990a1dbe906e5b"}, + {file = "watchfiles-1.1.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2cd9e04277e756a2e2d2543d65d1e2166d6fd4c9b183f8808634fda23f17b14"}, + {file = "watchfiles-1.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5f3f58818dc0b07f7d9aa7fe9eb1037aecb9700e63e1f6acfed13e9fef648f5d"}, + {file = "watchfiles-1.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bb9f66367023ae783551042d31b1d7fd422e8289eedd91f26754a66f44d5cff"}, + {file = "watchfiles-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aebfd0861a83e6c3d1110b78ad54704486555246e542be3e2bb94195eabb2606"}, + {file = "watchfiles-1.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5fac835b4ab3c6487b5dbad78c4b3724e26bcc468e886f8ba8cc4306f68f6701"}, + {file = "watchfiles-1.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:399600947b170270e80134ac854e21b3ccdefa11a9529a3decc1327088180f10"}, + {file = "watchfiles-1.1.1-cp311-cp311-win32.whl", hash = "sha256:de6da501c883f58ad50db3a32ad397b09ad29865b5f26f64c24d3e3281685849"}, + {file = "watchfiles-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:35c53bd62a0b885bf653ebf6b700d1bf05debb78ad9292cf2a942b23513dc4c4"}, + {file = "watchfiles-1.1.1-cp311-cp311-win_arm64.whl", hash = "sha256:57ca5281a8b5e27593cb7d82c2ac927ad88a96ed406aa446f6344e4328208e9e"}, + {file = "watchfiles-1.1.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:8c89f9f2f740a6b7dcc753140dd5e1ab9215966f7a3530d0c0705c83b401bd7d"}, + {file = "watchfiles-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd404be08018c37350f0d6e34676bd1e2889990117a2b90070b3007f172d0610"}, + {file = "watchfiles-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8526e8f916bb5b9a0a777c8317c23ce65de259422bba5b31325a6fa6029d33af"}, + {file = "watchfiles-1.1.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2edc3553362b1c38d9f06242416a5d8e9fe235c204a4072e988ce2e5bb1f69f6"}, + {file = "watchfiles-1.1.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30f7da3fb3f2844259cba4720c3fc7138eb0f7b659c38f3bfa65084c7fc7abce"}, + {file = "watchfiles-1.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8979280bdafff686ba5e4d8f97840f929a87ed9cdf133cbbd42f7766774d2aa"}, + {file = "watchfiles-1.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dcc5c24523771db3a294c77d94771abcfcb82a0e0ee8efd910c37c59ec1b31bb"}, + {file = "watchfiles-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db5d7ae38ff20153d542460752ff397fcf5c96090c1230803713cf3147a6803"}, + {file = "watchfiles-1.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:28475ddbde92df1874b6c5c8aaeb24ad5be47a11f87cde5a28ef3835932e3e94"}, + {file = "watchfiles-1.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:36193ed342f5b9842edd3532729a2ad55c4160ffcfa3700e0d54be496b70dd43"}, + {file = "watchfiles-1.1.1-cp312-cp312-win32.whl", hash = "sha256:859e43a1951717cc8de7f4c77674a6d389b106361585951d9e69572823f311d9"}, + {file = "watchfiles-1.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:91d4c9a823a8c987cce8fa2690923b069966dabb196dd8d137ea2cede885fde9"}, + {file = "watchfiles-1.1.1-cp312-cp312-win_arm64.whl", hash = "sha256:a625815d4a2bdca61953dbba5a39d60164451ef34c88d751f6c368c3ea73d404"}, + {file = 
"watchfiles-1.1.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:130e4876309e8686a5e37dba7d5e9bc77e6ed908266996ca26572437a5271e18"}, + {file = "watchfiles-1.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5f3bde70f157f84ece3765b42b4a52c6ac1a50334903c6eaf765362f6ccca88a"}, + {file = "watchfiles-1.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e0b1fe858430fc0251737ef3824c54027bedb8c37c38114488b8e131cf8219"}, + {file = "watchfiles-1.1.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f27db948078f3823a6bb3b465180db8ebecf26dd5dae6f6180bd87383b6b4428"}, + {file = "watchfiles-1.1.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:059098c3a429f62fc98e8ec62b982230ef2c8df68c79e826e37b895bc359a9c0"}, + {file = "watchfiles-1.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfb5862016acc9b869bb57284e6cb35fdf8e22fe59f7548858e2f971d045f150"}, + {file = "watchfiles-1.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:319b27255aacd9923b8a276bb14d21a5f7ff82564c744235fc5eae58d95422ae"}, + {file = "watchfiles-1.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c755367e51db90e75b19454b680903631d41f9e3607fbd941d296a020c2d752d"}, + {file = "watchfiles-1.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c22c776292a23bfc7237a98f791b9ad3144b02116ff10d820829ce62dff46d0b"}, + {file = "watchfiles-1.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:3a476189be23c3686bc2f4321dd501cb329c0a0469e77b7b534ee10129ae6374"}, + {file = "watchfiles-1.1.1-cp313-cp313-win32.whl", hash = "sha256:bf0a91bfb5574a2f7fc223cf95eeea79abfefa404bf1ea5e339c0c1560ae99a0"}, + {file = "watchfiles-1.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:52e06553899e11e8074503c8e716d574adeeb7e68913115c4b3653c53f9bae42"}, + {file = "watchfiles-1.1.1-cp313-cp313-win_arm64.whl", hash = "sha256:ac3cc5759570cd02662b15fbcd9d917f7ecd47efe0d6b40474eafd246f91ea18"}, + {file = "watchfiles-1.1.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:563b116874a9a7ce6f96f87cd0b94f7faf92d08d0021e837796f0a14318ef8da"}, + {file = "watchfiles-1.1.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3ad9fe1dae4ab4212d8c91e80b832425e24f421703b5a42ef2e4a1e215aff051"}, + {file = "watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce70f96a46b894b36eba678f153f052967a0d06d5b5a19b336ab0dbbd029f73e"}, + {file = "watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cb467c999c2eff23a6417e58d75e5828716f42ed8289fe6b77a7e5a91036ca70"}, + {file = "watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:836398932192dae4146c8f6f737d74baeac8b70ce14831a239bdb1ca882fc261"}, + {file = "watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:743185e7372b7bc7c389e1badcc606931a827112fbbd37f14c537320fca08620"}, + {file = "watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afaeff7696e0ad9f02cbb8f56365ff4686ab205fcf9c4c5b6fdfaaa16549dd04"}, + {file = "watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7eb7da0eb23aa2ba036d4f616d46906013a68caf61b7fdbe42fc8b25132e77"}, + {file = "watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:831a62658609f0e5c64178211c942ace999517f5770fe9436be4c2faeba0c0ef"}, + {file = 
"watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:f9a2ae5c91cecc9edd47e041a930490c31c3afb1f5e6d71de3dc671bfaca02bf"}, + {file = "watchfiles-1.1.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:d1715143123baeeaeadec0528bb7441103979a1d5f6fd0e1f915383fea7ea6d5"}, + {file = "watchfiles-1.1.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:39574d6370c4579d7f5d0ad940ce5b20db0e4117444e39b6d8f99db5676c52fd"}, + {file = "watchfiles-1.1.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7365b92c2e69ee952902e8f70f3ba6360d0d596d9299d55d7d386df84b6941fb"}, + {file = "watchfiles-1.1.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bfff9740c69c0e4ed32416f013f3c45e2ae42ccedd1167ef2d805c000b6c71a5"}, + {file = "watchfiles-1.1.1-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b27cf2eb1dda37b2089e3907d8ea92922b673c0c427886d4edc6b94d8dfe5db3"}, + {file = "watchfiles-1.1.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:526e86aced14a65a5b0ec50827c745597c782ff46b571dbfe46192ab9e0b3c33"}, + {file = "watchfiles-1.1.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04e78dd0b6352db95507fd8cb46f39d185cf8c74e4cf1e4fbad1d3df96faf510"}, + {file = "watchfiles-1.1.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c85794a4cfa094714fb9c08d4a218375b2b95b8ed1666e8677c349906246c05"}, + {file = "watchfiles-1.1.1-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:74d5012b7630714b66be7b7b7a78855ef7ad58e8650c73afc4c076a1f480a8d6"}, + {file = "watchfiles-1.1.1-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:8fbe85cb3201c7d380d3d0b90e63d520f15d6afe217165d7f98c9c649654db81"}, + {file = "watchfiles-1.1.1-cp314-cp314-win32.whl", hash = "sha256:3fa0b59c92278b5a7800d3ee7733da9d096d4aabcfabb9a928918bd276ef9b9b"}, + {file = "watchfiles-1.1.1-cp314-cp314-win_amd64.whl", hash = "sha256:c2047d0b6cea13b3316bdbafbfa0c4228ae593d995030fda39089d36e64fc03a"}, + {file = "watchfiles-1.1.1-cp314-cp314-win_arm64.whl", hash = "sha256:842178b126593addc05acf6fce960d28bc5fae7afbaa2c6c1b3a7b9460e5be02"}, + {file = "watchfiles-1.1.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:88863fbbc1a7312972f1c511f202eb30866370ebb8493aef2812b9ff28156a21"}, + {file = "watchfiles-1.1.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:55c7475190662e202c08c6c0f4d9e345a29367438cf8e8037f3155e10a88d5a5"}, + {file = "watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f53fa183d53a1d7a8852277c92b967ae99c2d4dcee2bfacff8868e6e30b15f7"}, + {file = "watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6aae418a8b323732fa89721d86f39ec8f092fc2af67f4217a2b07fd3e93c6101"}, + {file = "watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f096076119da54a6080e8920cbdaac3dbee667eb91dcc5e5b78840b87415bd44"}, + {file = "watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00485f441d183717038ed2e887a7c868154f216877653121068107b227a2f64c"}, + {file = "watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a55f3e9e493158d7bfdb60a1165035f1cf7d320914e7b7ea83fe22c6023b58fc"}, + {file = "watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c91ed27800188c2ae96d16e3149f199d62f86c7af5f5f4d2c61a3ed8cd3666c"}, + {file = 
"watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:311ff15a0bae3714ffb603e6ba6dbfba4065ab60865d15a6ec544133bdb21099"}, + {file = "watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:a916a2932da8f8ab582f242c065f5c81bed3462849ca79ee357dd9551b0e9b01"}, + {file = "watchfiles-1.1.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c882d69f6903ef6092bedfb7be973d9319940d56b8427ab9187d1ecd73438a70"}, + {file = "watchfiles-1.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d6ff426a7cb54f310d51bfe83fe9f2bbe40d540c741dc974ebc30e6aa238f52e"}, + {file = "watchfiles-1.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79ff6c6eadf2e3fc0d7786331362e6ef1e51125892c75f1004bd6b52155fb956"}, + {file = "watchfiles-1.1.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c1f5210f1b8fc91ead1283c6fd89f70e76fb07283ec738056cf34d51e9c1d62c"}, + {file = "watchfiles-1.1.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b9c4702f29ca48e023ffd9b7ff6b822acdf47cb1ff44cb490a3f1d5ec8987e9c"}, + {file = "watchfiles-1.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:acb08650863767cbc58bca4813b92df4d6c648459dcaa3d4155681962b2aa2d3"}, + {file = "watchfiles-1.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08af70fd77eee58549cd69c25055dc344f918d992ff626068242259f98d598a2"}, + {file = "watchfiles-1.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c3631058c37e4a0ec440bf583bc53cdbd13e5661bb6f465bc1d88ee9a0a4d02"}, + {file = "watchfiles-1.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cf57a27fb986c6243d2ee78392c503826056ffe0287e8794503b10fb51b881be"}, + {file = "watchfiles-1.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d7e7067c98040d646982daa1f37a33d3544138ea155536c2e0e63e07ff8a7e0f"}, + {file = "watchfiles-1.1.1-cp39-cp39-win32.whl", hash = "sha256:6c9c9262f454d1c4d8aaa7050121eb4f3aea197360553699520767daebf2180b"}, + {file = "watchfiles-1.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:74472234c8370669850e1c312490f6026d132ca2d396abfad8830b4f1c096957"}, + {file = "watchfiles-1.1.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:17ef139237dfced9da49fb7f2232c86ca9421f666d78c264c7ffca6601d154c3"}, + {file = "watchfiles-1.1.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:672b8adf25b1a0d35c96b5888b7b18699d27d4194bac8beeae75be4b7a3fc9b2"}, + {file = "watchfiles-1.1.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77a13aea58bc2b90173bc69f2a90de8e282648939a00a602e1dc4ee23e26b66d"}, + {file = "watchfiles-1.1.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b495de0bb386df6a12b18335a0285dda90260f51bdb505503c02bcd1ce27a8b"}, + {file = "watchfiles-1.1.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:db476ab59b6765134de1d4fe96a1a9c96ddf091683599be0f26147ea1b2e4b88"}, + {file = "watchfiles-1.1.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:89eef07eee5e9d1fda06e38822ad167a044153457e6fd997f8a858ab7564a336"}, + {file = "watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce19e06cbda693e9e7686358af9cd6f5d61312ab8b00488bc36f5aabbaf77e24"}, + {file = "watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e6f39af2eab0118338902798b5aa6664f46ff66bc0280de76fca67a7f262a49"}, + {file = 
"watchfiles-1.1.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cdab464fee731e0884c35ae3588514a9bcf718d0e2c82169c1c4a85cc19c3c7f"}, + {file = "watchfiles-1.1.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:3dbd8cbadd46984f802f6d479b7e3afa86c42d13e8f0f322d669d79722c8ec34"}, + {file = "watchfiles-1.1.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5524298e3827105b61951a29c3512deb9578586abf3a7c5da4a8069df247cccc"}, + {file = "watchfiles-1.1.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b943d3668d61cfa528eb949577479d3b077fd25fb83c641235437bc0b5bc60e"}, + {file = "watchfiles-1.1.1.tar.gz", hash = "sha256:a173cb5c16c4f40ab19cecf48a534c409f7ea983ab8fed0741304a1c0a31b3f2"}, +] + +[package.dependencies] +anyio = ">=3.0.0" + +[[package]] +name = "wcwidth" +version = "0.2.14" +description = "Measures the displayed width of unicode strings in a terminal" +optional = false +python-versions = ">=3.6" +groups = ["dev"] +files = [ + {file = "wcwidth-0.2.14-py2.py3-none-any.whl", hash = "sha256:a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1"}, + {file = "wcwidth-0.2.14.tar.gz", hash = "sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605"}, +] + +[[package]] +name = "websockets" +version = "15.0.1" +description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "websockets-15.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d63efaa0cd96cf0c5fe4d581521d9fa87744540d4bc999ae6e08595a1014b45b"}, + {file = "websockets-15.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac60e3b188ec7574cb761b08d50fcedf9d77f1530352db4eef1707fe9dee7205"}, + {file = "websockets-15.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5756779642579d902eed757b21b0164cd6fe338506a8083eb58af5c372e39d9a"}, + {file = "websockets-15.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdfe3e2a29e4db3659dbd5bbf04560cea53dd9610273917799f1cde46aa725e"}, + {file = "websockets-15.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c2529b320eb9e35af0fa3016c187dffb84a3ecc572bcee7c3ce302bfeba52bf"}, + {file = "websockets-15.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac1e5c9054fe23226fb11e05a6e630837f074174c4c2f0fe442996112a6de4fb"}, + {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5df592cd503496351d6dc14f7cdad49f268d8e618f80dce0cd5a36b93c3fc08d"}, + {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0a34631031a8f05657e8e90903e656959234f3a04552259458aac0b0f9ae6fd9"}, + {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3d00075aa65772e7ce9e990cab3ff1de702aa09be3940d1dc88d5abf1ab8a09c"}, + {file = "websockets-15.0.1-cp310-cp310-win32.whl", hash = "sha256:1234d4ef35db82f5446dca8e35a7da7964d02c127b095e172e54397fb6a6c256"}, + {file = "websockets-15.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:39c1fec2c11dc8d89bba6b2bf1556af381611a173ac2b511cf7231622058af41"}, + {file = "websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431"}, + {file = "websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57"}, + {file = "websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905"}, + {file = "websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562"}, + {file = "websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792"}, + {file = "websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413"}, + {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8"}, + {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3"}, + {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf"}, + {file = "websockets-15.0.1-cp311-cp311-win32.whl", hash = "sha256:16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85"}, + {file = "websockets-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065"}, + {file = "websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3"}, + {file = "websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665"}, + {file = "websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2"}, + {file = "websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215"}, + {file = "websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5"}, + {file = "websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65"}, + {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe"}, + {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4"}, + {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597"}, + {file = "websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9"}, + {file = "websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7"}, + {file = "websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931"}, + {file = "websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675"}, + {file = "websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151"}, + {file = "websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22"}, + {file = "websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f"}, + {file = "websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8"}, + {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375"}, + {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d"}, + {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4"}, + {file = "websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa"}, + {file = "websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561"}, + {file = "websockets-15.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5f4c04ead5aed67c8a1a20491d54cdfba5884507a48dd798ecaf13c74c4489f5"}, + {file = "websockets-15.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abdc0c6c8c648b4805c5eacd131910d2a7f6455dfd3becab248ef108e89ab16a"}, + {file = "websockets-15.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a625e06551975f4b7ea7102bc43895b90742746797e2e14b70ed61c43a90f09b"}, + {file = "websockets-15.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d591f8de75824cbb7acad4e05d2d710484f15f29d4a915092675ad3456f11770"}, + {file = "websockets-15.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47819cea040f31d670cc8d324bb6435c6f133b8c7a19ec3d61634e62f8d8f9eb"}, + {file = "websockets-15.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac017dd64572e5c3bd01939121e4d16cf30e5d7e110a119399cf3133b63ad054"}, + {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4a9fac8e469d04ce6c25bb2610dc535235bd4aa14996b4e6dbebf5e007eba5ee"}, + {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363c6f671b761efcb30608d24925a382497c12c506b51661883c3e22337265ed"}, + {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2034693ad3097d5355bfdacfffcbd3ef5694f9718ab7f29c29689a9eae841880"}, + {file = "websockets-15.0.1-cp39-cp39-win32.whl", hash = "sha256:3b1ac0d3e594bf121308112697cf4b32be538fb1444468fb0a6ae4feebc83411"}, + {file = "websockets-15.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7643a03db5c95c799b89b31c036d5f27eeb4d259c798e878d6937d71832b1e4"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0c9e74d766f2818bb95f84c25be4dea09841ac0f734d1966f415e4edfc4ef1c3"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:1009ee0c7739c08a0cd59de430d6de452a55e42d6b522de7aa15e6f67db0b8e1"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76d1f20b1c7a2fa82367e04982e708723ba0e7b8d43aa643d3dcd404d74f1475"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f29d80eb9a9263b8d109135351caf568cc3f80b9928bccde535c235de55c22d9"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b359ed09954d7c18bbc1680f380c7301f92c60bf924171629c5db97febb12f04"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:cad21560da69f4ce7658ca2cb83138fb4cf695a2ba3e475e0559e05991aa8122"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7f493881579c90fc262d9cdbaa05a6b54b3811c2f300766748db79f098db9940"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:47b099e1f4fbc95b701b6e85768e1fcdaf1630f3cbe4765fa216596f12310e2e"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67f2b6de947f8c757db2db9c71527933ad0019737ec374a8a6be9a956786aaf9"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d08eb4c2b7d6c41da6ca0600c077e93f5adcfd979cd777d747e9ee624556da4b"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b826973a4a2ae47ba357e4e82fa44a463b8f168e1ca775ac64521442b19e87f"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:21c1fa28a6a7e3cbdc171c694398b6df4744613ce9b36b1a498e816787e28123"}, + {file = "websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f"}, + {file = "websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee"}, +] + +[metadata] +lock-version = "2.1" +python-versions = "^3.12" +content-hash = "914aa2d42a654e9ec1e14d9ef4b98a1dd29f5ef5a440cb981dd524170c5bc86d" diff --git a/scripts/init-db.sql b/scripts/init-db.sql new file mode 100644 index 0000000..13ef36b --- /dev/null +++ b/scripts/init-db.sql @@ -0,0 +1,6 @@ +-- Create test database +CREATE DATABASE test_db; + +-- Grant privileges to postgres user (already has superuser, but being explicit) +GRANT ALL PRIVILEGES ON DATABASE app_db TO postgres; +GRANT ALL PRIVILEGES ON DATABASE test_db TO postgres; From 1e5e887d0508cbd6554aa884146b06baef6fceac Mon Sep 17 00:00:00 2001 From: dhuzjak Date: Thu, 18 Dec 2025 14:11:51 +0100 Subject: [PATCH 08/17] Add Bruno collection for testing API in dev Check how API works by testing with Bruno Created collection with instructions for every REST method --- bruno/README.md | 95 +++++++++++++++++++ .../Create Server (Invalid IP).bru | 25 +++++ .../Create Server (Offline).bru | 24 +++++ bruno/Server-Inventory-API/Create Server.bru | 20 ++++ bruno/Server-Inventory-API/Delete Server.bru | 15 +++ .../Server-Inventory-API/Get Server by ID.bru | 15 +++ bruno/Server-Inventory-API/Health Check.bru | 11 +++ bruno/Server-Inventory-API/List Servers.bru | 16 ++++ bruno/Server-Inventory-API/Update Server.bru | 23 +++++ bruno/Server-Inventory-API/bruno.json | 9 ++ .../environments/local.bru | 3 + 11 files changed, 256 
insertions(+) create mode 100644 bruno/README.md create mode 100644 bruno/Server-Inventory-API/Create Server (Invalid IP).bru create mode 100644 bruno/Server-Inventory-API/Create Server (Offline).bru create mode 100644 bruno/Server-Inventory-API/Create Server.bru create mode 100644 bruno/Server-Inventory-API/Delete Server.bru create mode 100644 bruno/Server-Inventory-API/Get Server by ID.bru create mode 100644 bruno/Server-Inventory-API/Health Check.bru create mode 100644 bruno/Server-Inventory-API/List Servers.bru create mode 100644 bruno/Server-Inventory-API/Update Server.bru create mode 100644 bruno/Server-Inventory-API/bruno.json create mode 100644 bruno/Server-Inventory-API/environments/local.bru diff --git a/bruno/README.md b/bruno/README.md new file mode 100644 index 0000000..763a9e0 --- /dev/null +++ b/bruno/README.md @@ -0,0 +1,95 @@ +# Bruno API Collection + +This directory contains a Bruno API collection for testing the Server Inventory Management API. + +## What is Bruno? + +Bruno is an open-source API client (alternative to Postman/Insomnia) that stores collections as files in your repository. Download it from: https://www.usebruno.com/ + +## How to Use + +1. **Install Bruno** + - Download from https://www.usebruno.com/downloads + - Or install via Homebrew: `brew install bruno` + +2. **Open the Collection** + - Launch Bruno + - Click "Open Collection" + - Navigate to this `bruno/Server-Inventory-API` directory + - Select the folder + +3. **Select Environment** + - In Bruno, select the "local" environment from the dropdown (top-right) + - This sets `baseUrl` to `http://localhost:8000` + +4. **Start the API Server** + ```bash + # Make sure database is running + make docker-up + + # Start the API server + make run + ``` + +5. **Test the Endpoints** + - Click on any request in the left sidebar + - Click "Send" to execute the request + - View the response below + +## Available Requests + +### Health & Status +- **Health Check** - Check if API is running + +### Server Management +- **Create Server** - Create a new server with all fields +- **Create Server (Offline)** - Example with offline state +- **Create Server (Invalid IP)** - Test validation errors +- **List Servers** - Get all servers with pagination +- **Get Server by ID** - Get a specific server (change ID in URL) +- **Update Server** - Update server fields (change ID in URL) +- **Delete Server** - Delete a server (change ID in URL) + +## Testing Workflow + +1. **Create a server** + - Run "Create Server" request + - Note the `id` in the response + +2. **Get the server** + - Run "Get Server by ID" + - Replace `1` in URL with the actual server ID + +3. **Update the server** + - Run "Update Server" + - Replace `1` in URL with the actual server ID + - Modify the body fields as needed + +4. **List all servers** + - Run "List Servers" + - Adjust `skip` and `limit` query parameters for pagination + +5. 
**Delete the server** + - Run "Delete Server" + - Replace `1` in URL with the actual server ID + +## Server States + +Valid server states: +- `active` - Server is running +- `offline` - Server is stopped +- `retired` - Server is decommissioned + +## Validation Rules + +- **hostname**: Required, unique, max 255 characters +- **ip_address**: Required, must be valid IPv4 or IPv6 address +- **name**: Optional, max 255 characters +- **state**: Required, must be one of: active, offline, retired + +## Tips + +- Use Bruno's "Environment Variables" to store dynamic values like server IDs +- Use the "Tests" tab to add assertions +- Use "Pre-request Script" to generate dynamic data +- Collections are version-controlled with your code! diff --git a/bruno/Server-Inventory-API/Create Server (Invalid IP).bru b/bruno/Server-Inventory-API/Create Server (Invalid IP).bru new file mode 100644 index 0000000..f2dde88 --- /dev/null +++ b/bruno/Server-Inventory-API/Create Server (Invalid IP).bru @@ -0,0 +1,25 @@ +meta { + name: Create Server (Invalid IP) + type: http + seq: 8 +} + +post { + url: {{baseUrl}}/servers/ + body: json + auth: none +} + +body:json { + { + "hostname": "test-server", + "ip_address": "999.999.999.999", + "name": "Test Server", + "state": "active" + } +} + +docs { + This should return a 422 validation error because the IP address is invalid. + Use this to test API error handling. +} diff --git a/bruno/Server-Inventory-API/Create Server (Offline).bru b/bruno/Server-Inventory-API/Create Server (Offline).bru new file mode 100644 index 0000000..d7c3a2a --- /dev/null +++ b/bruno/Server-Inventory-API/Create Server (Offline).bru @@ -0,0 +1,24 @@ +meta { + name: Create Server (Offline) + type: http + seq: 7 +} + +post { + url: {{baseUrl}}/servers/ + body: json + auth: none +} + +body:json { + { + "hostname": "db-server-01", + "ip_address": "192.168.1.101", + "name": "Database Server 1", + "state": "offline" + } +} + +docs { + Example of creating a server in offline state +} diff --git a/bruno/Server-Inventory-API/Create Server.bru b/bruno/Server-Inventory-API/Create Server.bru new file mode 100644 index 0000000..7b1afef --- /dev/null +++ b/bruno/Server-Inventory-API/Create Server.bru @@ -0,0 +1,20 @@ +meta { + name: Create Server + type: http + seq: 2 +} + +post { + url: {{baseUrl}}/servers/ + body: json + auth: none +} + +body:json { + { + "hostname": "web-server-01", + "ip_address": "192.168.1.100", + "name": "Web Server 1", + "state": "active" + } +} diff --git a/bruno/Server-Inventory-API/Delete Server.bru b/bruno/Server-Inventory-API/Delete Server.bru new file mode 100644 index 0000000..07e34ee --- /dev/null +++ b/bruno/Server-Inventory-API/Delete Server.bru @@ -0,0 +1,15 @@ +meta { + name: Delete Server + type: http + seq: 6 +} + +delete { + url: {{baseUrl}}/servers/1 + body: none + auth: none +} + +docs { + Replace '1' in the URL with the actual server ID +} diff --git a/bruno/Server-Inventory-API/Get Server by ID.bru b/bruno/Server-Inventory-API/Get Server by ID.bru new file mode 100644 index 0000000..abb9810 --- /dev/null +++ b/bruno/Server-Inventory-API/Get Server by ID.bru @@ -0,0 +1,15 @@ +meta { + name: Get Server by ID + type: http + seq: 4 +} + +get { + url: {{baseUrl}}/servers/1 + body: none + auth: none +} + +docs { + Replace '1' in the URL with the actual server ID +} diff --git a/bruno/Server-Inventory-API/Health Check.bru b/bruno/Server-Inventory-API/Health Check.bru new file mode 100644 index 0000000..7291827 --- /dev/null +++ b/bruno/Server-Inventory-API/Health Check.bru @@ 
-0,0 +1,11 @@ +meta { + name: Health Check + type: http + seq: 1 +} + +get { + url: {{baseUrl}}/health + body: none + auth: none +} diff --git a/bruno/Server-Inventory-API/List Servers.bru b/bruno/Server-Inventory-API/List Servers.bru new file mode 100644 index 0000000..2cfc765 --- /dev/null +++ b/bruno/Server-Inventory-API/List Servers.bru @@ -0,0 +1,16 @@ +meta { + name: List Servers + type: http + seq: 3 +} + +get { + url: {{baseUrl}}/servers/?skip=0&limit=100 + body: none + auth: none +} + +params:query { + skip: 0 + limit: 100 +} diff --git a/bruno/Server-Inventory-API/Update Server.bru b/bruno/Server-Inventory-API/Update Server.bru new file mode 100644 index 0000000..c90bb3b --- /dev/null +++ b/bruno/Server-Inventory-API/Update Server.bru @@ -0,0 +1,23 @@ +meta { + name: Update Server + type: http + seq: 5 +} + +put { + url: {{baseUrl}}/servers/1 + body: json + auth: none +} + +body:json { + { + "name": "Updated Web Server", + "state": "offline" + } +} + +docs { + Replace '1' in the URL with the actual server ID. + All fields are optional - only send the fields you want to update. +} diff --git a/bruno/Server-Inventory-API/bruno.json b/bruno/Server-Inventory-API/bruno.json new file mode 100644 index 0000000..4713def --- /dev/null +++ b/bruno/Server-Inventory-API/bruno.json @@ -0,0 +1,9 @@ +{ + "version": "1", + "name": "Server Inventory API", + "type": "collection", + "ignore": [ + "node_modules", + ".git" + ] +} diff --git a/bruno/Server-Inventory-API/environments/local.bru b/bruno/Server-Inventory-API/environments/local.bru new file mode 100644 index 0000000..2de9da6 --- /dev/null +++ b/bruno/Server-Inventory-API/environments/local.bru @@ -0,0 +1,3 @@ +vars { + baseUrl: http://localhost:8000 +} From 78c2b8da6d4868bdaf58f0b0b6637bd8e6269476 Mon Sep 17 00:00:00 2001 From: dhuzjak Date: Thu, 18 Dec 2025 14:27:20 +0100 Subject: [PATCH 09/17] Fix cleanup between tests Clean up database between tests --- tests/conftest.py | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 4b7d071..c70991e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -45,8 +45,19 @@ def clean_db(test_db): yield test_db -@pytest.fixture(scope="module") -def client(): - """Create a test client.""" +@pytest.fixture(scope="function") +def client(clean_db): + """Create a test client with clean database.""" with TestClient(app) as test_client: yield test_client + + +@pytest.fixture +def sample_server_data(): + """Sample server data for testing.""" + return { + "hostname": "web-server-01", + "ip_address": "192.168.1.100", + "name": "Web Server 1", + "state": "active" + } From eda37422b018de925442435add935a6205276012 Mon Sep 17 00:00:00 2001 From: dhuzjak Date: Thu, 18 Dec 2025 14:30:36 +0100 Subject: [PATCH 10/17] Fix some deprecation warnings Fix main app according to the warnings and filter warnings in pytest --- app/main.py | 31 +++++++++++++++++-------------- pyproject.toml | 5 +++++ 2 files changed, 22 insertions(+), 14 deletions(-) diff --git a/app/main.py b/app/main.py index 328ee75..4f488ae 100644 --- a/app/main.py +++ b/app/main.py @@ -1,17 +1,34 @@ """ Main FastAPI application entry point.
""" +from contextlib import asynccontextmanager from fastapi import FastAPI from fastapi.middleware.cors import CORSMiddleware from app.core.config import settings from app.api import servers, health + +@asynccontextmanager +async def lifespan(app: FastAPI): + """Application lifespan events.""" + # Startup + from app.db.database import init_db + init_db() + + yield + + # Shutdown + from app.db.database import close_db + close_db() + + app = FastAPI( title=settings.PROJECT_NAME, version=settings.VERSION, description=settings.DESCRIPTION, debug=settings.DEBUG, + lifespan=lifespan, ) # Configure CORS (conditional) @@ -27,17 +44,3 @@ # Include routers app.include_router(health.router, tags=["health"]) app.include_router(servers.router, prefix="/servers", tags=["servers"]) - - -@app.on_event("startup") -async def startup_event(): - """Run on application startup.""" - from app.db.database import init_db - init_db() - - -@app.on_event("shutdown") -async def shutdown_event(): - """Run on application shutdown.""" - from app.db.database import close_db - close_db() diff --git a/pyproject.toml b/pyproject.toml index 63dd0be..d387f06 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,6 +60,11 @@ markers = [ env = [ "APP_ENV=testing" ] +asyncio_mode = "auto" +filterwarnings = [ + "ignore::DeprecationWarning", + "ignore::PendingDeprecationWarning", +] # Black code formatter configuration [tool.black] From 8beae4818972427e3385df7fe03b2a321db13630 Mon Sep 17 00:00:00 2001 From: dhuzjak Date: Thu, 18 Dec 2025 14:47:14 +0100 Subject: [PATCH 11/17] Add TestContainers for testing Use TestContainers for postgres so we dont have to worry about testing database --- Makefile | 6 +- TESTING.md | 299 +++++++++++++++++++++++++++++++++++ poetry.lock | 393 +++++++++++++++++++++++++++++++++++++++++++++- pyproject.toml | 1 + tests/conftest.py | 19 ++- 5 files changed, 708 insertions(+), 10 deletions(-) create mode 100644 TESTING.md diff --git a/Makefile b/Makefile index f842054..b5c27b2 100644 --- a/Makefile +++ b/Makefile @@ -20,8 +20,8 @@ help: @echo " make run - Run the development server (default: development)" @echo " make run-dev - Run the development server (APP_ENV=development)" @echo " make run-prod - Run the production server (APP_ENV=production)" - @echo " make test - Run tests (APP_ENV=testing)" - @echo " make test-cov - Run tests with coverage report" + @echo " make test - Run tests with testcontainers (requires Docker)" + @echo " make test-cov - Run tests with coverage report (requires Docker)" @echo "" @echo "Code quality commands:" @echo " make format - Format code with ruff, black and isort" @@ -153,9 +153,11 @@ run-prod: APP_ENV=production poetry run uvicorn app.main:app test: + @echo "🧪 Running tests with testcontainers (Docker required)..." APP_ENV=testing poetry run pytest test-cov: + @echo "🧪 Running tests with coverage report (Docker required)..." poetry run pytest --cov=app --cov-report=html --cov-report=term format: diff --git a/TESTING.md b/TESTING.md new file mode 100644 index 0000000..d3f3688 --- /dev/null +++ b/TESTING.md @@ -0,0 +1,299 @@ +# Testing Guide + +This project uses pytest with testcontainers for isolated database testing. 
+ +## Prerequisites + +- **Docker**: Testcontainers requires Docker to be running +- **Python 3.12+**: Installed via Homebrew or your preferred method +- **Poetry**: For dependency management + +## Quick Start + +```bash +# Make sure Docker is running +docker ps + +# Run all tests +make test + +# Run tests with coverage report +make test-cov + +# Run specific test file +poetry run pytest tests/test_server_api.py -v + +# Run specific test +poetry run pytest tests/test_server_api.py::test_create_server -v +``` + +## How Testcontainers Works + +### What is Testcontainers? + +Testcontainers is a Python library that provides lightweight, throwaway instances of databases, message brokers, and other services for testing. Each test run gets a completely fresh PostgreSQL database in an isolated Docker container. + +### Benefits + +1. **Isolation**: Each test run uses a fresh database - no shared state +2. **Consistency**: Same database version across all environments (CI/CD, local, etc.) +3. **No Setup Required**: No need to install PostgreSQL locally +4. **Parallel Testing**: Can run multiple test suites in parallel safely +5. **Clean State**: Database is destroyed after tests complete + +### How It Works in This Project + +When you run tests: + +1. **Container Startup** (once per test session): + ```python + @pytest.fixture(scope="session") + def postgres_container(): + with PostgresContainer("postgres:16-alpine") as postgres: + yield postgres + ``` + - Testcontainers pulls `postgres:16-alpine` image (if not cached) + - Starts a PostgreSQL container + - Waits for PostgreSQL to be ready + +2. **Database Setup** (once per test session): + ```python + @pytest.fixture(scope="session") + def test_db(postgres_container): + db_url = postgres_container.get_connection_url() + db = Database(db_url) + create_tables() + yield db + drop_tables() + db.close() + ``` + +3. **Clean Database** (before each test): + ```python + @pytest.fixture(scope="function") + def clean_db(test_db): + drop_tables() + create_tables() + yield test_db + ``` + - Each test gets a fresh database schema + +4. **Container Cleanup** (after all tests): + - Container is automatically stopped and removed + - No cleanup needed! 
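+
+### Putting It Together
+
+Combining the fragments above, a complete `tests/conftest.py` might look like the sketch below. This is illustrative rather than the literal project file: the `Database`, `create_tables`, and `drop_tables` helpers are assumed to live in `app.db.database`, matching the snippets shown earlier.
+
+```python
+"""Pytest fixtures wiring the app to a throwaway PostgreSQL container (sketch)."""
+import pytest
+from fastapi.testclient import TestClient
+from testcontainers.postgres import PostgresContainer
+
+from app.main import app
+# Assumed import path for the project's raw-SQL helpers:
+from app.db.database import Database, create_tables, drop_tables
+
+
+@pytest.fixture(scope="session")
+def postgres_container():
+    """Start a single PostgreSQL container for the whole test session."""
+    with PostgresContainer("postgres:16-alpine") as postgres:
+        yield postgres
+
+
+@pytest.fixture(scope="session")
+def test_db(postgres_container):
+    """Connect to the container and create the schema once per session."""
+    db = Database(postgres_container.get_connection_url())
+    create_tables()
+    yield db
+    drop_tables()
+    db.close()
+
+
+@pytest.fixture(scope="function")
+def clean_db(test_db):
+    """Recreate the schema before every test so no state leaks between tests."""
+    drop_tables()
+    create_tables()
+    yield test_db
+
+
+@pytest.fixture(scope="function")
+def client(clean_db):
+    """FastAPI TestClient bound to a freshly reset database."""
+    with TestClient(app) as test_client:
+        yield test_client
+
+
+@pytest.fixture
+def sample_server_data():
+    """Sample server payload used by the API tests."""
+    return {
+        "hostname": "web-server-01",
+        "ip_address": "192.168.1.100",
+        "name": "Web Server 1",
+        "state": "active",
+    }
+```
+
+The session-scoped container keeps test runs fast (one container startup per run), while the function-scoped `clean_db` guarantees isolation between individual tests.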
+ +## Test Structure + +``` +tests/ +├── conftest.py # Pytest fixtures and configuration +└── test_server_api.py # Server API integration tests +``` + +## Test Markers + +Tests are marked with pytest markers for categorization: + +- `@pytest.mark.integration` - Integration tests (test full API) +- `@pytest.mark.unit` - Unit tests (test individual functions) +- `@pytest.mark.slow` - Slow-running tests + +Run specific marker: +```bash +# Run only integration tests +poetry run pytest -m integration + +# Run only unit tests +poetry run pytest -m unit + +# Skip slow tests +poetry run pytest -m "not slow" +``` + +## Writing Tests + +### Basic Test Pattern + +```python +@pytest.mark.integration +def test_create_server(client, sample_server_data): + """Test creating a server.""" + response = client.post("/servers/", json=sample_server_data) + assert response.status_code == 201 + + data = response.json() + assert data["hostname"] == sample_server_data["hostname"] + assert "id" in data +``` + +### Available Fixtures + +- `client`: FastAPI TestClient with clean database +- `sample_server_data`: Sample server data dictionary +- `clean_db`: Direct database access (if needed) +- `test_db`: Database instance (session-scoped) +- `postgres_container`: PostgreSQL container instance + +## Debugging Tests + +### View Test Output + +```bash +# Verbose output +poetry run pytest -v + +# Show print statements +poetry run pytest -s + +# Stop on first failure +poetry run pytest -x + +# Show local variables on failure +poetry run pytest -l +``` + +### Debug Specific Test + +```bash +# Run single test with full output +poetry run pytest tests/test_server_api.py::test_create_server -v -s + +# Run with Python debugger +poetry run pytest tests/test_server_api.py::test_create_server --pdb +``` + +### Check Container Status + +If tests are hanging, check Docker: + +```bash +# List running containers +docker ps + +# View testcontainer logs +docker logs <container-id> + +# Clean up orphaned containers +docker ps -a | grep testcontainers +docker rm -f <container-id> +``` + +## Continuous Integration + +Testcontainers works seamlessly in CI/CD environments: + +### GitHub Actions Example + +```yaml +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.12' + + - name: Install Poetry + run: pip install poetry + + - name: Install dependencies + run: poetry install + + - name: Run tests + run: poetry run pytest -v + # Docker is available by default in GitHub Actions +``` + +## Troubleshooting + +### "Docker not available" Error + +**Problem**: `docker.errors.DockerException: Error while fetching server API version` + +**Solution**: Make sure Docker Desktop is running: +```bash +# Check Docker is running +docker ps + +# Start Docker Desktop (macOS) +open -a Docker +``` + +### "Port already in use" Error + +**Problem**: Another PostgreSQL instance is using the port + +**Solution**: Testcontainers automatically assigns random ports, so this should not happen. If it does: +```bash +# Stop local PostgreSQL +brew services stop postgresql@16 + +# Or kill the process +lsof -i :5432 +kill -9 <PID> +``` + +### Slow First Run + +**Problem**: First test run is slow + +**Explanation**: Docker is pulling the `postgres:16-alpine` image (~80MB). Subsequent runs use the cached image and are much faster.
+ +**Speed it up**: +```bash +# Pre-pull the image +docker pull postgres:16-alpine +``` + +### Tests Hang on Exit + +**Problem**: Tests pass but pytest hangs + +**Solution**: Usually a Docker cleanup issue. Force stop: +```bash +# Ctrl+C to stop pytest +# Clean up containers +docker ps -a | grep testcontainers +docker rm -f $(docker ps -a -q --filter "label=org.testcontainers") +``` + +## Performance Tips + +1. **Reuse Container**: Tests use `scope="session"` to reuse the container across all tests +2. **Parallel Tests**: Use `pytest-xdist` for parallel test execution + ```bash + poetry add --group dev pytest-xdist + poetry run pytest -n auto # Run tests in parallel + ``` +3. **Pre-pull Images**: Pull Docker images before test runs +4. **Use Alpine Images**: We use `postgres:16-alpine` which is smaller and faster + +## Alternative: Local PostgreSQL + +If you prefer to use local PostgreSQL instead of testcontainers: + +1. Update `tests/conftest.py`: + ```python + @pytest.fixture(scope="session") + def test_db(): + db = Database("postgresql://postgres:postgres@localhost:5432/test_db") + create_tables() + yield db + drop_tables() + db.close() + ``` + +2. Remove testcontainers dependency: + ```bash + poetry remove --group dev testcontainers + ``` + +3. Create test database: + ```bash + createdb test_db + ``` + +However, testcontainers is recommended for better isolation and consistency. diff --git a/poetry.lock b/poetry.lock index 5571733..79c76d0 100644 --- a/poetry.lock +++ b/poetry.lock @@ -193,6 +193,129 @@ files = [ {file = "cfgv-3.5.0.tar.gz", hash = "sha256:d5b1034354820651caa73ede66a6294d6e95c1b00acc5e9b098e917404669132"}, ] +[[package]] +name = "charset-normalizer" +version = "3.4.4" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e824f1492727fa856dd6eda4f7cee25f8518a12f3c4a56a74e8095695089cf6d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4bd5d4137d500351a30687c2d3971758aac9a19208fc110ccb9d7188fbe709e8"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:027f6de494925c0ab2a55eab46ae5129951638a49a34d87f4c3eda90f696b4ad"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f820802628d2694cb7e56db99213f930856014862f3fd943d290ea8438d07ca8"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:798d75d81754988d2565bff1b97ba5a44411867c0cf32b77a7e8f8d84796b10d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d1bb833febdff5c8927f922386db610b49db6e0d4f4ee29601d71e7c2694313"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9cd98cdc06614a2f768d2b7286d66805f94c48cde050acdbbb7db2600ab3197e"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:077fbb858e903c73f6c9db43374fd213b0b6a778106bc7032446a8e8b5b38b93"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:244bfb999c71b35de57821b8ea746b24e863398194a4014e4c76adc2bbdfeff0"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:64b55f9dce520635f018f907ff1b0df1fdc31f2795a922fb49dd14fbcdf48c84"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:faa3a41b2b66b6e50f84ae4a68c64fcd0c44355741c6374813a800cd6695db9e"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6515f3182dbe4ea06ced2d9e8666d97b46ef4c75e326b79bb624110f122551db"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc00f04ed596e9dc0da42ed17ac5e596c6ccba999ba6bd92b0e0aef2f170f2d6"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win32.whl", hash = "sha256:f34be2938726fc13801220747472850852fe6b1ea75869a048d6f896838c896f"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:a61900df84c667873b292c3de315a786dd8dac506704dea57bc957bd31e22c7d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win_arm64.whl", hash = "sha256:cead0978fc57397645f12578bfd2d5ea9138ea0fac82b2f63f7f7c6877986a69"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ce8a0633f41a967713a59c4139d29110c07e826d131a316b50ce11b1d79b4f84"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:eaabd426fe94daf8fd157c32e571c85cb12e66692f15516a83a03264b08d06c3"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c4ef880e27901b6cc782f1b95f82da9313c0eb95c3af699103088fa0ac3ce9ac"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2aaba3b0819274cc41757a1da876f810a3e4d7b6eb25699253a4effef9e8e4af"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:778d2e08eda00f4256d7f672ca9fef386071c9202f5e4607920b86d7803387f2"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f155a433c2ec037d4e8df17d18922c3a0d9b3232a396690f17175d2946f0218d"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a8bf8d0f749c5757af2142fe7903a9df1d2e8aa3841559b2bad34b08d0e2bcf3"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:194f08cbb32dc406d6e1aea671a68be0823673db2832b38405deba2fb0d88f63"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:6aee717dcfead04c6eb1ce3bd29ac1e22663cdea57f943c87d1eab9a025438d7"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:cd4b7ca9984e5e7985c12bc60a6f173f3c958eae74f3ef6624bb6b26e2abbae4"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_riscv64.whl", hash = "sha256:b7cf1017d601aa35e6bb650b6ad28652c9cd78ee6caff19f3c28d03e1c80acbf"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:e912091979546adf63357d7e2ccff9b44f026c075aeaf25a52d0e95ad2281074"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5cb4d72eea50c8868f5288b7f7f33ed276118325c1dfd3957089f6b519e1382a"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-win32.whl", hash = "sha256:837c2ce8c5a65a2035be9b3569c684358dfbf109fd3b6969630a87535495ceaa"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:44c2a8734b333e0578090c4cd6b16f275e07aa6614ca8715e6c038e865e70576"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a9768c477b9d7bd54bc0c86dbaebdec6f03306675526c9927c0e8a04e8f94af9"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bee1e43c28aa63cb16e5c14e582580546b08e535299b8b6158a7c9c768a1f3d"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fd44c878ea55ba351104cb93cc85e74916eb8fa440ca7903e57575e97394f608"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0f04b14ffe5fdc8c4933862d8306109a2c51e0704acfa35d51598eb45a1e89fc"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:cd09d08005f958f370f539f186d10aec3377d55b9eeb0d796025d4886119d76e"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4fe7859a4e3e8457458e2ff592f15ccb02f3da787fcd31e0183879c3ad4692a1"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:fa09f53c465e532f4d3db095e0c55b615f010ad81803d383195b6b5ca6cbf5f3"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7fa17817dc5625de8a027cb8b26d9fefa3ea28c8253929b8d6649e705d2835b6"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:5947809c8a2417be3267efc979c47d76a079758166f7d43ef5ae8e9f92751f88"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:4902828217069c3c5c71094537a8e623f5d097858ac6ca8252f7b4d10b7560f1"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:7c308f7e26e4363d79df40ca5b2be1c6ba9f02bdbccfed5abddb7859a6ce72cf"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2c9d3c380143a1fedbff95a312aa798578371eb29da42106a29019368a475318"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb01158d8b88ee68f15949894ccc6712278243d95f344770fa7593fa2d94410c"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win32.whl", hash = "sha256:2677acec1a2f8ef614c6888b5b4ae4060cc184174a938ed4e8ef690e15d3e505"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:f8e160feb2aed042cd657a72acc0b481212ed28b1b9a95c0cee1621b524e1966"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win_arm64.whl", hash = "sha256:b5d84d37db046c5ca74ee7bb47dd6cbc13f80665fdde3e8040bdd3fb015ecb50"}, + {file = "charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f"}, + {file = "charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a"}, +] + [[package]] name = "click" version = "8.3.1" @@ -371,6 +494,29 @@ idna = ["idna (>=3.10)"] trio = ["trio (>=0.30)"] wmi = ["wmi (>=1.5.1) ; platform_system == \"Windows\""] +[[package]] +name = "docker" +version = "7.1.0" +description = "A Python library for the Docker Engine API." 
+optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0"}, + {file = "docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c"}, +] + +[package.dependencies] +pywin32 = {version = ">=304", markers = "sys_platform == \"win32\""} +requests = ">=2.26.0" +urllib3 = ">=1.26.0" + +[package.extras] +dev = ["coverage (==7.2.7)", "pytest (==7.4.2)", "pytest-cov (==4.1.0)", "pytest-timeout (==2.1.0)", "ruff (==0.1.8)"] +docs = ["myst-parser (==0.18.0)", "sphinx (==5.1.1)"] +ssh = ["paramiko (>=2.4.3)"] +websockets = ["websocket-client (>=1.3.0)"] + [[package]] name = "email-validator" version = "2.3.0" @@ -1454,6 +1600,37 @@ files = [ [package.extras] dev = ["atomicwrites (==1.2.1)", "attrs (==19.2.0)", "coverage (==6.5.0)", "hatch", "invoke (==1.7.3)", "more-itertools (==4.3.0)", "pbr (==4.3.0)", "pluggy (==1.0.0)", "py (==1.11.0)", "pytest (==7.2.0)", "pytest-cov (==4.0.0)", "pytest-timeout (==2.1.0)", "pyyaml (==5.1)"] +[[package]] +name = "pywin32" +version = "311" +description = "Python for Window Extensions" +optional = false +python-versions = "*" +groups = ["dev"] +markers = "sys_platform == \"win32\"" +files = [ + {file = "pywin32-311-cp310-cp310-win32.whl", hash = "sha256:d03ff496d2a0cd4a5893504789d4a15399133fe82517455e78bad62efbb7f0a3"}, + {file = "pywin32-311-cp310-cp310-win_amd64.whl", hash = "sha256:797c2772017851984b97180b0bebe4b620bb86328e8a884bb626156295a63b3b"}, + {file = "pywin32-311-cp310-cp310-win_arm64.whl", hash = "sha256:0502d1facf1fed4839a9a51ccbcc63d952cf318f78ffc00a7e78528ac27d7a2b"}, + {file = "pywin32-311-cp311-cp311-win32.whl", hash = "sha256:184eb5e436dea364dcd3d2316d577d625c0351bf237c4e9a5fabbcfa5a58b151"}, + {file = "pywin32-311-cp311-cp311-win_amd64.whl", hash = "sha256:3ce80b34b22b17ccbd937a6e78e7225d80c52f5ab9940fe0506a1a16f3dab503"}, + {file = "pywin32-311-cp311-cp311-win_arm64.whl", hash = "sha256:a733f1388e1a842abb67ffa8e7aad0e70ac519e09b0f6a784e65a136ec7cefd2"}, + {file = "pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31"}, + {file = "pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067"}, + {file = "pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852"}, + {file = "pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d"}, + {file = "pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d"}, + {file = "pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a"}, + {file = "pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee"}, + {file = "pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87"}, + {file = "pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42"}, + {file = "pywin32-311-cp38-cp38-win32.whl", hash = "sha256:6c6f2969607b5023b0d9ce2541f8d2cbb01c4f46bc87456017cf63b73f1e2d8c"}, + {file = "pywin32-311-cp38-cp38-win_amd64.whl", hash = 
"sha256:c8015b09fb9a5e188f83b7b04de91ddca4658cee2ae6f3bc483f0b21a77ef6cd"}, + {file = "pywin32-311-cp39-cp39-win32.whl", hash = "sha256:aba8f82d551a942cb20d4a83413ccbac30790b50efb89a75e4f586ac0bb8056b"}, + {file = "pywin32-311-cp39-cp39-win_amd64.whl", hash = "sha256:e0c4cfb0621281fe40387df582097fd796e80430597cb9944f0ae70447bacd91"}, + {file = "pywin32-311-cp39-cp39-win_arm64.whl", hash = "sha256:62ea666235135fee79bb154e695f3ff67370afefd71bd7fea7512fc70ef31e3d"}, +] + [[package]] name = "pyyaml" version = "6.0.3" @@ -1537,6 +1714,28 @@ files = [ {file = "pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f"}, ] +[[package]] +name = "requests" +version = "2.32.5" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6"}, + {file = "requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset_normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + [[package]] name = "ruff" version = "0.1.15" @@ -1614,6 +1813,60 @@ anyio = ">=3.4.0,<5" [package.extras] full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] +[[package]] +name = "testcontainers" +version = "4.13.3" +description = "Python library for throwaway instances of anything that can run in a Docker container" +optional = false +python-versions = ">=3.9.2" +groups = ["dev"] +files = [ + {file = "testcontainers-4.13.3-py3-none-any.whl", hash = "sha256:063278c4805ffa6dd85e56648a9da3036939e6c0ac1001e851c9276b19b05970"}, + {file = "testcontainers-4.13.3.tar.gz", hash = "sha256:9d82a7052c9a53c58b69e1dc31da8e7a715e8b3ec1c4df5027561b47e2efe646"}, +] + +[package.dependencies] +docker = "*" +python-dotenv = "*" +typing-extensions = "*" +urllib3 = "*" +wrapt = "*" + +[package.extras] +arangodb = ["python-arango (>=7.8,<8.0)"] +aws = ["boto3", "httpx"] +azurite = ["azure-storage-blob (>=12.19,<13.0)"] +chroma = ["chromadb-client (>=1.0.0,<2.0.0)"] +cosmosdb = ["azure-cosmos"] +db2 = ["ibm_db_sa ; platform_machine != \"aarch64\" and platform_machine != \"arm64\"", "sqlalchemy"] +generic = ["httpx", "redis"] +google = ["google-cloud-datastore (>=2)", "google-cloud-pubsub (>=2)"] +influxdb = ["influxdb", "influxdb-client"] +k3s = ["kubernetes", "pyyaml (>=6.0.3)"] +keycloak = ["python-keycloak"] +localstack = ["boto3"] +mailpit = ["cryptography"] +minio = ["minio"] +mongodb = ["pymongo"] +mssql = ["pymssql (>=2.3.9) ; platform_machine != \"arm64\" or python_version >= \"3.10\"", "sqlalchemy"] +mysql = ["pymysql[rsa]", "sqlalchemy"] +nats = ["nats-py"] +neo4j = ["neo4j"] +openfga = ["openfga-sdk ; python_version >= \"3.10\""] +opensearch = ["opensearch-py ; python_version < \"4.0\""] +oracle = ["oracledb (>=3.4.1)", "sqlalchemy"] +oracle-free = ["oracledb (>=3.4.1)", "sqlalchemy"] +qdrant = ["qdrant-client"] +rabbitmq = ["pika"] +redis = ["redis"] +registry = ["bcrypt"] +scylla = ["cassandra-driver (==3.29.1)"] +selenium = ["selenium"] +sftp = ["cryptography"] +test-module-import = ["httpx"] +trino = ["trino"] +weaviate = ["weaviate-client (>=4.5.4,<5.0.0)"] + [[package]] name = "traitlets" version = "5.14.3" @@ -1657,6 +1910,24 @@ files = [ 
[package.dependencies] typing-extensions = ">=4.12.0" +[[package]] +name = "urllib3" +version = "2.6.2" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "urllib3-2.6.2-py3-none-any.whl", hash = "sha256:ec21cddfe7724fc7cb4ba4bea7aa8e2ef36f607a4bab81aa6ce42a13dc3f03dd"}, + {file = "urllib3-2.6.2.tar.gz", hash = "sha256:016f9c98bb7e98085cb2b4b17b87d2c702975664e4f060c6532e64d1c1a5e797"}, +] + +[package.extras] +brotli = ["brotli (>=1.2.0) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=1.2.0.0) ; platform_python_implementation != \"CPython\""] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["backports-zstd (>=1.0.0) ; python_version < \"3.14\""] + [[package]] name = "uvicorn" version = "0.27.1" @@ -1982,7 +2253,127 @@ files = [ {file = "websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee"}, ] +[[package]] +name = "wrapt" +version = "2.0.1" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "wrapt-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:64b103acdaa53b7caf409e8d45d39a8442fe6dcfec6ba3f3d141e0cc2b5b4dbd"}, + {file = "wrapt-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:91bcc576260a274b169c3098e9a3519fb01f2989f6d3d386ef9cbf8653de1374"}, + {file = "wrapt-2.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ab594f346517010050126fcd822697b25a7031d815bb4fbc238ccbe568216489"}, + {file = "wrapt-2.0.1-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:36982b26f190f4d737f04a492a68accbfc6fa042c3f42326fdfbb6c5b7a20a31"}, + {file = "wrapt-2.0.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:23097ed8bc4c93b7bf36fa2113c6c733c976316ce0ee2c816f64ca06102034ef"}, + {file = "wrapt-2.0.1-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8bacfe6e001749a3b64db47bcf0341da757c95959f592823a93931a422395013"}, + {file = "wrapt-2.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8ec3303e8a81932171f455f792f8df500fc1a09f20069e5c16bd7049ab4e8e38"}, + {file = "wrapt-2.0.1-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:3f373a4ab5dbc528a94334f9fe444395b23c2f5332adab9ff4ea82f5a9e33bc1"}, + {file = "wrapt-2.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f49027b0b9503bf6c8cdc297ca55006b80c2f5dd36cecc72c6835ab6e10e8a25"}, + {file = "wrapt-2.0.1-cp310-cp310-win32.whl", hash = "sha256:8330b42d769965e96e01fa14034b28a2a7600fbf7e8f0cc90ebb36d492c993e4"}, + {file = "wrapt-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:1218573502a8235bb8a7ecaed12736213b22dcde9feab115fa2989d42b5ded45"}, + {file = "wrapt-2.0.1-cp310-cp310-win_arm64.whl", hash = "sha256:eda8e4ecd662d48c28bb86be9e837c13e45c58b8300e43ba3c9b4fa9900302f7"}, + {file = "wrapt-2.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0e17283f533a0d24d6e5429a7d11f250a58d28b4ae5186f8f47853e3e70d2590"}, + {file = "wrapt-2.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:85df8d92158cb8f3965aecc27cf821461bb5f40b450b03facc5d9f0d4d6ddec6"}, + {file = "wrapt-2.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c1be685ac7700c966b8610ccc63c3187a72e33cab53526a27b2a285a662cd4f7"}, + {file = 
"wrapt-2.0.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:df0b6d3b95932809c5b3fecc18fda0f1e07452d05e2662a0b35548985f256e28"}, + {file = "wrapt-2.0.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4da7384b0e5d4cae05c97cd6f94faaf78cc8b0f791fc63af43436d98c4ab37bb"}, + {file = "wrapt-2.0.1-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ec65a78fbd9d6f083a15d7613b2800d5663dbb6bb96003899c834beaa68b242c"}, + {file = "wrapt-2.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7de3cc939be0e1174969f943f3b44e0d79b6f9a82198133a5b7fc6cc92882f16"}, + {file = "wrapt-2.0.1-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:fb1a5b72cbd751813adc02ef01ada0b0d05d3dcbc32976ce189a1279d80ad4a2"}, + {file = "wrapt-2.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3fa272ca34332581e00bf7773e993d4f632594eb2d1b0b162a9038df0fd971dd"}, + {file = "wrapt-2.0.1-cp311-cp311-win32.whl", hash = "sha256:fc007fdf480c77301ab1afdbb6ab22a5deee8885f3b1ed7afcb7e5e84a0e27be"}, + {file = "wrapt-2.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:47434236c396d04875180171ee1f3815ca1eada05e24a1ee99546320d54d1d1b"}, + {file = "wrapt-2.0.1-cp311-cp311-win_arm64.whl", hash = "sha256:837e31620e06b16030b1d126ed78e9383815cbac914693f54926d816d35d8edf"}, + {file = "wrapt-2.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1fdbb34da15450f2b1d735a0e969c24bdb8d8924892380126e2a293d9902078c"}, + {file = "wrapt-2.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3d32794fe940b7000f0519904e247f902f0149edbe6316c710a8562fb6738841"}, + {file = "wrapt-2.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:386fb54d9cd903ee0012c09291336469eb7b244f7183d40dc3e86a16a4bace62"}, + {file = "wrapt-2.0.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7b219cb2182f230676308cdcacd428fa837987b89e4b7c5c9025088b8a6c9faf"}, + {file = "wrapt-2.0.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:641e94e789b5f6b4822bb8d8ebbdfc10f4e4eae7756d648b717d980f657a9eb9"}, + {file = "wrapt-2.0.1-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fe21b118b9f58859b5ebaa4b130dee18669df4bd111daad082b7beb8799ad16b"}, + {file = "wrapt-2.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:17fb85fa4abc26a5184d93b3efd2dcc14deb4b09edcdb3535a536ad34f0b4dba"}, + {file = "wrapt-2.0.1-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:b89ef9223d665ab255ae42cc282d27d69704d94be0deffc8b9d919179a609684"}, + {file = "wrapt-2.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a453257f19c31b31ba593c30d997d6e5be39e3b5ad9148c2af5a7314061c63eb"}, + {file = "wrapt-2.0.1-cp312-cp312-win32.whl", hash = "sha256:3e271346f01e9c8b1130a6a3b0e11908049fe5be2d365a5f402778049147e7e9"}, + {file = "wrapt-2.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:2da620b31a90cdefa9cd0c2b661882329e2e19d1d7b9b920189956b76c564d75"}, + {file = "wrapt-2.0.1-cp312-cp312-win_arm64.whl", hash = "sha256:aea9c7224c302bc8bfc892b908537f56c430802560e827b75ecbde81b604598b"}, + {file = "wrapt-2.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:47b0f8bafe90f7736151f61482c583c86b0693d80f075a58701dd1549b0010a9"}, + {file = "wrapt-2.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cbeb0971e13b4bd81d34169ed57a6dda017328d1a22b62fda45e1d21dd06148f"}, + {file = "wrapt-2.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:eb7cffe572ad0a141a7886a1d2efa5bef0bf7fe021deeea76b3ab334d2c38218"}, + {file = "wrapt-2.0.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c8d60527d1ecfc131426b10d93ab5d53e08a09c5fa0175f6b21b3252080c70a9"}, + {file = "wrapt-2.0.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c654eafb01afac55246053d67a4b9a984a3567c3808bb7df2f8de1c1caba2e1c"}, + {file = "wrapt-2.0.1-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:98d873ed6c8b4ee2418f7afce666751854d6d03e3c0ec2a399bb039cd2ae89db"}, + {file = "wrapt-2.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c9e850f5b7fc67af856ff054c71690d54fa940c3ef74209ad9f935b4f66a0233"}, + {file = "wrapt-2.0.1-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:e505629359cb5f751e16e30cf3f91a1d3ddb4552480c205947da415d597f7ac2"}, + {file = "wrapt-2.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2879af909312d0baf35f08edeea918ee3af7ab57c37fe47cb6a373c9f2749c7b"}, + {file = "wrapt-2.0.1-cp313-cp313-win32.whl", hash = "sha256:d67956c676be5a24102c7407a71f4126d30de2a569a1c7871c9f3cabc94225d7"}, + {file = "wrapt-2.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:9ca66b38dd642bf90c59b6738af8070747b610115a39af2498535f62b5cdc1c3"}, + {file = "wrapt-2.0.1-cp313-cp313-win_arm64.whl", hash = "sha256:5a4939eae35db6b6cec8e7aa0e833dcca0acad8231672c26c2a9ab7a0f8ac9c8"}, + {file = "wrapt-2.0.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:a52f93d95c8d38fed0669da2ebdb0b0376e895d84596a976c15a9eb45e3eccb3"}, + {file = "wrapt-2.0.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4e54bbf554ee29fcceee24fa41c4d091398b911da6e7f5d7bffda963c9aed2e1"}, + {file = "wrapt-2.0.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:908f8c6c71557f4deaa280f55d0728c3bca0960e8c3dd5ceeeafb3c19942719d"}, + {file = "wrapt-2.0.1-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e2f84e9af2060e3904a32cea9bb6db23ce3f91cfd90c6b426757cf7cc01c45c7"}, + {file = "wrapt-2.0.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e3612dc06b436968dfb9142c62e5dfa9eb5924f91120b3c8ff501ad878f90eb3"}, + {file = "wrapt-2.0.1-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6d2d947d266d99a1477cd005b23cbd09465276e302515e122df56bb9511aca1b"}, + {file = "wrapt-2.0.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:7d539241e87b650cbc4c3ac9f32c8d1ac8a54e510f6dca3f6ab60dcfd48c9b10"}, + {file = "wrapt-2.0.1-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:4811e15d88ee62dbf5c77f2c3ff3932b1e3ac92323ba3912f51fc4016ce81ecf"}, + {file = "wrapt-2.0.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c1c91405fcf1d501fa5d55df21e58ea49e6b879ae829f1039faaf7e5e509b41e"}, + {file = "wrapt-2.0.1-cp313-cp313t-win32.whl", hash = "sha256:e76e3f91f864e89db8b8d2a8311d57df93f01ad6bb1e9b9976d1f2e83e18315c"}, + {file = "wrapt-2.0.1-cp313-cp313t-win_amd64.whl", hash = "sha256:83ce30937f0ba0d28818807b303a412440c4b63e39d3d8fc036a94764b728c92"}, + {file = "wrapt-2.0.1-cp313-cp313t-win_arm64.whl", hash = "sha256:4b55cacc57e1dc2d0991dbe74c6419ffd415fb66474a02335cb10efd1aa3f84f"}, + {file = "wrapt-2.0.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:5e53b428f65ece6d9dad23cb87e64506392b720a0b45076c05354d27a13351a1"}, + {file = "wrapt-2.0.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = 
"sha256:ad3ee9d0f254851c71780966eb417ef8e72117155cff04821ab9b60549694a55"}, + {file = "wrapt-2.0.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:d7b822c61ed04ee6ad64bc90d13368ad6eb094db54883b5dde2182f67a7f22c0"}, + {file = "wrapt-2.0.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7164a55f5e83a9a0b031d3ffab4d4e36bbec42e7025db560f225489fa929e509"}, + {file = "wrapt-2.0.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e60690ba71a57424c8d9ff28f8d006b7ad7772c22a4af432188572cd7fa004a1"}, + {file = "wrapt-2.0.1-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3cd1a4bd9a7a619922a8557e1318232e7269b5fb69d4ba97b04d20450a6bf970"}, + {file = "wrapt-2.0.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b4c2e3d777e38e913b8ce3a6257af72fb608f86a1df471cb1d4339755d0a807c"}, + {file = "wrapt-2.0.1-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:3d366aa598d69416b5afedf1faa539fac40c1d80a42f6b236c88c73a3c8f2d41"}, + {file = "wrapt-2.0.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c235095d6d090aa903f1db61f892fffb779c1eaeb2a50e566b52001f7a0f66ed"}, + {file = "wrapt-2.0.1-cp314-cp314-win32.whl", hash = "sha256:bfb5539005259f8127ea9c885bdc231978c06b7a980e63a8a61c8c4c979719d0"}, + {file = "wrapt-2.0.1-cp314-cp314-win_amd64.whl", hash = "sha256:4ae879acc449caa9ed43fc36ba08392b9412ee67941748d31d94e3cedb36628c"}, + {file = "wrapt-2.0.1-cp314-cp314-win_arm64.whl", hash = "sha256:8639b843c9efd84675f1e100ed9e99538ebea7297b62c4b45a7042edb84db03e"}, + {file = "wrapt-2.0.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:9219a1d946a9b32bb23ccae66bdb61e35c62773ce7ca6509ceea70f344656b7b"}, + {file = "wrapt-2.0.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:fa4184e74197af3adad3c889a1af95b53bb0466bced92ea99a0c014e48323eec"}, + {file = "wrapt-2.0.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c5ef2f2b8a53b7caee2f797ef166a390fef73979b15778a4a153e4b5fedce8fa"}, + {file = "wrapt-2.0.1-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e042d653a4745be832d5aa190ff80ee4f02c34b21f4b785745eceacd0907b815"}, + {file = "wrapt-2.0.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2afa23318136709c4b23d87d543b425c399887b4057936cd20386d5b1422b6fa"}, + {file = "wrapt-2.0.1-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6c72328f668cf4c503ffcf9434c2b71fdd624345ced7941bc6693e61bbe36bef"}, + {file = "wrapt-2.0.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:3793ac154afb0e5b45d1233cb94d354ef7a983708cc3bb12563853b1d8d53747"}, + {file = "wrapt-2.0.1-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:fec0d993ecba3991645b4857837277469c8cc4c554a7e24d064d1ca291cfb81f"}, + {file = "wrapt-2.0.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:949520bccc1fa227274da7d03bf238be15389cd94e32e4297b92337df9b7a349"}, + {file = "wrapt-2.0.1-cp314-cp314t-win32.whl", hash = "sha256:be9e84e91d6497ba62594158d3d31ec0486c60055c49179edc51ee43d095f79c"}, + {file = "wrapt-2.0.1-cp314-cp314t-win_amd64.whl", hash = "sha256:61c4956171c7434634401db448371277d07032a81cc21c599c22953374781395"}, + {file = "wrapt-2.0.1-cp314-cp314t-win_arm64.whl", hash = "sha256:35cdbd478607036fee40273be8ed54a451f5f23121bd9d4be515158f9498f7ad"}, + {file = "wrapt-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:90897ea1cf0679763b62e79657958cd54eae5659f6360fc7d2ccc6f906342183"}, + {file = "wrapt-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:50844efc8cdf63b2d90cd3d62d4947a28311e6266ce5235a219d21b195b4ec2c"}, + {file = "wrapt-2.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49989061a9977a8cbd6d20f2efa813f24bf657c6990a42967019ce779a878dbf"}, + {file = "wrapt-2.0.1-cp38-cp38-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:09c7476ab884b74dce081ad9bfd07fe5822d8600abade571cb1f66d5fc915af6"}, + {file = "wrapt-2.0.1-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1a8a09a004ef100e614beec82862d11fc17d601092c3599afd22b1f36e4137e"}, + {file = "wrapt-2.0.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:89a82053b193837bf93c0f8a57ded6e4b6d88033a499dadff5067e912c2a41e9"}, + {file = "wrapt-2.0.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:f26f8e2ca19564e2e1fdbb6a0e47f36e0efbab1acc31e15471fad88f828c75f6"}, + {file = "wrapt-2.0.1-cp38-cp38-win32.whl", hash = "sha256:115cae4beed3542e37866469a8a1f2b9ec549b4463572b000611e9946b86e6f6"}, + {file = "wrapt-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:c4012a2bd37059d04f8209916aa771dfb564cccb86079072bdcd48a308b6a5c5"}, + {file = "wrapt-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:68424221a2dc00d634b54f92441914929c5ffb1c30b3b837343978343a3512a3"}, + {file = "wrapt-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6bd1a18f5a797fe740cb3d7a0e853a8ce6461cc62023b630caec80171a6b8097"}, + {file = "wrapt-2.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fb3a86e703868561c5cad155a15c36c716e1ab513b7065bd2ac8ed353c503333"}, + {file = "wrapt-2.0.1-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:5dc1b852337c6792aa111ca8becff5bacf576bf4a0255b0f05eb749da6a1643e"}, + {file = "wrapt-2.0.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c046781d422f0830de6329fa4b16796096f28a92c8aef3850674442cdcb87b7f"}, + {file = "wrapt-2.0.1-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f73f9f7a0ebd0db139253d27e5fc8d2866ceaeef19c30ab5d69dcbe35e1a6981"}, + {file = "wrapt-2.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b667189cf8efe008f55bbda321890bef628a67ab4147ebf90d182f2dadc78790"}, + {file = "wrapt-2.0.1-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:a9a83618c4f0757557c077ef71d708ddd9847ed66b7cc63416632af70d3e2308"}, + {file = "wrapt-2.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1e9b121e9aeb15df416c2c960b8255a49d44b4038016ee17af03975992d03931"}, + {file = "wrapt-2.0.1-cp39-cp39-win32.whl", hash = "sha256:1f186e26ea0a55f809f232e92cc8556a0977e00183c3ebda039a807a42be1494"}, + {file = "wrapt-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:bf4cb76f36be5de950ce13e22e7fdf462b35b04665a12b64f3ac5c1bbbcf3728"}, + {file = "wrapt-2.0.1-cp39-cp39-win_arm64.whl", hash = "sha256:d6cc985b9c8b235bd933990cdbf0f891f8e010b65a3911f7a55179cd7b0fc57b"}, + {file = "wrapt-2.0.1-py3-none-any.whl", hash = "sha256:4d2ce1bf1a48c5277d7969259232b57645aae5686dba1eaeade39442277afbca"}, + {file = "wrapt-2.0.1.tar.gz", hash = "sha256:9c9c635e78497cacb81e84f8b11b23e0aacac7a136e73b8e5b2109a1d9fc468f"}, +] + +[package.extras] +dev = ["pytest", "setuptools"] + [metadata] lock-version = "2.1" python-versions = "^3.12" -content-hash = "914aa2d42a654e9ec1e14d9ef4b98a1dd29f5ef5a440cb981dd524170c5bc86d" +content-hash = 
"856793c26a8ad40900d28931b87d562ca981a31761dcbca8da9846f3b8a9849d" diff --git a/pyproject.toml b/pyproject.toml index d387f06..117ccac 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,6 +29,7 @@ pytest-asyncio = "^0.23.3" pytest-cov = "^4.1.0" pytest-env = "^1.1.3" httpx = "^0.26.0" +testcontainers = {extras = ["postgresql"], version = "^4.13.3"} # Code quality ruff = "^0.1.11" diff --git a/tests/conftest.py b/tests/conftest.py index c70991e..173ddb4 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -4,6 +4,7 @@ import os import pytest from fastapi.testclient import TestClient +from testcontainers.postgres import PostgresContainer # Set test environment BEFORE importing app os.environ["APP_ENV"] = "testing" @@ -15,16 +16,20 @@ @pytest.fixture(scope="session") -def test_settings(): - """Get test settings.""" - return get_settings() +def postgres_container(): + """Start a PostgreSQL container for testing.""" + with PostgresContainer("postgres:16-alpine") as postgres: + yield postgres @pytest.fixture(scope="session") -def test_db(test_settings): - """Create a test database.""" - # Use settings object instead of os.getenv - db = Database(test_settings.TEST_DATABASE_URL) +def test_db(postgres_container): + """Create a test database using testcontainer.""" + # Get connection URL from testcontainer + db_url = postgres_container.get_connection_url() + + # Override the database connection for tests + db = Database(db_url) # Drop and create tables drop_tables() From c84d361a8c72a80f06cb0b17a93e0668ad6e9bfd Mon Sep 17 00:00:00 2001 From: dhuzjak Date: Thu, 18 Dec 2025 17:23:05 +0100 Subject: [PATCH 12/17] Add CLI code Add CLI code to have same function as API code Add tests - Add CLI tests - Add Server service unit tests --- API.md | 292 ++++++++++++++++++++++++ app/cli.py | 236 ++++++++++++++++++++ poetry.lock | 17 +- pyproject.toml | 5 + tests/test_cli.py | 418 +++++++++++++++++++++++++++++++++++ tests/test_server_service.py | 359 ++++++++++++++++++++++++++++++ 6 files changed, 1326 insertions(+), 1 deletion(-) create mode 100644 API.md create mode 100644 app/cli.py create mode 100644 tests/test_cli.py create mode 100644 tests/test_server_service.py diff --git a/API.md b/API.md new file mode 100644 index 0000000..0c0b483 --- /dev/null +++ b/API.md @@ -0,0 +1,292 @@ +# Server Inventory Management - API & CLI Documentation + +## Running the Application + +### Using Docker Compose + +Start the full stack (API + Database): + +```bash +docker-compose up -d +``` + +The API will be available at `http://localhost:8000` + +### Running Locally + +1. Install dependencies: +```bash +poetry install --with dev +``` + +2. Start PostgreSQL (via Docker Compose or locally) + +3. 
Run the API server: +```bash +poetry run uvicorn app.main:app --reload +``` + +## API Specification + +Base URL: `http://localhost:8000` + +Interactive API docs: `http://localhost:8000/docs` + +### Endpoints + +#### Create Server +```http +POST /servers +Content-Type: application/json + +{ + "hostname": "web-server-01", + "ip_address": "192.168.1.100", + "name": "Web Server 1", + "state": "active" +} +``` + +**Response:** `201 Created` +```json +{ + "id": 1, + "hostname": "web-server-01", + "ip_address": "192.168.1.100", + "name": "Web Server 1", + "state": "active", + "created_at": "2024-01-15T10:30:00", + "updated_at": "2024-01-15T10:30:00" +} +``` + +#### List All Servers +```http +GET /servers?skip=0&limit=100 +``` + +**Response:** `200 OK` +```json +[ + { + "id": 1, + "hostname": "web-server-01", + "ip_address": "192.168.1.100", + "name": "Web Server 1", + "state": "active", + "created_at": "2024-01-15T10:30:00", + "updated_at": "2024-01-15T10:30:00" + } +] +``` + +#### Get Server by ID +```http +GET /servers/{id} +``` + +**Response:** `200 OK` (same format as create response) + +#### Update Server +```http +PUT /servers/{id} +Content-Type: application/json + +{ + "hostname": "web-server-01-updated", + "ip_address": "192.168.1.101", + "name": "Updated Web Server", + "state": "offline" +} +``` + +**Response:** `200 OK` (updated server object) + +**Note:** All fields are optional in update requests + +#### Delete Server +```http +DELETE /servers/{id} +``` + +**Response:** `204 No Content` + +### Validation Rules + +- **hostname**: Must be unique, 1-255 characters +- **ip_address**: Must be a valid IPv4 or IPv6 address +- **state**: One of: `active`, `offline`, `retired` +- **name**: Optional, max 255 characters + +### Error Responses + +**400 Bad Request:** +```json +{ + "detail": "Server with hostname 'web-server-01' already exists" +} +``` + +**404 Not Found:** +```json +{ + "detail": "Server with id 1 not found" +} +``` + +## CLI Specification + +The CLI tool `server-mgmt` provides the same functionality as the API. 
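Both the API and the CLI enforce the validation rules listed above through the project's shared Pydantic schemas. As a rough sketch of how those rules could be expressed (hypothetical; the actual definitions live in `app/schemas/server.py` and may differ, and `field_validator` assumes Pydantic v2):

```python
# Hypothetical sketch only -- the real schema is in app/schemas/server.py.
import ipaddress
from enum import Enum
from typing import Optional

from pydantic import BaseModel, Field, field_validator


class ServerState(str, Enum):
    ACTIVE = "active"
    OFFLINE = "offline"
    RETIRED = "retired"


class ServerCreate(BaseModel):
    hostname: str = Field(min_length=1, max_length=255)  # uniqueness is enforced by the service layer
    ip_address: str
    name: Optional[str] = Field(default=None, max_length=255)
    state: ServerState = ServerState.ACTIVE

    @field_validator("ip_address")
    @classmethod
    def validate_ip_address(cls, value: str) -> str:
        # ipaddress.ip_address accepts both IPv4 and IPv6 literals
        try:
            ipaddress.ip_address(value)
        except ValueError:
            raise ValueError(f"Invalid IP address: {value}")
        return value
```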
+ +### Installation + +After installing dependencies with Poetry, the CLI is automatically available: + +```bash +poetry install +``` + +### Commands + +#### Create a Server +```bash +poetry run server-mgmt create \ + --hostname web-server-01 \ + --ip-address 192.168.1.100 \ + --name "Web Server 1" \ + --state active +``` + +**Options:** +- `--hostname` (required): Unique server hostname +- `--ip-address` (required): Server IP address +- `--name` (optional): Server name/description +- `--state` (optional): Server state (default: active) + +#### List All Servers +```bash +# Table format (default) +poetry run server-mgmt list + +# JSON format +poetry run server-mgmt list --format json + +# With pagination +poetry run server-mgmt list --skip 10 --limit 20 +``` + +**Options:** +- `--skip`: Number of records to skip (default: 0) +- `--limit`: Maximum records to return (default: 100) +- `--format`: Output format - `table` or `json` (default: table) + +#### Get Server by ID +```bash +# Table format +poetry run server-mgmt get 1 + +# JSON format +poetry run server-mgmt get 1 --format json +``` + +#### Update Server +```bash +poetry run server-mgmt update 1 \ + --hostname web-server-01-updated \ + --ip-address 192.168.1.101 \ + --name "Updated Web Server" \ + --state offline +``` + +**Options:** (all optional, but at least one required) +- `--hostname`: New server hostname +- `--ip-address`: New server IP address +- `--name`: New server name/description +- `--state`: New server state + +#### Delete Server +```bash +# With confirmation prompt +poetry run server-mgmt delete 1 + +# Skip confirmation +poetry run server-mgmt delete 1 --yes +``` + +**Options:** +- `--yes`, `-y`: Skip confirmation prompt + +### CLI Help + +Get help for any command: +```bash +poetry run server-mgmt --help +poetry run server-mgmt create --help +poetry run server-mgmt list --help +# etc. +``` + +## Examples + +### API Example (using curl) + +```bash +# Create a server +curl -X POST http://localhost:8000/servers \ + -H "Content-Type: application/json" \ + -d '{ + "hostname": "db-server-01", + "ip_address": "10.0.1.50", + "name": "Primary Database", + "state": "active" + }' + +# List servers +curl http://localhost:8000/servers + +# Get specific server +curl http://localhost:8000/servers/1 + +# Update server +curl -X PUT http://localhost:8000/servers/1 \ + -H "Content-Type: application/json" \ + -d '{"state": "offline"}' + +# Delete server +curl -X DELETE http://localhost:8000/servers/1 +``` + +### CLI Example + +```bash +# Create a server +poetry run server-mgmt create \ + --hostname db-server-01 \ + --ip-address 10.0.1.50 \ + --name "Primary Database" \ + --state active + +# List all servers +poetry run server-mgmt list + +# Get server details +poetry run server-mgmt get 1 + +# Update server state +poetry run server-mgmt update 1 --state offline + +# Delete server +poetry run server-mgmt delete 1 --yes +``` + +## Testing + +Run the test suite: +```bash +poetry run pytest +``` + +Run with coverage: +```bash +poetry run pytest --cov=app tests/ +``` diff --git a/app/cli.py b/app/cli.py new file mode 100644 index 0000000..19b5d41 --- /dev/null +++ b/app/cli.py @@ -0,0 +1,236 @@ +""" +CLI for server inventory management. 
+""" +import os +import sys +import click +from typing import Optional +from tabulate import tabulate + +# Ensure we can import app modules +from app.schemas.server import ServerCreate, ServerUpdate, ServerState +from app.services.server_service import ServerService + + +@click.group() +def cli(): + """Server Inventory Management CLI.""" + pass + + +@cli.command() +@click.option("--hostname", required=True, help="Unique server hostname") +@click.option("--ip-address", required=True, help="Server IP address") +@click.option("--name", help="Server name/description") +@click.option( + "--state", + type=click.Choice(["active", "offline", "retired"], case_sensitive=False), + default="active", + help="Server state (default: active)" +) +def create(hostname: str, ip_address: str, name: Optional[str], state: str): + """Create a new server.""" + try: + service = ServerService() + server_data = ServerCreate( + hostname=hostname, + ip_address=ip_address, + name=name, + state=ServerState(state.lower()) + ) + server = service.create_server(server_data) + + click.echo(click.style("\nServer created successfully!", fg="green")) + click.echo(f"ID: {server.id}") + click.echo(f"Hostname: {server.hostname}") + click.echo(f"IP Address: {server.ip_address}") + click.echo(f"Name: {server.name or 'N/A'}") + click.echo(f"State: {server.state.value if hasattr(server.state, 'value') else server.state}") + click.echo(f"Created at: {server.created_at}") + + except ValueError as e: + click.echo(click.style(f"Error: {str(e)}", fg="red"), err=True) + sys.exit(1) + except Exception as e: + click.echo(click.style(f"Unexpected error: {str(e)}", fg="red"), err=True) + sys.exit(1) + + +@cli.command() +@click.option("--skip", default=0, help="Number of records to skip") +@click.option("--limit", default=100, help="Maximum number of records to return") +@click.option("--format", "output_format", type=click.Choice(["table", "json"]), default="table", help="Output format") +def list(skip: int, limit: int, output_format: str): + """List all servers.""" + try: + service = ServerService() + servers = service.get_servers(skip=skip, limit=limit) + + if not servers: + click.echo("No servers found.") + return + + if output_format == "json": + import json + servers_data = [ + { + "id": s.id, + "hostname": s.hostname, + "ip_address": s.ip_address, + "name": s.name, + "state": s.state, + "created_at": s.created_at.isoformat(), + "updated_at": s.updated_at.isoformat() + } + for s in servers + ] + click.echo(json.dumps(servers_data, indent=2)) + else: + table_data = [ + [s.id, s.hostname, s.ip_address, s.name or "N/A", s.state, s.created_at] + for s in servers + ] + headers = ["ID", "Hostname", "IP Address", "Name", "State", "Created At"] + click.echo(f"\nFound {len(servers)} server(s):\n") + click.echo(tabulate(table_data, headers=headers, tablefmt="grid")) + + except Exception as e: + click.echo(click.style(f"Error: {str(e)}", fg="red"), err=True) + sys.exit(1) + + +@cli.command() +@click.argument("server_id", type=int) +@click.option("--format", "output_format", type=click.Choice(["table", "json"]), default="table", help="Output format") +def get(server_id: int, output_format: str): + """Get server by ID.""" + try: + service = ServerService() + server = service.get_server(server_id) + + if not server: + click.echo(click.style(f"Server with ID {server_id} not found.", fg="red"), err=True) + sys.exit(1) + + if output_format == "json": + import json + server_data = { + "id": server.id, + "hostname": server.hostname, + "ip_address": 
server.ip_address, + "name": server.name, + "state": server.state, + "created_at": server.created_at.isoformat(), + "updated_at": server.updated_at.isoformat() + } + click.echo(json.dumps(server_data, indent=2)) + else: + click.echo("\nServer Details:\n") + click.echo(f"ID: {server.id}") + click.echo(f"Hostname: {server.hostname}") + click.echo(f"IP Address: {server.ip_address}") + click.echo(f"Name: {server.name or 'N/A'}") + click.echo(f"State: {server.state.value if hasattr(server.state, 'value') else server.state}") + click.echo(f"Created at: {server.created_at}") + click.echo(f"Updated at: {server.updated_at}") + + except Exception as e: + click.echo(click.style(f"Error: {str(e)}", fg="red"), err=True) + sys.exit(1) + + +@cli.command() +@click.argument("server_id", type=int) +@click.option("--hostname", help="New server hostname") +@click.option("--ip-address", help="New server IP address") +@click.option("--name", help="New server name/description") +@click.option( + "--state", + type=click.Choice(["active", "offline", "retired"], case_sensitive=False), + help="New server state" +) +def update(server_id: int, hostname: Optional[str], ip_address: Optional[str], name: Optional[str], state: Optional[str]): + """Update server by ID.""" + try: + # Check if at least one field is provided + if not any([hostname, ip_address, name, state]): + click.echo(click.style("Error: At least one field must be provided for update.", fg="red"), err=True) + sys.exit(1) + + service = ServerService() + + # Build update data + update_data = {} + if hostname is not None: + update_data["hostname"] = hostname + if ip_address is not None: + update_data["ip_address"] = ip_address + if name is not None: + update_data["name"] = name + if state is not None: + update_data["state"] = ServerState(state.lower()) + + server_update = ServerUpdate(**update_data) + server = service.update_server(server_id, server_update) + + if not server: + click.echo(click.style(f"Server with ID {server_id} not found.", fg="red"), err=True) + sys.exit(1) + + click.echo(click.style("\nServer updated successfully!", fg="green")) + click.echo(f"ID: {server.id}") + click.echo(f"Hostname: {server.hostname}") + click.echo(f"IP Address: {server.ip_address}") + click.echo(f"Name: {server.name or 'N/A'}") + click.echo(f"State: {server.state.value if hasattr(server.state, 'value') else server.state}") + click.echo(f"Updated at: {server.updated_at}") + + except ValueError as e: + click.echo(click.style(f"Error: {str(e)}", fg="red"), err=True) + sys.exit(1) + except Exception as e: + click.echo(click.style(f"Unexpected error: {str(e)}", fg="red"), err=True) + sys.exit(1) + + +@cli.command() +@click.argument("server_id", type=int) +@click.option("--yes", "-y", is_flag=True, help="Skip confirmation prompt") +def delete(server_id: int, yes: bool): + """Delete server by ID.""" + try: + service = ServerService() + + # Get server to show what will be deleted + server = service.get_server(server_id) + if not server: + click.echo(click.style(f"Server with ID {server_id} not found.", fg="red"), err=True) + sys.exit(1) + + # Confirm deletion unless --yes flag is provided + if not yes: + click.echo("\nServer to delete:") + click.echo(f"ID: {server.id}") + click.echo(f"Hostname: {server.hostname}") + click.echo(f"IP Address: {server.ip_address}") + click.echo(f"Name: {server.name or 'N/A'}") + click.echo(f"State: {server.state.value if hasattr(server.state, 'value') else server.state}\n") + + if not click.confirm("Are you sure you want to delete this
server?"): + click.echo("Deletion cancelled.") + sys.exit(0) + + # Delete server + if service.delete_server(server_id): + click.echo(click.style(f"\nServer {server_id} deleted successfully!", fg="green")) + else: + click.echo(click.style(f"Failed to delete server {server_id}.", fg="red"), err=True) + sys.exit(1) + + except Exception as e: + click.echo(click.style(f"Error: {str(e)}", fg="red"), err=True) + sys.exit(1) + + +if __name__ == "__main__": + cli() diff --git a/poetry.lock b/poetry.lock index 79c76d0..48bf012 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1813,6 +1813,21 @@ anyio = ">=3.4.0,<5" [package.extras] full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] +[[package]] +name = "tabulate" +version = "0.9.0" +description = "Pretty-print tabular data" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, + {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, +] + +[package.extras] +widechars = ["wcwidth"] + [[package]] name = "testcontainers" version = "4.13.3" @@ -2376,4 +2391,4 @@ dev = ["pytest", "setuptools"] [metadata] lock-version = "2.1" python-versions = "^3.12" -content-hash = "856793c26a8ad40900d28931b87d562ca981a31761dcbca8da9846f3b8a9849d" +content-hash = "f6cc8fba82d2d02ce52fd5f86282d6dcc827c96583d64e7cc0e76adefaa5be89" diff --git a/pyproject.toml b/pyproject.toml index 117ccac..25c3c1f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -17,6 +17,8 @@ email-validator = "^2.1.0" passlib = {extras = ["bcrypt"], version = "^1.7.4"} python-multipart = "^0.0.6" psycopg2-binary = "^2.9.9" +click = "^8.1.7" +tabulate = "^0.9.0" # Development dependencies (testing, linting, formatting) [tool.poetry.group.dev] @@ -43,6 +45,9 @@ pre-commit = "^3.6.0" ipython = "^8.20.0" python-dotenv = "^1.0.0" +[tool.poetry.scripts] +server-mgmt = "app.cli:cli" + [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" diff --git a/tests/test_cli.py b/tests/test_cli.py new file mode 100644 index 0000000..98cdef2 --- /dev/null +++ b/tests/test_cli.py @@ -0,0 +1,418 @@ +""" +CLI tests. 
+""" +import pytest +from click.testing import CliRunner +from app.cli import cli + + +@pytest.fixture +def runner(): + """Create a CLI runner.""" + return CliRunner() + + +@pytest.mark.integration +def test_cli_help(runner): + """Test CLI help command.""" + result = runner.invoke(cli, ["--help"]) + assert result.exit_code == 0 + assert "Server Inventory Management CLI" in result.output + assert "create" in result.output + assert "list" in result.output + assert "get" in result.output + assert "update" in result.output + assert "delete" in result.output + + +@pytest.mark.integration +def test_create_server_success(runner, clean_db): + """Test creating a server via CLI.""" + result = runner.invoke(cli, [ + "create", + "--hostname", "cli-web-01", + "--ip-address", "192.168.1.200", + "--name", "CLI Web Server", + "--state", "active" + ]) + assert result.exit_code == 0 + assert "Server created successfully" in result.output + assert "cli-web-01" in result.output + assert "192.168.1.200" in result.output + + +@pytest.mark.integration +def test_create_server_missing_required_field(runner): + """Test creating a server with missing required field.""" + result = runner.invoke(cli, [ + "create", + "--hostname", "incomplete-server" + ]) + assert result.exit_code != 0 + assert "Missing option" in result.output or "required" in result.output.lower() + + +@pytest.mark.integration +def test_create_server_invalid_ip(runner, clean_db): + """Test creating a server with invalid IP address.""" + result = runner.invoke(cli, [ + "create", + "--hostname", "invalid-ip-server", + "--ip-address", "999.999.999.999", + "--state", "active" + ]) + assert result.exit_code == 1 + assert "Error" in result.output + assert "Invalid IP address" in result.output + + +@pytest.mark.integration +def test_create_server_duplicate_hostname(runner, clean_db): + """Test creating a server with duplicate hostname.""" + # Create first server + runner.invoke(cli, [ + "create", + "--hostname", "duplicate-server", + "--ip-address", "192.168.1.100", + "--state", "active" + ]) + + # Try to create second server with same hostname + result = runner.invoke(cli, [ + "create", + "--hostname", "duplicate-server", + "--ip-address", "192.168.1.101", + "--state", "active" + ]) + assert result.exit_code == 1 + assert "Error" in result.output + assert "already exists" in result.output + + +@pytest.mark.integration +def test_create_server_with_all_states(runner, clean_db): + """Test creating servers with all valid states.""" + states = ["active", "offline", "retired"] + + for state in states: + result = runner.invoke(cli, [ + "create", + "--hostname", f"server-{state}", + "--ip-address", f"10.0.0.{states.index(state) + 1}", + "--state", state + ]) + assert result.exit_code == 0 + assert "Server created successfully" in result.output + assert state in result.output + + +@pytest.mark.integration +def test_list_servers_empty(runner, clean_db): + """Test listing servers when database is empty.""" + result = runner.invoke(cli, ["list"]) + assert result.exit_code == 0 + assert "No servers found" in result.output + + +@pytest.mark.integration +def test_list_servers_table_format(runner, clean_db): + """Test listing servers in table format.""" + # Create a few servers + for i in range(3): + runner.invoke(cli, [ + "create", + "--hostname", f"list-server-{i:02d}", + "--ip-address", f"10.0.1.{i+1}", + "--state", "active" + ]) + + result = runner.invoke(cli, ["list"]) + assert result.exit_code == 0 + assert "Found 3 server(s)" in result.output + assert 
"list-server-00" in result.output + assert "list-server-01" in result.output + assert "list-server-02" in result.output + + +@pytest.mark.integration +def test_list_servers_json_format(runner, clean_db): + """Test listing servers in JSON format.""" + # Create a server + runner.invoke(cli, [ + "create", + "--hostname", "json-server", + "--ip-address", "10.0.2.1", + "--state", "active" + ]) + + result = runner.invoke(cli, ["list", "--format", "json"]) + assert result.exit_code == 0 + assert '"hostname": "json-server"' in result.output + assert '"ip_address": "10.0.2.1"' in result.output + assert '"state": "active"' in result.output + + +@pytest.mark.integration +def test_list_servers_pagination(runner, clean_db): + """Test listing servers with pagination.""" + # Create 5 servers + for i in range(5): + runner.invoke(cli, [ + "create", + "--hostname", f"page-server-{i:02d}", + "--ip-address", f"10.0.3.{i+1}", + "--state", "active" + ]) + + # Test skip and limit + result = runner.invoke(cli, ["list", "--skip", "2", "--limit", "2"]) + assert result.exit_code == 0 + # Should show at most 2 servers, skipping first 2 + + +@pytest.mark.integration +def test_get_server_success(runner, clean_db): + """Test getting a server by ID.""" + # Create a server + create_result = runner.invoke(cli, [ + "create", + "--hostname", "get-server", + "--ip-address", "10.0.4.1", + "--name", "Get Test Server", + "--state", "active" + ]) + + # Extract ID from create output (hacky but works for test) + # The output should contain "ID: " + import re + id_match = re.search(r"ID: (\d+)", create_result.output) + assert id_match is not None + server_id = id_match.group(1) + + # Get the server + result = runner.invoke(cli, ["get", server_id]) + assert result.exit_code == 0 + assert "Server Details" in result.output + assert "get-server" in result.output + assert "10.0.4.1" in result.output + assert "Get Test Server" in result.output + + +@pytest.mark.integration +def test_get_server_json_format(runner, clean_db): + """Test getting a server in JSON format.""" + # Create a server + create_result = runner.invoke(cli, [ + "create", + "--hostname", "json-get-server", + "--ip-address", "10.0.4.2", + "--state", "active" + ]) + + import re + id_match = re.search(r"ID: (\d+)", create_result.output) + server_id = id_match.group(1) + + # Get server in JSON format + result = runner.invoke(cli, ["get", server_id, "--format", "json"]) + assert result.exit_code == 0 + assert '"hostname": "json-get-server"' in result.output + assert '"ip_address": "10.0.4.2"' in result.output + + +@pytest.mark.integration +def test_get_server_not_found(runner, clean_db): + """Test getting a non-existent server.""" + result = runner.invoke(cli, ["get", "99999"]) + assert result.exit_code == 1 + assert "not found" in result.output + + +@pytest.mark.integration +def test_update_server_success(runner, clean_db): + """Test updating a server.""" + # Create a server + create_result = runner.invoke(cli, [ + "create", + "--hostname", "update-server", + "--ip-address", "10.0.5.1", + "--name", "Original Name", + "--state", "active" + ]) + + import re + id_match = re.search(r"ID: (\d+)", create_result.output) + server_id = id_match.group(1) + + # Update the server + result = runner.invoke(cli, [ + "update", + server_id, + "--name", "Updated Name", + "--state", "offline" + ]) + assert result.exit_code == 0 + assert "Server updated successfully" in result.output + assert "Updated Name" in result.output + assert "offline" in result.output + + +@pytest.mark.integration +def 
test_update_server_hostname(runner, clean_db): + """Test updating server hostname.""" + # Create a server + create_result = runner.invoke(cli, [ + "create", + "--hostname", "old-hostname", + "--ip-address", "10.0.5.2", + "--state", "active" + ]) + + import re + id_match = re.search(r"ID: (\d+)", create_result.output) + server_id = id_match.group(1) + + # Update hostname + result = runner.invoke(cli, [ + "update", + server_id, + "--hostname", "new-hostname" + ]) + assert result.exit_code == 0 + assert "new-hostname" in result.output + + +@pytest.mark.integration +def test_update_server_no_fields(runner, clean_db): + """Test updating a server without providing any fields.""" + # Create a server + create_result = runner.invoke(cli, [ + "create", + "--hostname", "no-update-server", + "--ip-address", "10.0.5.3", + "--state", "active" + ]) + + import re + id_match = re.search(r"ID: (\d+)", create_result.output) + server_id = id_match.group(1) + + # Try to update without any fields + result = runner.invoke(cli, ["update", server_id]) + assert result.exit_code == 1 + assert "Error" in result.output + assert "At least one field" in result.output + + +@pytest.mark.integration +def test_update_server_not_found(runner, clean_db): + """Test updating a non-existent server.""" + result = runner.invoke(cli, [ + "update", + "99999", + "--name", "New Name" + ]) + assert result.exit_code == 1 + assert "not found" in result.output + + +@pytest.mark.integration +def test_update_server_invalid_ip(runner, clean_db): + """Test updating a server with invalid IP.""" + # Create a server + create_result = runner.invoke(cli, [ + "create", + "--hostname", "invalid-update-server", + "--ip-address", "10.0.5.4", + "--state", "active" + ]) + + import re + id_match = re.search(r"ID: (\d+)", create_result.output) + server_id = id_match.group(1) + + # Try to update with invalid IP + result = runner.invoke(cli, [ + "update", + server_id, + "--ip-address", "not-an-ip" + ]) + assert result.exit_code == 1 + assert "Error" in result.output + + +@pytest.mark.integration +def test_delete_server_with_confirmation(runner, clean_db): + """Test deleting a server with confirmation.""" + # Create a server + create_result = runner.invoke(cli, [ + "create", + "--hostname", "delete-server", + "--ip-address", "10.0.6.1", + "--state", "active" + ]) + + import re + id_match = re.search(r"ID: (\d+)", create_result.output) + server_id = id_match.group(1) + + # Delete with yes confirmation + result = runner.invoke(cli, [ + "delete", + server_id + ], input="y\n") + assert result.exit_code == 0 + assert "deleted successfully" in result.output + + +@pytest.mark.integration +def test_delete_server_skip_confirmation(runner, clean_db): + """Test deleting a server with --yes flag.""" + # Create a server + create_result = runner.invoke(cli, [ + "create", + "--hostname", "quick-delete-server", + "--ip-address", "10.0.6.2", + "--state", "active" + ]) + + import re + id_match = re.search(r"ID: (\d+)", create_result.output) + server_id = id_match.group(1) + + # Delete with --yes flag + result = runner.invoke(cli, ["delete", server_id, "--yes"]) + assert result.exit_code == 0 + assert "deleted successfully" in result.output + + +@pytest.mark.integration +def test_delete_server_cancel_confirmation(runner, clean_db): + """Test canceling server deletion.""" + # Create a server + create_result = runner.invoke(cli, [ + "create", + "--hostname", "cancel-delete-server", + "--ip-address", "10.0.6.3", + "--state", "active" + ]) + + import re + id_match = 
re.search(r"ID: (\d+)", create_result.output) + server_id = id_match.group(1) + + # Delete with no confirmation + result = runner.invoke(cli, [ + "delete", + server_id + ], input="n\n") + assert result.exit_code == 0 + assert "cancelled" in result.output + + +@pytest.mark.integration +def test_delete_server_not_found(runner, clean_db): + """Test deleting a non-existent server.""" + result = runner.invoke(cli, ["delete", "99999", "--yes"]) + assert result.exit_code == 1 + assert "not found" in result.output diff --git a/tests/test_server_service.py b/tests/test_server_service.py new file mode 100644 index 0000000..22ac96d --- /dev/null +++ b/tests/test_server_service.py @@ -0,0 +1,359 @@ +""" +Unit tests for server service layer. +""" +import pytest +from unittest.mock import Mock, patch +from datetime import datetime + +from app.services.server_service import ServerService +from app.schemas.server import ServerCreate, ServerUpdate, ServerState, ServerResponse + + +@pytest.fixture +def mock_db(): + """Create a mock database.""" + return Mock() + + +@pytest.fixture +def mock_server_model(): + """Create a mock server model.""" + return Mock() + + +@pytest.fixture +def service_with_mocks(mock_db, mock_server_model): + """Create a service with mocked dependencies.""" + with patch('app.services.server_service.get_db', return_value=mock_db): + with patch('app.services.server_service.ServerModel', return_value=mock_server_model): + service = ServerService() + service.server_model = mock_server_model + return service + + +@pytest.fixture +def sample_server_dict(): + """Sample server dictionary as returned from database.""" + return { + 'id': 1, + 'hostname': 'web-server-01', + 'ip_address': '192.168.1.100', + 'name': 'Web Server 1', + 'state': 'active', + 'created_at': datetime(2024, 1, 15, 10, 30, 0), + 'updated_at': datetime(2024, 1, 15, 10, 30, 0) + } + + +@pytest.fixture +def sample_server_create(): + """Sample server create schema.""" + return ServerCreate( + hostname='web-server-01', + ip_address='192.168.1.100', + name='Web Server 1', + state=ServerState.ACTIVE + ) + + +@pytest.mark.unit +class TestCreateServer: + """Tests for create_server method.""" + + def test_create_server_success(self, service_with_mocks, sample_server_create, sample_server_dict): + """Test successful server creation.""" + # Setup mocks + service_with_mocks.server_model.get_by_hostname.return_value = None + service_with_mocks.server_model.create.return_value = 1 + service_with_mocks.server_model.get_by_id.return_value = sample_server_dict + + # Execute + result = service_with_mocks.create_server(sample_server_create) + + # Verify + assert isinstance(result, ServerResponse) + assert result.id == 1 + assert result.hostname == 'web-server-01' + assert result.ip_address == '192.168.1.100' + service_with_mocks.server_model.get_by_hostname.assert_called_once_with('web-server-01') + service_with_mocks.server_model.create.assert_called_once_with(sample_server_create) + service_with_mocks.server_model.get_by_id.assert_called_once_with(1) + + def test_create_server_duplicate_hostname(self, service_with_mocks, sample_server_create, sample_server_dict): + """Test creating server with duplicate hostname raises error.""" + # Setup mocks - hostname already exists + service_with_mocks.server_model.get_by_hostname.return_value = sample_server_dict + + # Execute and verify + with pytest.raises(ValueError) as exc_info: + service_with_mocks.create_server(sample_server_create) + + assert "already exists" in str(exc_info.value) + assert 
"web-server-01" in str(exc_info.value) + service_with_mocks.server_model.create.assert_not_called() + + def test_create_server_different_states(self, service_with_mocks, sample_server_dict): + """Test creating servers with different states.""" + states = [ServerState.ACTIVE, ServerState.OFFLINE, ServerState.RETIRED] + + for state in states: + service_with_mocks.server_model.get_by_hostname.return_value = None + service_with_mocks.server_model.create.return_value = 1 + + server_dict = sample_server_dict.copy() + server_dict['state'] = state.value + service_with_mocks.server_model.get_by_id.return_value = server_dict + + server_create = ServerCreate( + hostname=f'server-{state.value}', + ip_address='10.0.0.1', + state=state + ) + + result = service_with_mocks.create_server(server_create) + assert result.state == state.value + + +@pytest.mark.unit +class TestGetServer: + """Tests for get_server method.""" + + def test_get_server_success(self, service_with_mocks, sample_server_dict): + """Test successfully getting a server by ID.""" + service_with_mocks.server_model.get_by_id.return_value = sample_server_dict + + result = service_with_mocks.get_server(1) + + assert isinstance(result, ServerResponse) + assert result.id == 1 + assert result.hostname == 'web-server-01' + service_with_mocks.server_model.get_by_id.assert_called_once_with(1) + + def test_get_server_not_found(self, service_with_mocks): + """Test getting a server that doesn't exist.""" + service_with_mocks.server_model.get_by_id.return_value = None + + result = service_with_mocks.get_server(99999) + + assert result is None + service_with_mocks.server_model.get_by_id.assert_called_once_with(99999) + + +@pytest.mark.unit +class TestGetServers: + """Tests for get_servers method.""" + + def test_get_servers_empty(self, service_with_mocks): + """Test getting servers when none exist.""" + service_with_mocks.server_model.get_all.return_value = [] + + result = service_with_mocks.get_servers() + + assert result == [] + service_with_mocks.server_model.get_all.assert_called_once_with(0, 100) + + def test_get_servers_multiple(self, service_with_mocks): + """Test getting multiple servers.""" + servers = [ + { + 'id': i, + 'hostname': f'server-{i:02d}', + 'ip_address': f'10.0.0.{i}', + 'name': f'Server {i}', + 'state': 'active', + 'created_at': datetime(2024, 1, 15, 10, 30, 0), + 'updated_at': datetime(2024, 1, 15, 10, 30, 0) + } + for i in range(1, 6) + ] + service_with_mocks.server_model.get_all.return_value = servers + + result = service_with_mocks.get_servers() + + assert len(result) == 5 + assert all(isinstance(s, ServerResponse) for s in result) + assert result[0].hostname == 'server-01' + assert result[4].hostname == 'server-05' + + def test_get_servers_with_pagination(self, service_with_mocks): + """Test getting servers with pagination parameters.""" + service_with_mocks.server_model.get_all.return_value = [] + + service_with_mocks.get_servers(skip=10, limit=20) + + service_with_mocks.server_model.get_all.assert_called_once_with(10, 20) + + +@pytest.mark.unit +class TestUpdateServer: + """Tests for update_server method.""" + + def test_update_server_success(self, service_with_mocks, sample_server_dict): + """Test successfully updating a server.""" + # Setup + updated_dict = sample_server_dict.copy() + updated_dict['name'] = 'Updated Server' + updated_dict['state'] = 'offline' + + service_with_mocks.server_model.get_by_id.side_effect = [ + sample_server_dict, # First call to check existence + updated_dict # Second call to get updated server 
+ ] + service_with_mocks.server_model.update.return_value = 1 + + update_data = ServerUpdate(name='Updated Server', state=ServerState.OFFLINE) + + # Execute + result = service_with_mocks.update_server(1, update_data) + + # Verify + assert isinstance(result, ServerResponse) + assert result.name == 'Updated Server' + assert result.state == 'offline' + service_with_mocks.server_model.update.assert_called_once_with(1, update_data) + + def test_update_server_not_found(self, service_with_mocks): + """Test updating a server that doesn't exist.""" + service_with_mocks.server_model.get_by_id.return_value = None + + update_data = ServerUpdate(name='New Name') + result = service_with_mocks.update_server(99999, update_data) + + assert result is None + service_with_mocks.server_model.update.assert_not_called() + + def test_update_server_hostname_to_existing(self, service_with_mocks, sample_server_dict): + """Test updating hostname to one that already exists.""" + # Setup + existing_server = sample_server_dict.copy() + another_server = sample_server_dict.copy() + another_server['id'] = 2 + another_server['hostname'] = 'another-server' + + service_with_mocks.server_model.get_by_id.return_value = another_server + service_with_mocks.server_model.get_by_hostname.return_value = existing_server + + update_data = ServerUpdate(hostname='web-server-01') + + # Execute and verify + with pytest.raises(ValueError) as exc_info: + service_with_mocks.update_server(2, update_data) + + assert "already exists" in str(exc_info.value) + service_with_mocks.server_model.update.assert_not_called() + + def test_update_server_hostname_to_same(self, service_with_mocks, sample_server_dict): + """Test updating hostname to the same value (should succeed).""" + updated_dict = sample_server_dict.copy() + + service_with_mocks.server_model.get_by_id.side_effect = [ + sample_server_dict, + updated_dict + ] + service_with_mocks.server_model.update.return_value = 1 + + update_data = ServerUpdate(hostname='web-server-01') # Same hostname + + result = service_with_mocks.update_server(1, update_data) + + assert result is not None + service_with_mocks.server_model.get_by_hostname.assert_not_called() + service_with_mocks.server_model.update.assert_called_once() + + def test_update_server_partial(self, service_with_mocks, sample_server_dict): + """Test partial update (only some fields).""" + updated_dict = sample_server_dict.copy() + updated_dict['state'] = 'retired' + + service_with_mocks.server_model.get_by_id.side_effect = [ + sample_server_dict, + updated_dict + ] + service_with_mocks.server_model.update.return_value = 1 + + update_data = ServerUpdate(state=ServerState.RETIRED) + + result = service_with_mocks.update_server(1, update_data) + + assert result.state == 'retired' + assert result.hostname == 'web-server-01' # Unchanged + + +@pytest.mark.unit +class TestDeleteServer: + """Tests for delete_server method.""" + + def test_delete_server_success(self, service_with_mocks): + """Test successfully deleting a server.""" + service_with_mocks.server_model.delete.return_value = 1 + + result = service_with_mocks.delete_server(1) + + assert result is True + service_with_mocks.server_model.delete.assert_called_once_with(1) + + def test_delete_server_not_found(self, service_with_mocks): + """Test deleting a server that doesn't exist.""" + service_with_mocks.server_model.delete.return_value = 0 + + result = service_with_mocks.delete_server(99999) + + assert result is False + service_with_mocks.server_model.delete.assert_called_once_with(99999) + + 
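+# A hedged sketch of schema-level edge cases, assuming the ip_address and
+# state validation defined in app/schemas/server.py (pydantic's
+# ValidationError subclasses ValueError, so pytest.raises(ValueError) works).
+@pytest.mark.unit
+class TestSchemaValidation:
+    """Sketches for input validation at the schema layer."""
+
+    def test_create_rejects_invalid_ip(self):
+        with pytest.raises(ValueError):
+            ServerCreate(hostname='bad-ip-server', ip_address='999.999.999.999')
+
+    def test_create_rejects_unknown_state(self):
+        with pytest.raises(ValueError):
+            ServerCreate(hostname='bad-state-server', ip_address='10.0.0.1', state='rebooting')
+
+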
+@pytest.mark.unit
+class TestBusinessLogic:
+    """Tests for business logic edge cases."""
+
+    def test_create_server_without_name(self, service_with_mocks, sample_server_dict):
+        """Test creating a server without optional name field."""
+        service_with_mocks.server_model.get_by_hostname.return_value = None
+        service_with_mocks.server_model.create.return_value = 1
+
+        server_dict = sample_server_dict.copy()
+        server_dict['hostname'] = 'no-name-server'
+        server_dict['ip_address'] = '10.0.0.1'
+        server_dict['name'] = None
+        service_with_mocks.server_model.get_by_id.return_value = server_dict
+
+        server_create = ServerCreate(
+            hostname='no-name-server',
+            ip_address='10.0.0.1',
+            state=ServerState.ACTIVE
+        )
+
+        result = service_with_mocks.create_server(server_create)
+
+        assert result.name is None
+        assert result.hostname == 'no-name-server'
+
+    def test_update_server_empty_update(self, service_with_mocks, sample_server_dict):
+        """Test update with no fields changed still works."""
+        service_with_mocks.server_model.get_by_id.side_effect = [
+            sample_server_dict,
+            sample_server_dict
+        ]
+        service_with_mocks.server_model.update.return_value = 1
+
+        # Update with all None values
+        update_data = ServerUpdate()
+
+        result = service_with_mocks.update_server(1, update_data)
+
+        # Should still call update (even if no changes)
+        assert result is not None
+        service_with_mocks.server_model.update.assert_called_once()
+
+    def test_service_initialization(self):
+        """Test that service initializes correctly with real dependencies."""
+        with patch('app.services.server_service.get_db') as mock_get_db:
+            with patch('app.services.server_service.ServerModel') as mock_model_class:
+                mock_db = Mock()
+                mock_get_db.return_value = mock_db
+
+                service = ServerService()
+
+                mock_get_db.assert_called_once()
+                mock_model_class.assert_called_once_with(mock_db)
+                assert service.db == mock_db

From 18ac30120a790b0832f20fcb17d485194cf6a4a6 Mon Sep 17 00:00:00 2001
From: dhuzjak
Date: Thu, 18 Dec 2025 17:36:52 +0100
Subject: [PATCH 13/17] Fixes by ruff

Apply ruff formatting and import-sorting fixes
---
 README.md                      |   3 +-
 app/api/servers.py             |  18 +-
 app/cli.py                     |  70 +++++--
 app/core/config.py             |  14 +-
 app/db/database.py             |   8 +-
 app/main.py                    |   5 +-
 app/models/server.py           |   6 +-
 app/schemas/server.py          |  10 +-
 app/services/server_service.py |  12 +-
 tests/conftest.py              |   6 +-
 tests/test_cli.py              | 343 ++++++++++++++++++++-------------
 tests/test_server_api.py       |  17 +-
 tests/test_server_service.py   | 129 ++++++-------
 13 files changed, 369 insertions(+), 272 deletions(-)

diff --git a/README.md b/README.md
index 3145d38..62070da 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
 # Instructions
 
-You are developing an inventory management software solution for a cloud services company that provisions servers in multiple data centers. You must build a CRUD app for tracking the state of all the servers. 
+You are developing an inventory management software solution for a cloud services company that provisions servers in multiple data centers. You must build a CRUD app for tracking the state of all the servers.
 
 Deliverables:
 - PR to https://github.com/Mathpix/hiring-challenge-devops-python that includes:
@@ -28,4 +28,3 @@ Validate that:
 - IP address looks like an IP
 State is one of: active, offline, retired
 
-
diff --git a/app/api/servers.py b/app/api/servers.py
index ef7fc2d..e952f6a 100644
--- a/app/api/servers.py
+++ b/app/api/servers.py
@@ -1,10 +1,10 @@
 """
 Server API endpoints.
""" -from typing import List -from fastapi import APIRouter, HTTPException, status, Query -from app.schemas.server import ServerCreate, ServerUpdate, ServerResponse +from fastapi import APIRouter, HTTPException, Query, status + +from app.schemas.server import ServerCreate, ServerResponse, ServerUpdate from app.services.server_service import ServerService router = APIRouter() @@ -20,10 +20,10 @@ async def create_server(server: ServerCreate): raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e)) -@router.get("/", response_model=List[ServerResponse]) +@router.get("/", response_model=list[ServerResponse]) async def list_servers( skip: int = Query(0, ge=0, description="Number of records to skip"), - limit: int = Query(100, ge=1, le=1000, description="Maximum number of records to return") + limit: int = Query(100, ge=1, le=1000, description="Maximum number of records to return"), ): """List all servers with pagination.""" service = ServerService() @@ -37,8 +37,7 @@ async def get_server(server_id: int): server = service.get_server(server_id) if not server: raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail=f"Server with id {server_id} not found" + status_code=status.HTTP_404_NOT_FOUND, detail=f"Server with id {server_id} not found" ) return server @@ -52,7 +51,7 @@ async def update_server(server_id: int, server_update: ServerUpdate): if not server: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, - detail=f"Server with id {server_id} not found" + detail=f"Server with id {server_id} not found", ) return server except ValueError as e: @@ -65,6 +64,5 @@ async def delete_server(server_id: int): service = ServerService() if not service.delete_server(server_id): raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail=f"Server with id {server_id} not found" + status_code=status.HTTP_404_NOT_FOUND, detail=f"Server with id {server_id} not found" ) diff --git a/app/cli.py b/app/cli.py index 19b5d41..f60f0c3 100644 --- a/app/cli.py +++ b/app/cli.py @@ -1,14 +1,14 @@ """ CLI for server inventory management. 
""" -import os import sys -import click from typing import Optional + +import click from tabulate import tabulate # Ensure we can import app modules -from app.schemas.server import ServerCreate, ServerUpdate, ServerState +from app.schemas.server import ServerCreate, ServerState, ServerUpdate from app.services.server_service import ServerService @@ -26,17 +26,14 @@ def cli(): "--state", type=click.Choice(["active", "offline", "retired"], case_sensitive=False), default="active", - help="Server state (default: active)" + help="Server state (default: active)", ) def create(hostname: str, ip_address: str, name: Optional[str], state: str): """Create a new server.""" try: service = ServerService() server_data = ServerCreate( - hostname=hostname, - ip_address=ip_address, - name=name, - state=ServerState(state.lower()) + hostname=hostname, ip_address=ip_address, name=name, state=ServerState(state.lower()) ) server = service.create_server(server_data) @@ -45,7 +42,9 @@ def create(hostname: str, ip_address: str, name: Optional[str], state: str): click.echo(f"Hostname: {server.hostname}") click.echo(f"IP Address: {server.ip_address}") click.echo(f"Name: {server.name or 'N/A'}") - click.echo(f"State: {server.state.value if hasattr(server.state, 'value') else server.state}") + click.echo( + f"State: {server.state.value if hasattr(server.state, 'value') else server.state}" + ) click.echo(f"Created at: {server.created_at}") except ValueError as e: @@ -59,7 +58,13 @@ def create(hostname: str, ip_address: str, name: Optional[str], state: str): @cli.command() @click.option("--skip", default=0, help="Number of records to skip") @click.option("--limit", default=100, help="Maximum number of records to return") -@click.option("--format", "output_format", type=click.Choice(["table", "json"]), default="table", help="Output format") +@click.option( + "--format", + "output_format", + type=click.Choice(["table", "json"]), + default="table", + help="Output format", +) def list(skip: int, limit: int, output_format: str): """List all servers.""" try: @@ -72,6 +77,7 @@ def list(skip: int, limit: int, output_format: str): if output_format == "json": import json + servers_data = [ { "id": s.id, @@ -80,7 +86,7 @@ def list(skip: int, limit: int, output_format: str): "name": s.name, "state": s.state, "created_at": s.created_at.isoformat(), - "updated_at": s.updated_at.isoformat() + "updated_at": s.updated_at.isoformat(), } for s in servers ] @@ -101,7 +107,13 @@ def list(skip: int, limit: int, output_format: str): @cli.command() @click.argument("server_id", type=int) -@click.option("--format", "output_format", type=click.Choice(["table", "json"]), default="table", help="Output format") +@click.option( + "--format", + "output_format", + type=click.Choice(["table", "json"]), + default="table", + help="Output format", +) def get(server_id: int, output_format: str): """Get server by ID.""" try: @@ -114,6 +126,7 @@ def get(server_id: int, output_format: str): if output_format == "json": import json + server_data = { "id": server.id, "hostname": server.hostname, @@ -121,16 +134,18 @@ def get(server_id: int, output_format: str): "name": server.name, "state": server.state, "created_at": server.created_at.isoformat(), - "updated_at": server.updated_at.isoformat() + "updated_at": server.updated_at.isoformat(), } click.echo(json.dumps(server_data, indent=2)) else: - click.echo(f"\nServer Details:\n") + click.echo("\nServer Details:\n") click.echo(f"ID: {server.id}") click.echo(f"Hostname: {server.hostname}") click.echo(f"IP Address: 
{server.ip_address}") click.echo(f"Name: {server.name or 'N/A'}") - click.echo(f"State: {server.state.value if hasattr(server.state, 'value') else server.state}") + click.echo( + f"State: {server.state.value if hasattr(server.state, 'value') else server.state}" + ) click.echo(f"Created at: {server.created_at}") click.echo(f"Updated at: {server.updated_at}") @@ -147,14 +162,23 @@ def get(server_id: int, output_format: str): @click.option( "--state", type=click.Choice(["active", "offline", "retired"], case_sensitive=False), - help="New server state" + help="New server state", ) -def update(server_id: int, hostname: Optional[str], ip_address: Optional[str], name: Optional[str], state: Optional[str]): +def update( + server_id: int, + hostname: Optional[str], + ip_address: Optional[str], + name: Optional[str], + state: Optional[str], +): """Update server by ID.""" try: # Check if at least one field is provided if not any([hostname, ip_address, name, state]): - click.echo(click.style("Error: At least one field must be provided for update.", fg="red"), err=True) + click.echo( + click.style("Error: At least one field must be provided for update.", fg="red"), + err=True, + ) sys.exit(1) service = ServerService() @@ -182,7 +206,9 @@ def update(server_id: int, hostname: Optional[str], ip_address: Optional[str], n click.echo(f"Hostname: {server.hostname}") click.echo(f"IP Address: {server.ip_address}") click.echo(f"Name: {server.name or 'N/A'}") - click.echo(f"State: {server.state.value if hasattr(server.state, 'value') else server.state}") + click.echo( + f"State: {server.state.value if hasattr(server.state, 'value') else server.state}" + ) click.echo(f"Updated at: {server.updated_at}") except ValueError as e: @@ -209,12 +235,14 @@ def delete(server_id: int, yes: bool): # Confirm deletion unless --yes flag is provided if not yes: - click.echo(f"\nServer to delete:") + click.echo("\nServer to delete:") click.echo(f"ID: {server.id}") click.echo(f"Hostname: {server.hostname}") click.echo(f"IP Address: {server.ip_address}") click.echo(f"Name: {server.name or 'N/A'}") - click.echo(f"State: {server.state.value if hasattr(server.state, 'value') else server.state}\n") + click.echo( + f"State: {server.state.value if hasattr(server.state, 'value') else server.state}\n" + ) if not click.confirm("Are you sure you want to delete this server?"): click.echo("Deletion cancelled.") diff --git a/app/core/config.py b/app/core/config.py index 122d542..3d77a55 100644 --- a/app/core/config.py +++ b/app/core/config.py @@ -3,7 +3,7 @@ """ import os from functools import lru_cache -from typing import List, Union + from pydantic import Field, field_validator from pydantic_settings import BaseSettings, SettingsConfigDict @@ -23,7 +23,9 @@ class SettingsBase(BaseSettings): TEST_DATABASE_URL: str = "" # CORS (stored as string, converted to list via property) - allowed_origins_str: str = Field(default="http://localhost:8000", validation_alias="ALLOWED_ORIGINS") + allowed_origins_str: str = Field( + default="http://localhost:8000", validation_alias="ALLOWED_ORIGINS" + ) ENABLE_CORS: bool = True # Logging/Debug @@ -47,7 +49,7 @@ class SettingsBase(BaseSettings): ) @property - def ALLOWED_ORIGINS(self) -> List[str]: + def ALLOWED_ORIGINS(self) -> list[str]: """Parse and return CORS origins as a list.""" if not self.allowed_origins_str or self.allowed_origins_str.strip() == "": return ["http://localhost:8000"] @@ -98,11 +100,13 @@ def validate_required_fields(cls, v, info): def validate_cors_origins(cls, v): """Ensure CORS origins 
are explicitly set in production.""" if not v or v.strip() == "" or v == "*": - raise ValueError("ALLOWED_ORIGINS must be explicitly set in production (not * or empty)") + raise ValueError( + "ALLOWED_ORIGINS must be explicitly set in production (not * or empty)" + ) return v -@lru_cache() +@lru_cache def get_settings() -> SettingsBase: """Factory function to get environment-specific settings (cached singleton).""" env = os.getenv("APP_ENV", "development").lower() diff --git a/app/db/database.py b/app/db/database.py index 0ea75bf..d3f7068 100644 --- a/app/db/database.py +++ b/app/db/database.py @@ -1,12 +1,13 @@ """ Database connection and management using raw SQL. """ -import psycopg2 -import psycopg2.extras +from collections.abc import Generator from contextlib import contextmanager -from typing import Generator from urllib.parse import urlparse +import psycopg2 +import psycopg2.extras + from app.core.config import settings @@ -91,6 +92,7 @@ def get_db() -> Database: def init_db(): """Initialize database and create tables.""" from app.db.schema import create_tables + create_tables() diff --git a/app/main.py b/app/main.py index 4f488ae..1151ea4 100644 --- a/app/main.py +++ b/app/main.py @@ -2,11 +2,12 @@ Main FastAPI application entry point. """ from contextlib import asynccontextmanager + from fastapi import FastAPI from fastapi.middleware.cors import CORSMiddleware +from app.api import health, servers from app.core.config import settings -from app.api import servers, health @asynccontextmanager @@ -14,12 +15,14 @@ async def lifespan(app: FastAPI): """Application lifespan events.""" # Startup from app.db.database import init_db + init_db() yield # Shutdown from app.db.database import close_db + close_db() diff --git a/app/models/server.py b/app/models/server.py index 3511df8..02e8928 100644 --- a/app/models/server.py +++ b/app/models/server.py @@ -1,8 +1,8 @@ """ Server data access layer using raw SQL. """ -from typing import List, Optional from datetime import datetime +from typing import Optional from app.db.database import Database from app.schemas.server import ServerCreate, ServerUpdate @@ -39,7 +39,7 @@ def get_by_hostname(self, hostname: str) -> Optional[dict]: return dict(results[0]) return None - def get_all(self, skip: int = 0, limit: int = 100) -> List[dict]: + def get_all(self, skip: int = 0, limit: int = 100) -> list[dict]: """Get all servers with pagination.""" query = "SELECT * FROM servers ORDER BY created_at DESC LIMIT %s OFFSET %s" results = self.db.execute_query(query, (limit, skip)) @@ -82,4 +82,4 @@ def count(self) -> int: """Count total servers.""" query = "SELECT COUNT(*) as count FROM servers" result = self.db.execute_query(query) - return result[0]['count'] if result else 0 + return result[0]["count"] if result else 0 diff --git a/app/schemas/server.py b/app/schemas/server.py index 9189621..4f1cd87 100644 --- a/app/schemas/server.py +++ b/app/schemas/server.py @@ -1,15 +1,17 @@ """ Server schema definitions using Pydantic. 
""" -from typing import Optional +import ipaddress from datetime import datetime from enum import Enum +from typing import Optional + from pydantic import BaseModel, Field, field_validator -import ipaddress class ServerState(str, Enum): """Server state enumeration.""" + ACTIVE = "active" OFFLINE = "offline" RETIRED = "retired" @@ -17,6 +19,7 @@ class ServerState(str, Enum): class ServerBase(BaseModel): """Base server schema.""" + hostname: str = Field(..., min_length=1, max_length=255, description="Unique server hostname") ip_address: str = Field(..., description="Server IP address") name: Optional[str] = Field(None, max_length=255, description="Server name/description") @@ -36,11 +39,13 @@ def validate_ip_address(cls, v: str) -> str: class ServerCreate(ServerBase): """Schema for creating a server.""" + pass class ServerUpdate(BaseModel): """Schema for updating a server.""" + hostname: Optional[str] = Field(None, min_length=1, max_length=255) ip_address: Optional[str] = None name: Optional[str] = Field(None, max_length=255) @@ -61,6 +66,7 @@ def validate_ip_address(cls, v: Optional[str]) -> Optional[str]: class ServerResponse(ServerBase): """Schema for server response.""" + id: int created_at: datetime updated_at: datetime diff --git a/app/services/server_service.py b/app/services/server_service.py index a43ff4f..b00669a 100644 --- a/app/services/server_service.py +++ b/app/services/server_service.py @@ -1,11 +1,11 @@ """ Server service layer with business logic. """ -from typing import List, Optional +from typing import Optional from app.db.database import get_db from app.models.server import ServerModel -from app.schemas.server import ServerCreate, ServerUpdate, ServerResponse +from app.schemas.server import ServerCreate, ServerResponse, ServerUpdate class ServerService: @@ -35,12 +35,14 @@ def get_server(self, server_id: int) -> Optional[ServerResponse]: return ServerResponse(**server_dict) return None - def get_servers(self, skip: int = 0, limit: int = 100) -> List[ServerResponse]: + def get_servers(self, skip: int = 0, limit: int = 100) -> list[ServerResponse]: """Get all servers with pagination.""" servers = self.server_model.get_all(skip, limit) return [ServerResponse(**server) for server in servers] - def update_server(self, server_id: int, server_update: ServerUpdate) -> Optional[ServerResponse]: + def update_server( + self, server_id: int, server_update: ServerUpdate + ) -> Optional[ServerResponse]: """Update server.""" # Check if server exists existing_server = self.server_model.get_by_id(server_id) @@ -48,7 +50,7 @@ def update_server(self, server_id: int, server_update: ServerUpdate) -> Optional return None # Check hostname uniqueness if being updated - if server_update.hostname and server_update.hostname != existing_server['hostname']: + if server_update.hostname and server_update.hostname != existing_server["hostname"]: if self.server_model.get_by_hostname(server_update.hostname): raise ValueError(f"Server with hostname '{server_update.hostname}' already exists") diff --git a/tests/conftest.py b/tests/conftest.py index 173ddb4..6d357b4 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -2,6 +2,7 @@ Pytest configuration and fixtures. 
""" import os + import pytest from fastapi.testclient import TestClient from testcontainers.postgres import PostgresContainer @@ -9,10 +10,9 @@ # Set test environment BEFORE importing app os.environ["APP_ENV"] = "testing" -from app.main import app -from app.core.config import get_settings from app.db.database import Database from app.db.schema import create_tables, drop_tables +from app.main import app @pytest.fixture(scope="session") @@ -64,5 +64,5 @@ def sample_server_data(): "hostname": "web-server-01", "ip_address": "192.168.1.100", "name": "Web Server 1", - "state": "active" + "state": "active", } diff --git a/tests/test_cli.py b/tests/test_cli.py index 98cdef2..b32e6b9 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -3,6 +3,7 @@ """ import pytest from click.testing import CliRunner + from app.cli import cli @@ -28,13 +29,20 @@ def test_cli_help(runner): @pytest.mark.integration def test_create_server_success(runner, clean_db): """Test creating a server via CLI.""" - result = runner.invoke(cli, [ - "create", - "--hostname", "cli-web-01", - "--ip-address", "192.168.1.200", - "--name", "CLI Web Server", - "--state", "active" - ]) + result = runner.invoke( + cli, + [ + "create", + "--hostname", + "cli-web-01", + "--ip-address", + "192.168.1.200", + "--name", + "CLI Web Server", + "--state", + "active", + ], + ) assert result.exit_code == 0 assert "Server created successfully" in result.output assert "cli-web-01" in result.output @@ -44,10 +52,7 @@ def test_create_server_success(runner, clean_db): @pytest.mark.integration def test_create_server_missing_required_field(runner): """Test creating a server with missing required field.""" - result = runner.invoke(cli, [ - "create", - "--hostname", "incomplete-server" - ]) + result = runner.invoke(cli, ["create", "--hostname", "incomplete-server"]) assert result.exit_code != 0 assert "Missing option" in result.output or "required" in result.output.lower() @@ -55,12 +60,18 @@ def test_create_server_missing_required_field(runner): @pytest.mark.integration def test_create_server_invalid_ip(runner, clean_db): """Test creating a server with invalid IP address.""" - result = runner.invoke(cli, [ - "create", - "--hostname", "invalid-ip-server", - "--ip-address", "999.999.999.999", - "--state", "active" - ]) + result = runner.invoke( + cli, + [ + "create", + "--hostname", + "invalid-ip-server", + "--ip-address", + "999.999.999.999", + "--state", + "active", + ], + ) assert result.exit_code == 1 assert "Error" in result.output assert "Invalid IP address" in result.output @@ -70,20 +81,32 @@ def test_create_server_invalid_ip(runner, clean_db): def test_create_server_duplicate_hostname(runner, clean_db): """Test creating a server with duplicate hostname.""" # Create first server - runner.invoke(cli, [ - "create", - "--hostname", "duplicate-server", - "--ip-address", "192.168.1.100", - "--state", "active" - ]) + runner.invoke( + cli, + [ + "create", + "--hostname", + "duplicate-server", + "--ip-address", + "192.168.1.100", + "--state", + "active", + ], + ) # Try to create second server with same hostname - result = runner.invoke(cli, [ - "create", - "--hostname", "duplicate-server", - "--ip-address", "192.168.1.101", - "--state", "active" - ]) + result = runner.invoke( + cli, + [ + "create", + "--hostname", + "duplicate-server", + "--ip-address", + "192.168.1.101", + "--state", + "active", + ], + ) assert result.exit_code == 1 assert "Error" in result.output assert "already exists" in result.output @@ -95,12 +118,18 @@ def 
test_create_server_with_all_states(runner, clean_db): states = ["active", "offline", "retired"] for state in states: - result = runner.invoke(cli, [ - "create", - "--hostname", f"server-{state}", - "--ip-address", f"10.0.0.{states.index(state) + 1}", - "--state", state - ]) + result = runner.invoke( + cli, + [ + "create", + "--hostname", + f"server-{state}", + "--ip-address", + f"10.0.0.{states.index(state) + 1}", + "--state", + state, + ], + ) assert result.exit_code == 0 assert "Server created successfully" in result.output assert state in result.output @@ -119,12 +148,18 @@ def test_list_servers_table_format(runner, clean_db): """Test listing servers in table format.""" # Create a few servers for i in range(3): - runner.invoke(cli, [ - "create", - "--hostname", f"list-server-{i:02d}", - "--ip-address", f"10.0.1.{i+1}", - "--state", "active" - ]) + runner.invoke( + cli, + [ + "create", + "--hostname", + f"list-server-{i:02d}", + "--ip-address", + f"10.0.1.{i+1}", + "--state", + "active", + ], + ) result = runner.invoke(cli, ["list"]) assert result.exit_code == 0 @@ -138,12 +173,10 @@ def test_list_servers_table_format(runner, clean_db): def test_list_servers_json_format(runner, clean_db): """Test listing servers in JSON format.""" # Create a server - runner.invoke(cli, [ - "create", - "--hostname", "json-server", - "--ip-address", "10.0.2.1", - "--state", "active" - ]) + runner.invoke( + cli, + ["create", "--hostname", "json-server", "--ip-address", "10.0.2.1", "--state", "active"], + ) result = runner.invoke(cli, ["list", "--format", "json"]) assert result.exit_code == 0 @@ -157,12 +190,18 @@ def test_list_servers_pagination(runner, clean_db): """Test listing servers with pagination.""" # Create 5 servers for i in range(5): - runner.invoke(cli, [ - "create", - "--hostname", f"page-server-{i:02d}", - "--ip-address", f"10.0.3.{i+1}", - "--state", "active" - ]) + runner.invoke( + cli, + [ + "create", + "--hostname", + f"page-server-{i:02d}", + "--ip-address", + f"10.0.3.{i+1}", + "--state", + "active", + ], + ) # Test skip and limit result = runner.invoke(cli, ["list", "--skip", "2", "--limit", "2"]) @@ -174,17 +213,25 @@ def test_list_servers_pagination(runner, clean_db): def test_get_server_success(runner, clean_db): """Test getting a server by ID.""" # Create a server - create_result = runner.invoke(cli, [ - "create", - "--hostname", "get-server", - "--ip-address", "10.0.4.1", - "--name", "Get Test Server", - "--state", "active" - ]) + create_result = runner.invoke( + cli, + [ + "create", + "--hostname", + "get-server", + "--ip-address", + "10.0.4.1", + "--name", + "Get Test Server", + "--state", + "active", + ], + ) # Extract ID from create output (hacky but works for test) # The output should contain "ID: " import re + id_match = re.search(r"ID: (\d+)", create_result.output) assert id_match is not None server_id = id_match.group(1) @@ -202,14 +249,21 @@ def test_get_server_success(runner, clean_db): def test_get_server_json_format(runner, clean_db): """Test getting a server in JSON format.""" # Create a server - create_result = runner.invoke(cli, [ - "create", - "--hostname", "json-get-server", - "--ip-address", "10.0.4.2", - "--state", "active" - ]) + create_result = runner.invoke( + cli, + [ + "create", + "--hostname", + "json-get-server", + "--ip-address", + "10.0.4.2", + "--state", + "active", + ], + ) import re + id_match = re.search(r"ID: (\d+)", create_result.output) server_id = id_match.group(1) @@ -232,25 +286,30 @@ def test_get_server_not_found(runner, clean_db): def 
test_update_server_success(runner, clean_db): """Test updating a server.""" # Create a server - create_result = runner.invoke(cli, [ - "create", - "--hostname", "update-server", - "--ip-address", "10.0.5.1", - "--name", "Original Name", - "--state", "active" - ]) + create_result = runner.invoke( + cli, + [ + "create", + "--hostname", + "update-server", + "--ip-address", + "10.0.5.1", + "--name", + "Original Name", + "--state", + "active", + ], + ) import re + id_match = re.search(r"ID: (\d+)", create_result.output) server_id = id_match.group(1) # Update the server - result = runner.invoke(cli, [ - "update", - server_id, - "--name", "Updated Name", - "--state", "offline" - ]) + result = runner.invoke( + cli, ["update", server_id, "--name", "Updated Name", "--state", "offline"] + ) assert result.exit_code == 0 assert "Server updated successfully" in result.output assert "Updated Name" in result.output @@ -261,23 +320,18 @@ def test_update_server_success(runner, clean_db): def test_update_server_hostname(runner, clean_db): """Test updating server hostname.""" # Create a server - create_result = runner.invoke(cli, [ - "create", - "--hostname", "old-hostname", - "--ip-address", "10.0.5.2", - "--state", "active" - ]) + create_result = runner.invoke( + cli, + ["create", "--hostname", "old-hostname", "--ip-address", "10.0.5.2", "--state", "active"], + ) import re + id_match = re.search(r"ID: (\d+)", create_result.output) server_id = id_match.group(1) # Update hostname - result = runner.invoke(cli, [ - "update", - server_id, - "--hostname", "new-hostname" - ]) + result = runner.invoke(cli, ["update", server_id, "--hostname", "new-hostname"]) assert result.exit_code == 0 assert "new-hostname" in result.output @@ -286,14 +340,21 @@ def test_update_server_hostname(runner, clean_db): def test_update_server_no_fields(runner, clean_db): """Test updating a server without providing any fields.""" # Create a server - create_result = runner.invoke(cli, [ - "create", - "--hostname", "no-update-server", - "--ip-address", "10.0.5.3", - "--state", "active" - ]) + create_result = runner.invoke( + cli, + [ + "create", + "--hostname", + "no-update-server", + "--ip-address", + "10.0.5.3", + "--state", + "active", + ], + ) import re + id_match = re.search(r"ID: (\d+)", create_result.output) server_id = id_match.group(1) @@ -307,11 +368,7 @@ def test_update_server_no_fields(runner, clean_db): @pytest.mark.integration def test_update_server_not_found(runner, clean_db): """Test updating a non-existent server.""" - result = runner.invoke(cli, [ - "update", - "99999", - "--name", "New Name" - ]) + result = runner.invoke(cli, ["update", "99999", "--name", "New Name"]) assert result.exit_code == 1 assert "not found" in result.output @@ -320,23 +377,26 @@ def test_update_server_not_found(runner, clean_db): def test_update_server_invalid_ip(runner, clean_db): """Test updating a server with invalid IP.""" # Create a server - create_result = runner.invoke(cli, [ - "create", - "--hostname", "invalid-update-server", - "--ip-address", "10.0.5.4", - "--state", "active" - ]) + create_result = runner.invoke( + cli, + [ + "create", + "--hostname", + "invalid-update-server", + "--ip-address", + "10.0.5.4", + "--state", + "active", + ], + ) import re + id_match = re.search(r"ID: (\d+)", create_result.output) server_id = id_match.group(1) # Try to update with invalid IP - result = runner.invoke(cli, [ - "update", - server_id, - "--ip-address", "not-an-ip" - ]) + result = runner.invoke(cli, ["update", server_id, "--ip-address", 
"not-an-ip"]) assert result.exit_code == 1 assert "Error" in result.output @@ -345,22 +405,18 @@ def test_update_server_invalid_ip(runner, clean_db): def test_delete_server_with_confirmation(runner, clean_db): """Test deleting a server with confirmation.""" # Create a server - create_result = runner.invoke(cli, [ - "create", - "--hostname", "delete-server", - "--ip-address", "10.0.6.1", - "--state", "active" - ]) + create_result = runner.invoke( + cli, + ["create", "--hostname", "delete-server", "--ip-address", "10.0.6.1", "--state", "active"], + ) import re + id_match = re.search(r"ID: (\d+)", create_result.output) server_id = id_match.group(1) # Delete with yes confirmation - result = runner.invoke(cli, [ - "delete", - server_id - ], input="y\n") + result = runner.invoke(cli, ["delete", server_id], input="y\n") assert result.exit_code == 0 assert "deleted successfully" in result.output @@ -369,14 +425,21 @@ def test_delete_server_with_confirmation(runner, clean_db): def test_delete_server_skip_confirmation(runner, clean_db): """Test deleting a server with --yes flag.""" # Create a server - create_result = runner.invoke(cli, [ - "create", - "--hostname", "quick-delete-server", - "--ip-address", "10.0.6.2", - "--state", "active" - ]) + create_result = runner.invoke( + cli, + [ + "create", + "--hostname", + "quick-delete-server", + "--ip-address", + "10.0.6.2", + "--state", + "active", + ], + ) import re + id_match = re.search(r"ID: (\d+)", create_result.output) server_id = id_match.group(1) @@ -390,22 +453,26 @@ def test_delete_server_skip_confirmation(runner, clean_db): def test_delete_server_cancel_confirmation(runner, clean_db): """Test canceling server deletion.""" # Create a server - create_result = runner.invoke(cli, [ - "create", - "--hostname", "cancel-delete-server", - "--ip-address", "10.0.6.3", - "--state", "active" - ]) + create_result = runner.invoke( + cli, + [ + "create", + "--hostname", + "cancel-delete-server", + "--ip-address", + "10.0.6.3", + "--state", + "active", + ], + ) import re + id_match = re.search(r"ID: (\d+)", create_result.output) server_id = id_match.group(1) # Delete with no confirmation - result = runner.invoke(cli, [ - "delete", - server_id - ], input="n\n") + result = runner.invoke(cli, ["delete", server_id], input="n\n") assert result.exit_code == 0 assert "cancelled" in result.output diff --git a/tests/test_server_api.py b/tests/test_server_api.py index aa0be5a..7362265 100644 --- a/tests/test_server_api.py +++ b/tests/test_server_api.py @@ -12,7 +12,7 @@ def sample_server_data(): "hostname": "web-server-01", "ip_address": "192.168.1.100", "name": "Web Server 1", - "state": "active" + "state": "active", } @@ -50,7 +50,7 @@ def test_create_server_invalid_ip(client): server_data = { "hostname": "invalid-server", "ip_address": "999.999.999.999", # Invalid IP - "state": "active" + "state": "active", } response = client.post("/servers/", json=server_data) assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY @@ -102,10 +102,7 @@ def test_update_server(client, sample_server_data): server_id = create_response.json()["id"] # Update the server - update_data = { - "name": "Updated Web Server", - "state": "offline" - } + update_data = {"name": "Updated Web Server", "state": "offline"} response = client.put(f"/servers/{server_id}", json=update_data) assert response.status_code == status.HTTP_200_OK @@ -157,11 +154,7 @@ def test_server_state_enum(client): valid_states = ["active", "offline", "retired"] for state in valid_states: - server_data = { - 
"hostname": f"server-{state}", - "ip_address": "10.0.0.1", - "state": state - } + server_data = {"hostname": f"server-{state}", "ip_address": "10.0.0.1", "state": state} response = client.post("/servers/", json=server_data) assert response.status_code == status.HTTP_201_CREATED assert response.json()["state"] == state @@ -175,7 +168,7 @@ def test_server_pagination(client): server_data = { "hostname": f"server-{i:02d}", "ip_address": f"192.168.1.{i+1}", - "state": "active" + "state": "active", } client.post("/servers/", json=server_data) diff --git a/tests/test_server_service.py b/tests/test_server_service.py index 22ac96d..125b47f 100644 --- a/tests/test_server_service.py +++ b/tests/test_server_service.py @@ -1,12 +1,13 @@ """ Unit tests for server service layer. """ -import pytest -from unittest.mock import Mock, patch from datetime import datetime +from unittest.mock import Mock, patch +import pytest + +from app.schemas.server import ServerCreate, ServerResponse, ServerState, ServerUpdate from app.services.server_service import ServerService -from app.schemas.server import ServerCreate, ServerUpdate, ServerState, ServerResponse @pytest.fixture @@ -24,8 +25,8 @@ def mock_server_model(): @pytest.fixture def service_with_mocks(mock_db, mock_server_model): """Create a service with mocked dependencies.""" - with patch('app.services.server_service.get_db', return_value=mock_db): - with patch('app.services.server_service.ServerModel', return_value=mock_server_model): + with patch("app.services.server_service.get_db", return_value=mock_db): + with patch("app.services.server_service.ServerModel", return_value=mock_server_model): service = ServerService() service.server_model = mock_server_model return service @@ -35,13 +36,13 @@ def service_with_mocks(mock_db, mock_server_model): def sample_server_dict(): """Sample server dictionary as returned from database.""" return { - 'id': 1, - 'hostname': 'web-server-01', - 'ip_address': '192.168.1.100', - 'name': 'Web Server 1', - 'state': 'active', - 'created_at': datetime(2024, 1, 15, 10, 30, 0), - 'updated_at': datetime(2024, 1, 15, 10, 30, 0) + "id": 1, + "hostname": "web-server-01", + "ip_address": "192.168.1.100", + "name": "Web Server 1", + "state": "active", + "created_at": datetime(2024, 1, 15, 10, 30, 0), + "updated_at": datetime(2024, 1, 15, 10, 30, 0), } @@ -49,10 +50,10 @@ def sample_server_dict(): def sample_server_create(): """Sample server create schema.""" return ServerCreate( - hostname='web-server-01', - ip_address='192.168.1.100', - name='Web Server 1', - state=ServerState.ACTIVE + hostname="web-server-01", + ip_address="192.168.1.100", + name="Web Server 1", + state=ServerState.ACTIVE, ) @@ -60,7 +61,9 @@ def sample_server_create(): class TestCreateServer: """Tests for create_server method.""" - def test_create_server_success(self, service_with_mocks, sample_server_create, sample_server_dict): + def test_create_server_success( + self, service_with_mocks, sample_server_create, sample_server_dict + ): """Test successful server creation.""" # Setup mocks service_with_mocks.server_model.get_by_hostname.return_value = None @@ -73,13 +76,15 @@ def test_create_server_success(self, service_with_mocks, sample_server_create, s # Verify assert isinstance(result, ServerResponse) assert result.id == 1 - assert result.hostname == 'web-server-01' - assert result.ip_address == '192.168.1.100' - service_with_mocks.server_model.get_by_hostname.assert_called_once_with('web-server-01') + assert result.hostname == "web-server-01" + assert 
result.ip_address == "192.168.1.100" + service_with_mocks.server_model.get_by_hostname.assert_called_once_with("web-server-01") service_with_mocks.server_model.create.assert_called_once_with(sample_server_create) service_with_mocks.server_model.get_by_id.assert_called_once_with(1) - def test_create_server_duplicate_hostname(self, service_with_mocks, sample_server_create, sample_server_dict): + def test_create_server_duplicate_hostname( + self, service_with_mocks, sample_server_create, sample_server_dict + ): """Test creating server with duplicate hostname raises error.""" # Setup mocks - hostname already exists service_with_mocks.server_model.get_by_hostname.return_value = sample_server_dict @@ -101,13 +106,11 @@ def test_create_server_different_states(self, service_with_mocks, sample_server_ service_with_mocks.server_model.create.return_value = 1 server_dict = sample_server_dict.copy() - server_dict['state'] = state.value + server_dict["state"] = state.value service_with_mocks.server_model.get_by_id.return_value = server_dict server_create = ServerCreate( - hostname=f'server-{state.value}', - ip_address='10.0.0.1', - state=state + hostname=f"server-{state.value}", ip_address="10.0.0.1", state=state ) result = service_with_mocks.create_server(server_create) @@ -126,7 +129,7 @@ def test_get_server_success(self, service_with_mocks, sample_server_dict): assert isinstance(result, ServerResponse) assert result.id == 1 - assert result.hostname == 'web-server-01' + assert result.hostname == "web-server-01" service_with_mocks.server_model.get_by_id.assert_called_once_with(1) def test_get_server_not_found(self, service_with_mocks): @@ -156,13 +159,13 @@ def test_get_servers_multiple(self, service_with_mocks): """Test getting multiple servers.""" servers = [ { - 'id': i, - 'hostname': f'server-{i:02d}', - 'ip_address': f'10.0.0.{i}', - 'name': f'Server {i}', - 'state': 'active', - 'created_at': datetime(2024, 1, 15, 10, 30, 0), - 'updated_at': datetime(2024, 1, 15, 10, 30, 0) + "id": i, + "hostname": f"server-{i:02d}", + "ip_address": f"10.0.0.{i}", + "name": f"Server {i}", + "state": "active", + "created_at": datetime(2024, 1, 15, 10, 30, 0), + "updated_at": datetime(2024, 1, 15, 10, 30, 0), } for i in range(1, 6) ] @@ -172,8 +175,8 @@ def test_get_servers_multiple(self, service_with_mocks): assert len(result) == 5 assert all(isinstance(s, ServerResponse) for s in result) - assert result[0].hostname == 'server-01' - assert result[4].hostname == 'server-05' + assert result[0].hostname == "server-01" + assert result[4].hostname == "server-05" def test_get_servers_with_pagination(self, service_with_mocks): """Test getting servers with pagination parameters.""" @@ -192,31 +195,31 @@ def test_update_server_success(self, service_with_mocks, sample_server_dict): """Test successfully updating a server.""" # Setup updated_dict = sample_server_dict.copy() - updated_dict['name'] = 'Updated Server' - updated_dict['state'] = 'offline' + updated_dict["name"] = "Updated Server" + updated_dict["state"] = "offline" service_with_mocks.server_model.get_by_id.side_effect = [ sample_server_dict, # First call to check existence - updated_dict # Second call to get updated server + updated_dict, # Second call to get updated server ] service_with_mocks.server_model.update.return_value = 1 - update_data = ServerUpdate(name='Updated Server', state=ServerState.OFFLINE) + update_data = ServerUpdate(name="Updated Server", state=ServerState.OFFLINE) # Execute result = service_with_mocks.update_server(1, update_data) # Verify 
assert isinstance(result, ServerResponse) - assert result.name == 'Updated Server' - assert result.state == 'offline' + assert result.name == "Updated Server" + assert result.state == "offline" service_with_mocks.server_model.update.assert_called_once_with(1, update_data) def test_update_server_not_found(self, service_with_mocks): """Test updating a server that doesn't exist.""" service_with_mocks.server_model.get_by_id.return_value = None - update_data = ServerUpdate(name='New Name') + update_data = ServerUpdate(name="New Name") result = service_with_mocks.update_server(99999, update_data) assert result is None @@ -227,13 +230,13 @@ def test_update_server_hostname_to_existing(self, service_with_mocks, sample_ser # Setup existing_server = sample_server_dict.copy() another_server = sample_server_dict.copy() - another_server['id'] = 2 - another_server['hostname'] = 'another-server' + another_server["id"] = 2 + another_server["hostname"] = "another-server" service_with_mocks.server_model.get_by_id.return_value = another_server service_with_mocks.server_model.get_by_hostname.return_value = existing_server - update_data = ServerUpdate(hostname='web-server-01') + update_data = ServerUpdate(hostname="web-server-01") # Execute and verify with pytest.raises(ValueError) as exc_info: @@ -246,13 +249,10 @@ def test_update_server_hostname_to_same(self, service_with_mocks, sample_server_ """Test updating hostname to the same value (should succeed).""" updated_dict = sample_server_dict.copy() - service_with_mocks.server_model.get_by_id.side_effect = [ - sample_server_dict, - updated_dict - ] + service_with_mocks.server_model.get_by_id.side_effect = [sample_server_dict, updated_dict] service_with_mocks.server_model.update.return_value = 1 - update_data = ServerUpdate(hostname='web-server-01') # Same hostname + update_data = ServerUpdate(hostname="web-server-01") # Same hostname result = service_with_mocks.update_server(1, update_data) @@ -263,20 +263,17 @@ def test_update_server_hostname_to_same(self, service_with_mocks, sample_server_ def test_update_server_partial(self, service_with_mocks, sample_server_dict): """Test partial update (only some fields).""" updated_dict = sample_server_dict.copy() - updated_dict['state'] = 'retired' + updated_dict["state"] = "retired" - service_with_mocks.server_model.get_by_id.side_effect = [ - sample_server_dict, - updated_dict - ] + service_with_mocks.server_model.get_by_id.side_effect = [sample_server_dict, updated_dict] service_with_mocks.server_model.update.return_value = 1 update_data = ServerUpdate(state=ServerState.RETIRED) result = service_with_mocks.update_server(1, update_data) - assert result.state == 'retired' - assert result.hostname == 'web-server-01' # Unchanged + assert result.state == "retired" + assert result.hostname == "web-server-01" # Unchanged @pytest.mark.unit @@ -312,27 +309,25 @@ def test_create_server_without_name(self, service_with_mocks, sample_server_dict service_with_mocks.server_model.create.return_value = 1 server_dict = sample_server_dict.copy() - server_dict['hostname'] = 'no-name-server' - server_dict['ip_address'] = '10.0.0.1' - server_dict['name'] = None + server_dict["hostname"] = "no-name-server" + server_dict["ip_address"] = "10.0.0.1" + server_dict["name"] = None service_with_mocks.server_model.get_by_id.return_value = server_dict server_create = ServerCreate( - hostname='no-name-server', - ip_address='10.0.0.1', - state=ServerState.ACTIVE + hostname="no-name-server", ip_address="10.0.0.1", state=ServerState.ACTIVE ) result = 
service_with_mocks.create_server(server_create) assert result.name is None - assert result.hostname == 'no-name-server' + assert result.hostname == "no-name-server" def test_update_server_empty_update(self, service_with_mocks, sample_server_dict): """Test update with no fields changed still works.""" service_with_mocks.server_model.get_by_id.side_effect = [ sample_server_dict, - sample_server_dict + sample_server_dict, ] service_with_mocks.server_model.update.return_value = 1 @@ -347,8 +342,8 @@ def test_update_server_empty_update(self, service_with_mocks, sample_server_dict def test_service_initialization(self): """Test that service initializes correctly with real dependencies.""" - with patch('app.services.server_service.get_db') as mock_get_db: - with patch('app.services.server_service.ServerModel') as mock_model_class: + with patch("app.services.server_service.get_db") as mock_get_db: + with patch("app.services.server_service.ServerModel") as mock_model_class: mock_db = Mock() mock_get_db.return_value = mock_db From 843a830cd39f8b0c7949ec1e61b9f237e02a4d0f Mon Sep 17 00:00:00 2001 From: dhuzjak Date: Thu, 18 Dec 2025 17:48:40 +0100 Subject: [PATCH 14/17] Fix linter errors Fix warnings and errors found by ruff --- app/api/servers.py | 4 ++-- app/cli.py | 11 +++++------ app/models/server.py | 5 ++--- app/schemas/server.py | 21 ++++++++++----------- app/services/server_service.py | 7 ++----- tests/conftest.py | 6 +++--- 6 files changed, 24 insertions(+), 30 deletions(-) diff --git a/app/api/servers.py b/app/api/servers.py index e952f6a..e430cbb 100644 --- a/app/api/servers.py +++ b/app/api/servers.py @@ -17,7 +17,7 @@ async def create_server(server: ServerCreate): try: return service.create_server(server) except ValueError as e: - raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e)) + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e)) from e @router.get("/", response_model=list[ServerResponse]) @@ -55,7 +55,7 @@ async def update_server(server_id: int, server_update: ServerUpdate): ) return server except ValueError as e: - raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e)) + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e)) from e @router.delete("/{server_id}", status_code=status.HTTP_204_NO_CONTENT) diff --git a/app/cli.py b/app/cli.py index f60f0c3..0ee1260 100644 --- a/app/cli.py +++ b/app/cli.py @@ -2,7 +2,6 @@ CLI for server inventory management. """ import sys -from typing import Optional import click from tabulate import tabulate @@ -28,7 +27,7 @@ def cli(): default="active", help="Server state (default: active)", ) -def create(hostname: str, ip_address: str, name: Optional[str], state: str): +def create(hostname: str, ip_address: str, name: str | None, state: str): """Create a new server.""" try: service = ServerService() @@ -166,10 +165,10 @@ def get(server_id: int, output_format: str): ) def update( server_id: int, - hostname: Optional[str], - ip_address: Optional[str], - name: Optional[str], - state: Optional[str], + hostname: str | None, + ip_address: str | None, + name: str | None, + state: str | None, ): """Update server by ID.""" try: diff --git a/app/models/server.py b/app/models/server.py index 02e8928..faf1144 100644 --- a/app/models/server.py +++ b/app/models/server.py @@ -2,7 +2,6 @@ Server data access layer using raw SQL. 
""" from datetime import datetime -from typing import Optional from app.db.database import Database from app.schemas.server import ServerCreate, ServerUpdate @@ -23,7 +22,7 @@ def create(self, server: ServerCreate) -> int: params = (server.hostname, server.ip_address, server.name, server.state.value) return self.db.execute_insert(query, params) - def get_by_id(self, server_id: int) -> Optional[dict]: + def get_by_id(self, server_id: int) -> dict | None: """Get server by ID.""" query = "SELECT * FROM servers WHERE id = %s" results = self.db.execute_query(query, (server_id,)) @@ -31,7 +30,7 @@ def get_by_id(self, server_id: int) -> Optional[dict]: return dict(results[0]) return None - def get_by_hostname(self, hostname: str) -> Optional[dict]: + def get_by_hostname(self, hostname: str) -> dict | None: """Get server by hostname.""" query = "SELECT * FROM servers WHERE hostname = %s" results = self.db.execute_query(query, (hostname,)) diff --git a/app/schemas/server.py b/app/schemas/server.py index 4f1cd87..2497fb4 100644 --- a/app/schemas/server.py +++ b/app/schemas/server.py @@ -4,7 +4,6 @@ import ipaddress from datetime import datetime from enum import Enum -from typing import Optional from pydantic import BaseModel, Field, field_validator @@ -22,7 +21,7 @@ class ServerBase(BaseModel): hostname: str = Field(..., min_length=1, max_length=255, description="Unique server hostname") ip_address: str = Field(..., description="Server IP address") - name: Optional[str] = Field(None, max_length=255, description="Server name/description") + name: str | None = Field(None, max_length=255, description="Server name/description") state: ServerState = Field(default=ServerState.ACTIVE, description="Server state") @field_validator("ip_address") @@ -33,8 +32,8 @@ def validate_ip_address(cls, v: str) -> str: # This will raise ValueError if invalid ipaddress.ip_address(v) return v - except ValueError: - raise ValueError(f"Invalid IP address: {v}") + except ValueError as e: + raise ValueError(f"Invalid IP address: {v}") from e class ServerCreate(ServerBase): @@ -46,21 +45,21 @@ class ServerCreate(ServerBase): class ServerUpdate(BaseModel): """Schema for updating a server.""" - hostname: Optional[str] = Field(None, min_length=1, max_length=255) - ip_address: Optional[str] = None - name: Optional[str] = Field(None, max_length=255) - state: Optional[ServerState] = None + hostname: str | None = Field(None, min_length=1, max_length=255) + ip_address: str | None = None + name: str | None = Field(None, max_length=255) + state: ServerState | None = None @field_validator("ip_address") @classmethod - def validate_ip_address(cls, v: Optional[str]) -> Optional[str]: + def validate_ip_address(cls, v: str | None) -> str | None: """Validate that ip_address is a valid IP address if provided.""" if v is not None: try: ipaddress.ip_address(v) return v - except ValueError: - raise ValueError(f"Invalid IP address: {v}") + except ValueError as e: + raise ValueError(f"Invalid IP address: {v}") from e return v diff --git a/app/services/server_service.py b/app/services/server_service.py index b00669a..60beb44 100644 --- a/app/services/server_service.py +++ b/app/services/server_service.py @@ -1,7 +1,6 @@ """ Server service layer with business logic. 
""" -from typing import Optional from app.db.database import get_db from app.models.server import ServerModel @@ -28,7 +27,7 @@ def create_server(self, server: ServerCreate) -> ServerResponse: server_dict = self.server_model.get_by_id(server_id) return ServerResponse(**server_dict) - def get_server(self, server_id: int) -> Optional[ServerResponse]: + def get_server(self, server_id: int) -> ServerResponse | None: """Get server by ID.""" server_dict = self.server_model.get_by_id(server_id) if server_dict: @@ -40,9 +39,7 @@ def get_servers(self, skip: int = 0, limit: int = 100) -> list[ServerResponse]: servers = self.server_model.get_all(skip, limit) return [ServerResponse(**server) for server in servers] - def update_server( - self, server_id: int, server_update: ServerUpdate - ) -> Optional[ServerResponse]: + def update_server(self, server_id: int, server_update: ServerUpdate) -> ServerResponse | None: """Update server.""" # Check if server exists existing_server = self.server_model.get_by_id(server_id) diff --git a/tests/conftest.py b/tests/conftest.py index 6d357b4..d2db844 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -7,13 +7,13 @@ from fastapi.testclient import TestClient from testcontainers.postgres import PostgresContainer -# Set test environment BEFORE importing app -os.environ["APP_ENV"] = "testing" - from app.db.database import Database from app.db.schema import create_tables, drop_tables from app.main import app +# Set test environment BEFORE importing app +os.environ["APP_ENV"] = "testing" + @pytest.fixture(scope="session") def postgres_container(): From a0f14f728160686628d52ef0d747ac68ef6ec867 Mon Sep 17 00:00:00 2001 From: dhuzjak Date: Thu, 18 Dec 2025 18:51:18 +0100 Subject: [PATCH 15/17] Add Dockerfile and Compose Add Dockerfile to build the image Update Docker Compose and add app into it Assumption is to run production env with Docker Compose --- .dockerignore | 9 +++-- .env.example | 3 -- .env.production.example | 1 - API.md | 35 ++++++++++++++++-- Dockerfile | 79 +++++++++++++++++++++++++++++++++++++++++ Makefile | 24 +++++++++++-- app/core/config.py | 18 +++++----- docker-compose.yml | 36 ++++++++++--------- scripts/init-db.sql | 4 --- tests/conftest.py | 21 ++++++++--- 10 files changed, 185 insertions(+), 45 deletions(-) create mode 100644 Dockerfile diff --git a/.dockerignore b/.dockerignore index c9415c1..a2157a3 100644 --- a/.dockerignore +++ b/.dockerignore @@ -41,8 +41,8 @@ htmlcov/ # Environment .env -.env.local -.env.production +.env.* +!.env.example # Git .git/ @@ -62,3 +62,8 @@ Dockerfile # Other .DS_Store *.log +Makefile +tests/ +scripts/ +bruno/ +TESTING.md diff --git a/.env.example b/.env.example index 5824991..5b69c55 100644 --- a/.env.example +++ b/.env.example @@ -23,9 +23,6 @@ LOG_LEVEL=DEBUG # API Settings API_V1_PREFIX=/api/v1 -# Security -SECRET_KEY=dev-secret-key-change-in-production - # External Services (optional) - Uncomment and set if needed # REDIS_URL=redis://localhost:6379/0 # EXTERNAL_API_URL=http://localhost:9000 diff --git a/.env.production.example b/.env.production.example index de6f818..8224f09 100644 --- a/.env.production.example +++ b/.env.production.example @@ -5,4 +5,3 @@ DEBUG=false LOG_LEVEL=WARNING ALLOWED_ORIGINS=https://yourdomain.com,https://www.yourdomain.com ENABLE_CORS=true -SECRET_KEY= # REQUIRED: Must be set via environment variable diff --git a/API.md b/API.md index 0c0b483..53bb078 100644 --- a/API.md +++ b/API.md @@ -2,15 +2,44 @@ ## Running the Application -### Using Docker Compose +### Using Docker Compose 
(Recommended) -Start the full stack (API + Database): +#### Option 1: Full Stack (Database + API) + +Start the complete stack with one command: ```bash +# Build and start everything +make docker-build +make docker-up-full + +# Or using docker-compose directly docker-compose up -d ``` -The API will be available at `http://localhost:8000` +The API will be available at: +- API: `http://localhost:8000` +- API Docs (Swagger): `http://localhost:8000/docs` +- Database: `localhost:5432` + +Stop the stack: +```bash +make docker-down +# or +docker-compose down +``` + +#### Option 2: Database Only (for local development) + +Start just the PostgreSQL database: + +```bash +make docker-up +# or +docker-compose up -d postgres +``` + +Then run the API locally (see "Running Locally" section below). ### Running Locally diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..6cd8bf4 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,79 @@ +# Multi-stage Dockerfile for Server Inventory Management API + +# Stage 1: Build stage +FROM python:3.12-slim AS builder + +# Set environment variables +ENV PYTHONUNBUFFERED=1 \ + PYTHONDONTWRITEBYTECODE=1 \ + PIP_NO_CACHE_DIR=1 \ + PIP_DISABLE_PIP_VERSION_CHECK=1 \ + POETRY_VERSION=1.7.1 \ + POETRY_HOME="/opt/poetry" \ + POETRY_NO_INTERACTION=1 \ + POETRY_VIRTUALENVS_IN_PROJECT=true \ + POETRY_VIRTUALENVS_CREATE=true + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + curl \ + build-essential \ + libpq-dev \ + && rm -rf /var/lib/apt/lists/* + +# Install Poetry +RUN curl -sSL https://install.python-poetry.org | python3 - && \ + cd /usr/local/bin && \ + ln -s /opt/poetry/bin/poetry + +# Set working directory +WORKDIR /app + +# Copy dependency files +COPY pyproject.toml poetry.lock ./ + +# Install dependencies (production only) +RUN poetry install --only main --no-root --no-directory + +# Stage 2: Runtime stage +FROM python:3.12-slim AS runtime + +# Set environment variables +ENV PYTHONUNBUFFERED=1 \ + PYTHONDONTWRITEBYTECODE=1 \ + PATH="/app/.venv/bin:$PATH" \ + APP_ENV=production + +# Install runtime dependencies +RUN apt-get update && apt-get install -y \ + libpq5 \ + && rm -rf /var/lib/apt/lists/* \ + && apt-get clean + +# Create non-root user +RUN groupadd -r appuser && \ + useradd -r -g appuser -u 1000 appuser && \ + mkdir -p /app && \ + chown -R appuser:appuser /app + +# Set working directory +WORKDIR /app + +# Copy virtual environment from builder +COPY --from=builder --chown=appuser:appuser /app/.venv /app/.venv + +# Copy application code +COPY --chown=appuser:appuser app/ /app/app/ + +# Switch to non-root user +USER appuser + +# Expose port +EXPOSE 8000 + +# Health check +HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \ + CMD python -c "import requests; requests.get('http://localhost:8000/health', timeout=5)" + +# Run the application +CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"] diff --git a/Makefile b/Makefile index b5c27b2..292755b 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,4 @@ -.PHONY: help init setup check-poetry install install-prod setup-db setup-precommit docker-up docker-down docker-logs docker-clean run run-dev run-prod test test-cov format lint ruff check clean +.PHONY: help init setup check-poetry install install-prod setup-db setup-precommit docker-build docker-up docker-up-full docker-down docker-logs docker-clean run run-dev run-prod test test-cov format lint ruff check clean help: @echo "Available commands:" @@ -11,7 +11,9 @@ help: @echo " make setup-precommit 
- Install pre-commit hooks" @echo "" @echo "Docker commands:" + @echo " make docker-build - Build Docker image for the application" @echo " make docker-up - Start PostgreSQL database with Docker Compose" + @echo " make docker-up-full - Start full stack (database + app) with Docker Compose" @echo " make docker-down - Stop Docker Compose services" @echo " make docker-logs - View Docker Compose logs" @echo " make docker-clean - Stop services and remove volumes" @@ -116,9 +118,14 @@ install-prod: poetry install --only main # Docker Compose commands +docker-build: + @echo "🔨 Building Docker image..." + docker build -t server-inventory-app:latest . + @echo "✅ Docker image built successfully!" + docker-up: @echo "🐳 Starting PostgreSQL database..." - docker-compose up -d + docker-compose up -d postgres @echo "⏳ Waiting for PostgreSQL to be ready..." @sleep 3 @echo "✅ PostgreSQL is running!" @@ -128,7 +135,18 @@ docker-up: @echo " Port: 5432" @echo " User: postgres" @echo " Password: postgres" - @echo " Databases: app_db, test_db" + @echo " Database: app_db" + @echo "" + +docker-up-full: + @echo "🐳 Starting full stack (database + app)..." + docker-compose up -d + @echo "⏳ Waiting for services to be ready..." + @sleep 5 + @echo "✅ Services are running!" + @echo "" + @echo "API is available at: http://localhost:8000" + @echo "API docs at: http://localhost:8000/docs" @echo "" docker-down: diff --git a/app/core/config.py b/app/core/config.py index 3d77a55..90c292a 100644 --- a/app/core/config.py +++ b/app/core/config.py @@ -36,9 +36,6 @@ class SettingsBase(BaseSettings): REDIS_URL: str = "" EXTERNAL_API_URL: str = "" - # Security - SECRET_KEY: str = "" - model_config = SettingsConfigDict( env_file=(".env", f".env.{os.getenv('APP_ENV', 'development')}"), env_file_encoding="utf-8", @@ -65,19 +62,22 @@ class DevelopmentSettings(SettingsBase): DATABASE_URL: str = "postgresql://postgres:postgres@localhost:5432/app_db" TEST_DATABASE_URL: str = "postgresql://postgres:postgres@localhost:5432/test_db" allowed_origins_str: str = "http://localhost:3000,http://localhost:8000,http://localhost:5173" - SECRET_KEY: str = "dev-secret-key-change-in-production" class TestingSettings(SettingsBase): - """Testing environment settings.""" + """Testing environment settings. + + Note: DATABASE_URL is a placeholder - testcontainers will override this + during test execution with a temporary PostgreSQL container. + """ APP_ENV: str = "testing" DEBUG: bool = True LOG_LEVEL: str = "INFO" - DATABASE_URL: str = "postgresql://postgres:postgres@localhost:5432/test_db" - TEST_DATABASE_URL: str = "postgresql://postgres:postgres@localhost:5432/test_db" + # Placeholder - testcontainers override this during test execution + DATABASE_URL: str = "postgresql://placeholder-overridden-by-testcontainers" + TEST_DATABASE_URL: str = "" allowed_origins_str: str = "*" - SECRET_KEY: str = "test-secret-key" class ProductionSettings(SettingsBase): @@ -87,7 +87,7 @@ class ProductionSettings(SettingsBase): DEBUG: bool = False LOG_LEVEL: str = "WARNING" - @field_validator("SECRET_KEY", "DATABASE_URL") + @field_validator("DATABASE_URL") @classmethod def validate_required_fields(cls, v, info): """Ensure critical fields are set in production.""" diff --git a/docker-compose.yml b/docker-compose.yml index 0ab2f0e..46b8e10 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -20,22 +20,26 @@ services: retries: 5 restart: unless-stopped - # Uncomment to run the application in Docker - # app: - # build: . 
-  #   container_name: server-inventory-app
-  #   environment:
-  #     APP_ENV: development
-  #     DATABASE_URL: postgresql://postgres:postgres@postgres:5432/app_db
-  #     TEST_DATABASE_URL: postgresql://postgres:postgres@postgres:5432/test_db
-  #   ports:
-  #     - "8000:8000"
-  #   volumes:
-  #     - ./app:/app/app
-  #   depends_on:
-  #     postgres:
-  #       condition: service_healthy
-  #   command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload
+  app:
+    build:
+      context: .
+      dockerfile: Dockerfile
+    container_name: server-inventory-app
+    environment:
+      APP_ENV: production
+      DATABASE_URL: postgresql://postgres:postgres@postgres:5432/app_db
+    ports:
+      - "8000:8000"
+    depends_on:
+      postgres:
+        condition: service_healthy
+    restart: unless-stopped
+    healthcheck:
+      test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8000/health', timeout=5)"]
+      interval: 30s
+      timeout: 10s
+      retries: 3
+      start_period: 10s

 volumes:
   postgres_data:
diff --git a/scripts/init-db.sql b/scripts/init-db.sql
index 13ef36b..f1729b5 100644
--- a/scripts/init-db.sql
+++ b/scripts/init-db.sql
@@ -1,6 +1,2 @@
--- Create test database
-CREATE DATABASE test_db;
-
 -- Grant privileges to postgres user (already has superuser, but being explicit)
 GRANT ALL PRIVILEGES ON DATABASE app_db TO postgres;
-GRANT ALL PRIVILEGES ON DATABASE test_db TO postgres;
diff --git a/tests/conftest.py b/tests/conftest.py
index d2db844..fc19774 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -25,21 +25,34 @@ def postgres_container():

 @pytest.fixture(scope="session")
 def test_db(postgres_container):
     """Create a test database using testcontainer."""
+    import app.db.database as db_module
+
     # Get connection URL from testcontainer
     db_url = postgres_container.get_connection_url()

-    # Override the database connection for tests
-    db = Database(db_url)
+    # Convert SQLAlchemy-style URL to psycopg2 format
+    # testcontainers returns postgresql+psycopg2:// but we need postgresql://
+    db_url = db_url.replace("postgresql+psycopg2://", "postgresql://")
+
+    # Create test database instance
+    test_db_instance = Database(db_url)
+
+    # Override the global db instance for tests
+    original_db = db_module.db
+    db_module.db = test_db_instance

     # Drop and create tables
     drop_tables()
     create_tables()

-    yield db
+    yield test_db_instance

     # Cleanup
     drop_tables()
-    db.close()
+    test_db_instance.close()
+
+    # Restore original db instance
+    db_module.db = original_db


 @pytest.fixture(scope="function")

From 6c2c5007710e62af433718f11c0bc604af79b7b3 Mon Sep 17 00:00:00 2001
From: dhuzjak
Date: Thu, 18 Dec 2025 19:29:28 +0100
Subject: [PATCH 16/17] Cleanup and API.md creation

Clean up unused options such as the test database
Create a Docker Compose file for the development database only
Use the detailed API-Detailed.md for service docs
Use the shortened API.md for quick start
---
 .env.development       |   3 +-
 .env.example           |   4 +-
 API-Detailed.md        | 333 +++++++++++++++++++++++++++++++++++++++
 API.md                 | 342 +++++++++++------------------------------
 Makefile               |  55 ++++---
 app/core/config.py     |   4 +-
 docker-compose.dev.yml |  24 +++
 7 files changed, 485 insertions(+), 280 deletions(-)
 create mode 100644 API-Detailed.md
 create mode 100644 docker-compose.dev.yml

diff --git a/.env.development b/.env.development
index cc395a8..e705459 100644
--- a/.env.development
+++ b/.env.development
@@ -1,6 +1,5 @@
 APP_ENV=development
-DATABASE_URL=postgresql://postgres:postgres@localhost:5432/app_db
-TEST_DATABASE_URL=postgresql://postgres:postgres@localhost:5432/test_db
+DATABASE_URL=postgresql://dev:postgres@localhost:5432/app_db
 DEBUG=true
LOG_LEVEL=DEBUG ALLOWED_ORIGINS=http://localhost:3000,http://localhost:8000,http://localhost:5173 diff --git a/.env.example b/.env.example index 5b69c55..9ee5e55 100644 --- a/.env.example +++ b/.env.example @@ -9,8 +9,8 @@ DESCRIPTION=FastAPI project with raw SQL database layer # Database Settings # Format: postgresql://user:password@host:port/database -DATABASE_URL=postgresql://postgres:postgres@localhost:5432/app_db -TEST_DATABASE_URL=postgresql://postgres:postgres@localhost:5432/test_db +DATABASE_URL=postgresql://dev:postgres@localhost:5432/app_db +TEST_DATABASE_URL= # CORS Settings (comma-separated) ALLOWED_ORIGINS=http://localhost:3000,http://localhost:8000 diff --git a/API-Detailed.md b/API-Detailed.md new file mode 100644 index 0000000..9ab7d0f --- /dev/null +++ b/API-Detailed.md @@ -0,0 +1,333 @@ +# Server Inventory Management - API & CLI Documentation + +## Running the Application + +### Using Docker Compose (Recommended) + +#### Option 1: Full Stack (Database + API) + +Start the complete stack with one command: + +```bash +# Build and start everything +make docker-build +make docker-up-full + +# Or using docker-compose directly +docker-compose up -d +``` + +The API will be available at: +- API: `http://localhost:8000` +- API Docs (Swagger): `http://localhost:8000/docs` +- Database: `localhost:5432` + +Stop the stack: +```bash +make docker-down +# or +docker-compose down +``` + +#### Option 2: Database Only (for local development) + +Start just the PostgreSQL database for development: + +```bash +# Start development database (recommended for local dev) +make dev-db + +# Stop development database +make dev-db-down + +# Stop and remove volumes (clean slate) +make dev-db-clean +``` + +**Development database credentials:** +- Host: `localhost` +- Port: `5432` +- User: `dev` +- Password: `postgres` +- Database: `app_db` + +Then run the API locally (see "Running Locally" section below). + +### Running Locally + +1. Install dependencies: +```bash +poetry install --with dev +``` + +2. Start PostgreSQL (via Docker Compose or locally) + +3. 
Run the API server: +```bash +poetry run uvicorn app.main:app --reload +``` + +## API Specification + +Base URL: `http://localhost:8000` + +Interactive API docs: `http://localhost:8000/docs` + +### Endpoints + +#### Create Server +```http +POST /servers +Content-Type: application/json + +{ + "hostname": "web-server-01", + "ip_address": "192.168.1.100", + "name": "Web Server 1", + "state": "active" +} +``` + +**Response:** `201 Created` +```json +{ + "id": 1, + "hostname": "web-server-01", + "ip_address": "192.168.1.100", + "name": "Web Server 1", + "state": "active", + "created_at": "2024-01-15T10:30:00", + "updated_at": "2024-01-15T10:30:00" +} +``` + +#### List All Servers +```http +GET /servers?skip=0&limit=100 +``` + +**Response:** `200 OK` +```json +[ + { + "id": 1, + "hostname": "web-server-01", + "ip_address": "192.168.1.100", + "name": "Web Server 1", + "state": "active", + "created_at": "2024-01-15T10:30:00", + "updated_at": "2024-01-15T10:30:00" + } +] +``` + +#### Get Server by ID +```http +GET /servers/{id} +``` + +**Response:** `200 OK` (same format as create response) + +#### Update Server +```http +PUT /servers/{id} +Content-Type: application/json + +{ + "hostname": "web-server-01-updated", + "ip_address": "192.168.1.101", + "name": "Updated Web Server", + "state": "offline" +} +``` + +**Response:** `200 OK` (updated server object) + +**Note:** All fields are optional in update requests + +#### Delete Server +```http +DELETE /servers/{id} +``` + +**Response:** `204 No Content` + +### Validation Rules + +- **hostname**: Must be unique, 1-255 characters +- **ip_address**: Must be a valid IPv4 or IPv6 address +- **state**: One of: `active`, `offline`, `retired` +- **name**: Optional, max 255 characters + +### Error Responses + +**400 Bad Request:** +```json +{ + "detail": "Server with hostname 'web-server-01' already exists" +} +``` + +**404 Not Found:** +```json +{ + "detail": "Server with id 1 not found" +} +``` + +## CLI Specification + +The CLI tool `server-mgmt` provides the same functionality as the API. 
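+
+The CLI implementation itself is not part of this patch series, so the following is only a rough sketch of how a `create` subcommand could wrap the same service layer the API uses. The module paths, the `ServerService` name and its no-argument constructor, and the choice of stdlib `argparse` are assumptions for illustration, not code from this repository:
+
+```python
+# Hypothetical sketch -- not the project's actual CLI module.
+import argparse
+
+from app.schemas.server import ServerCreate             # assumed schema location
+from app.services.server_service import ServerService   # assumed service location
+
+
+def main() -> None:
+    parser = argparse.ArgumentParser(prog="server-mgmt")
+    sub = parser.add_subparsers(dest="command", required=True)
+
+    create = sub.add_parser("create", help="Create a new server")
+    create.add_argument("--hostname", required=True)
+    create.add_argument("--ip-address", dest="ip_address", required=True)
+    create.add_argument("--name", default=None)
+    create.add_argument("--state", default="active",
+                        choices=["active", "offline", "retired"])
+
+    args = parser.parse_args()
+    if args.command == "create":
+        service = ServerService()  # assumed constructor
+        server = service.create_server(
+            ServerCreate(
+                hostname=args.hostname,
+                ip_address=args.ip_address,
+                name=args.name,
+                state=args.state,
+            )
+        )
+        # ServerResponse is a Pydantic model, so it can serialize itself
+        print(server.model_dump_json(indent=2))
+
+
+if __name__ == "__main__":
+    main()
+```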
+ +### Installation + +After installing dependencies with Poetry, the CLI is automatically available: + +```bash +poetry install +``` + +### Commands + +#### Create a Server +```bash +poetry run server-mgmt create \ + --hostname web-server-01 \ + --ip-address 192.168.1.100 \ + --name "Web Server 1" \ + --state active +``` + +**Options:** +- `--hostname` (required): Unique server hostname +- `--ip-address` (required): Server IP address +- `--name` (optional): Server name/description +- `--state` (optional): Server state (default: active) + +#### List All Servers +```bash +# Table format (default) +poetry run server-mgmt list + +# JSON format +poetry run server-mgmt list --format json + +# With pagination +poetry run server-mgmt list --skip 10 --limit 20 +``` + +**Options:** +- `--skip`: Number of records to skip (default: 0) +- `--limit`: Maximum records to return (default: 100) +- `--format`: Output format - `table` or `json` (default: table) + +#### Get Server by ID +```bash +# Table format +poetry run server-mgmt get 1 + +# JSON format +poetry run server-mgmt get 1 --format json +``` + +#### Update Server +```bash +poetry run server-mgmt update 1 \ + --hostname web-server-01-updated \ + --ip-address 192.168.1.101 \ + --name "Updated Web Server" \ + --state offline +``` + +**Options:** (all optional, but at least one required) +- `--hostname`: New server hostname +- `--ip-address`: New server IP address +- `--name`: New server name/description +- `--state`: New server state + +#### Delete Server +```bash +# With confirmation prompt +poetry run server-mgmt delete 1 + +# Skip confirmation +poetry run server-mgmt delete 1 --yes +``` + +**Options:** +- `--yes`, `-y`: Skip confirmation prompt + +### CLI Help + +Get help for any command: +```bash +poetry run server-mgmt --help +poetry run server-mgmt create --help +poetry run server-mgmt list --help +# etc. 
+``` + +## Examples + +### API Example (using curl) + +```bash +# Create a server +curl -X POST http://localhost:8000/servers \ + -H "Content-Type: application/json" \ + -d '{ + "hostname": "db-server-01", + "ip_address": "10.0.1.50", + "name": "Primary Database", + "state": "active" + }' + +# List servers +curl http://localhost:8000/servers + +# Get specific server +curl http://localhost:8000/servers/1 + +# Update server +curl -X PUT http://localhost:8000/servers/1 \ + -H "Content-Type: application/json" \ + -d '{"state": "offline"}' + +# Delete server +curl -X DELETE http://localhost:8000/servers/1 +``` + +### CLI Example + +```bash +# Create a server +poetry run server-mgmt create \ + --hostname db-server-01 \ + --ip-address 10.0.1.50 \ + --name "Primary Database" \ + --state active + +# List all servers +poetry run server-mgmt list + +# Get server details +poetry run server-mgmt get 1 + +# Update server state +poetry run server-mgmt update 1 --state offline + +# Delete server +poetry run server-mgmt delete 1 --yes +``` + +## Testing + +Run the test suite: +```bash +poetry run pytest +``` + +Run with coverage: +```bash +poetry run pytest --cov=app tests/ +``` diff --git a/API.md b/API.md index 53bb078..2c6ed77 100644 --- a/API.md +++ b/API.md @@ -1,321 +1,157 @@ -# Server Inventory Management - API & CLI Documentation +# Server Inventory Management - Quick Start -## Running the Application +## Quick Start -### Using Docker Compose (Recommended) - -#### Option 1: Full Stack (Database + API) - -Start the complete stack with one command: +### Running with Docker (Recommended) ```bash -# Build and start everything -make docker-build -make docker-up-full - -# Or using docker-compose directly +# Full stack (API + Database) docker-compose up -d -``` - -The API will be available at: -- API: `http://localhost:8000` -- API Docs (Swagger): `http://localhost:8000/docs` -- Database: `localhost:5432` - -Stop the stack: -```bash -make docker-down -# or -docker-compose down -``` -#### Option 2: Database Only (for local development) - -Start just the PostgreSQL database: - -```bash -make docker-up -# or -docker-compose up -d postgres +# API available at: http://localhost:8000 +# API docs at: http://localhost:8000/docs ``` -Then run the API locally (see "Running Locally" section below). - ### Running Locally -1. Install dependencies: ```bash +# 1. Install dependencies poetry install --with dev -``` -2. Start PostgreSQL (via Docker Compose or locally) +# 2. Start development database +make dev-db -3. Run the API server: -```bash -poetry run uvicorn app.main:app --reload +# 3. 
Run the API +make run-dev + +# API available at: http://localhost:8000 ``` ## API Specification -Base URL: `http://localhost:8000` - -Interactive API docs: `http://localhost:8000/docs` +**Base URL:** `http://localhost:8000` ### Endpoints -#### Create Server -```http -POST /servers -Content-Type: application/json +| Method | Endpoint | Description | +|--------|----------|-------------| +| `POST` | `/servers` | Create a new server | +| `GET` | `/servers` | List all servers (with pagination) | +| `GET` | `/servers/{id}` | Get server by ID | +| `PUT` | `/servers/{id}` | Update server | +| `DELETE` | `/servers/{id}` | Delete server | -{ - "hostname": "web-server-01", - "ip_address": "192.168.1.100", - "name": "Web Server 1", - "state": "active" -} -``` - -**Response:** `201 Created` -```json -{ - "id": 1, - "hostname": "web-server-01", - "ip_address": "192.168.1.100", - "name": "Web Server 1", - "state": "active", - "created_at": "2024-01-15T10:30:00", - "updated_at": "2024-01-15T10:30:00" -} -``` +### Server Model -#### List All Servers -```http -GET /servers?skip=0&limit=100 -``` - -**Response:** `200 OK` ```json -[ - { - "id": 1, - "hostname": "web-server-01", - "ip_address": "192.168.1.100", - "name": "Web Server 1", - "state": "active", - "created_at": "2024-01-15T10:30:00", - "updated_at": "2024-01-15T10:30:00" - } -] -``` - -#### Get Server by ID -```http -GET /servers/{id} -``` - -**Response:** `200 OK` (same format as create response) - -#### Update Server -```http -PUT /servers/{id} -Content-Type: application/json - { - "hostname": "web-server-01-updated", - "ip_address": "192.168.1.101", - "name": "Updated Web Server", - "state": "offline" + "hostname": "web-server-01", // Required, unique + "ip_address": "192.168.1.100", // Required, valid IP + "name": "Web Server 1", // Optional + "state": "active" // Required: active|offline|retired } ``` -**Response:** `200 OK` (updated server object) +### Example API Usage -**Note:** All fields are optional in update requests - -#### Delete Server -```http -DELETE /servers/{id} -``` - -**Response:** `204 No Content` - -### Validation Rules +```bash +# Create a server +curl -X POST http://localhost:8000/servers \ + -H "Content-Type: application/json" \ + -d '{"hostname": "web-01", "ip_address": "192.168.1.100", "state": "active"}' -- **hostname**: Must be unique, 1-255 characters -- **ip_address**: Must be a valid IPv4 or IPv6 address -- **state**: One of: `active`, `offline`, `retired` -- **name**: Optional, max 255 characters +# List servers +curl http://localhost:8000/servers -### Error Responses +# Get server by ID +curl http://localhost:8000/servers/1 -**400 Bad Request:** -```json -{ - "detail": "Server with hostname 'web-server-01' already exists" -} -``` +# Update server +curl -X PUT http://localhost:8000/servers/1 \ + -H "Content-Type: application/json" \ + -d '{"state": "offline"}' -**404 Not Found:** -```json -{ - "detail": "Server with id 1 not found" -} +# Delete server +curl -X DELETE http://localhost:8000/servers/1 ``` ## CLI Specification -The CLI tool `server-mgmt` provides the same functionality as the API. 
+**Command:** `poetry run server-mgmt`

-### Installation
-
-After installing dependencies with Poetry, the CLI is automatically available:
+### Commands

-```bash
-poetry install
-```
+| Command | Description |
+|---------|-------------|
+| `create` | Create a new server |
+| `list` | List all servers |
+| `get <id>` | Get server by ID |
+| `update <id>` | Update server |
+| `delete <id>` | Delete server |

-### Commands
-
-#### Create a Server
+### CLI Usage Examples

 ```bash
+# Create a server
 poetry run server-mgmt create \
-    --hostname web-server-01 \
+    --hostname web-01 \
     --ip-address 192.168.1.100 \
-    --name "Web Server 1" \
+    --name "Web Server" \
     --state active
-```
-
-**Options:**
-- `--hostname` (required): Unique server hostname
-- `--ip-address` (required): Server IP address
-- `--name` (optional): Server name/description
-- `--state` (optional): Server state (default: active)

-#### List All Servers
-```bash
-# Table format (default)
+# List servers (table format)
 poetry run server-mgmt list

-# JSON format
+# List servers (JSON format)
 poetry run server-mgmt list --format json

-# With pagination
-poetry run server-mgmt list --skip 10 --limit 20
-```
-
-**Options:**
-- `--skip`: Number of records to skip (default: 0)
-- `--limit`: Maximum records to return (default: 100)
-- `--format`: Output format - `table` or `json` (default: table)

-#### Get Server by ID
-```bash
-# Table format
+# Get server by ID
 poetry run server-mgmt get 1

-# JSON format
-poetry run server-mgmt get 1 --format json
-```

-#### Update Server
-```bash
-poetry run server-mgmt update 1 \
-    --hostname web-server-01-updated \
-    --ip-address 192.168.1.101 \
-    --name "Updated Web Server" \
-    --state offline
-```
-
-**Options:** (all optional, but at least one required)
-- `--hostname`: New server hostname
-- `--ip-address`: New server IP address
-- `--name`: New server name/description
-- `--state`: New server state
+# Update server
+poetry run server-mgmt update 1 --state offline --name "Offline Server"

-#### Delete Server
-```bash
-# With confirmation prompt
+# Delete server (with confirmation)
 poetry run server-mgmt delete 1

-# Skip confirmation
+# Delete server (skip confirmation)
 poetry run server-mgmt delete 1 --yes
 ```

-**Options:**
-- `--yes`, `-y`: Skip confirmation prompt
-
-### CLI Help
-
-Get help for any command:
-```bash
-poetry run server-mgmt --help
-poetry run server-mgmt create --help
-poetry run server-mgmt list --help
-# etc.
-``` - -## Examples +### CLI Options -### API Example (using curl) - -```bash -# Create a server -curl -X POST http://localhost:8000/servers \ - -H "Content-Type: application/json" \ - -d '{ - "hostname": "db-server-01", - "ip_address": "10.0.1.50", - "name": "Primary Database", - "state": "active" - }' - -# List servers -curl http://localhost:8000/servers - -# Get specific server -curl http://localhost:8000/servers/1 - -# Update server -curl -X PUT http://localhost:8000/servers/1 \ - -H "Content-Type: application/json" \ - -d '{"state": "offline"}' +**Create:** +- `--hostname` (required): Unique server hostname +- `--ip-address` (required): Valid IP address +- `--name` (optional): Server description +- `--state` (optional): Server state (default: active) -# Delete server -curl -X DELETE http://localhost:8000/servers/1 -``` +**List:** +- `--skip` (optional): Number of records to skip +- `--limit` (optional): Max records to return +- `--format` (optional): Output format (table|json) -### CLI Example +**Update:** +- `--hostname`: New hostname +- `--ip-address`: New IP address +- `--name`: New name +- `--state`: New state +- *At least one field required* -```bash -# Create a server -poetry run server-mgmt create \ - --hostname db-server-01 \ - --ip-address 10.0.1.50 \ - --name "Primary Database" \ - --state active +**Delete:** +- `--yes`, `-y`: Skip confirmation prompt -# List all servers -poetry run server-mgmt list +## Validation Rules -# Get server details -poetry run server-mgmt get 1 +- **hostname**: Must be unique, 1-255 characters +- **ip_address**: Must be a valid IPv4 or IPv6 address +- **state**: One of: `active`, `offline`, `retired` +- **name**: Optional, max 255 characters -# Update server state -poetry run server-mgmt update 1 --state offline +## Interactive API Documentation -# Delete server -poetry run server-mgmt delete 1 --yes -``` +Visit `http://localhost:8000/docs` for interactive Swagger UI documentation. 
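+
+As a rough illustration of how the validation rules above could be expressed as a Pydantic v2 schema (the real schema in `app/schemas/` is not shown in this patch series, so the field details below are assumptions; hostname uniqueness in particular is a database-level constraint, not a schema check):
+
+```python
+# Illustrative sketch only -- not the repository's actual schema module.
+from enum import Enum
+from ipaddress import ip_address as parse_ip
+
+from pydantic import BaseModel, Field, field_validator
+
+
+class ServerState(str, Enum):
+    ACTIVE = "active"
+    OFFLINE = "offline"
+    RETIRED = "retired"
+
+
+class ServerCreate(BaseModel):
+    hostname: str = Field(min_length=1, max_length=255)  # uniqueness is enforced by the DB
+    ip_address: str
+    name: str | None = Field(default=None, max_length=255)
+    state: ServerState = ServerState.ACTIVE
+
+    @field_validator("ip_address")
+    @classmethod
+    def validate_ip(cls, v: str) -> str:
+        parse_ip(v)  # raises ValueError for anything that is not valid IPv4/IPv6
+        return v
+```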
-## Testing +--- -Run the test suite: -```bash -poetry run pytest -``` - -Run with coverage: -```bash -poetry run pytest --cov=app tests/ -``` +For detailed documentation, see [API-Detailed.md](./API-Detailed.md) diff --git a/Makefile b/Makefile index 292755b..d36f278 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,4 @@ -.PHONY: help init setup check-poetry install install-prod setup-db setup-precommit docker-build docker-up docker-up-full docker-down docker-logs docker-clean run run-dev run-prod test test-cov format lint ruff check clean +.PHONY: help init setup check-poetry install install-prod dev-db dev-db-down dev-db-clean setup-precommit docker-build docker-up docker-up-full docker-down docker-logs docker-clean run run-dev run-prod test test-cov format lint ruff check clean help: @echo "Available commands:" @@ -7,9 +7,13 @@ help: @echo " make check-poetry - Check if Poetry is installed" @echo " make install - Install all dependencies (dev + prod)" @echo " make install-prod - Install production dependencies only" - @echo " make setup-db - Create PostgreSQL databases (app_db, test_db)" @echo " make setup-precommit - Install pre-commit hooks" @echo "" + @echo "Development database:" + @echo " make dev-db - Start development PostgreSQL database" + @echo " make dev-db-down - Stop development database" + @echo " make dev-db-clean - Stop development database and remove volumes" + @echo "" @echo "Docker commands:" @echo " make docker-build - Build Docker image for the application" @echo " make docker-up - Start PostgreSQL database with Docker Compose" @@ -27,7 +31,7 @@ help: @echo "" @echo "Code quality commands:" @echo " make format - Format code with ruff, black and isort" - @echo " make lint - Run linting with ruff, flake8 and mypy" + @echo " make lint - Run linting with ruff" @echo " make ruff - Run ruff linter and formatter" @echo " make check - Run all code quality checks" @echo " make clean - Clean up cache files" @@ -52,8 +56,8 @@ init: check-poetry @echo "" @echo "Next steps:" @echo " 1. Start PostgreSQL database:" - @echo " - Using Docker: make docker-up (recommended)" - @echo " - Or install PostgreSQL locally and run: make setup-db" + @echo " - For development: make dev-db (recommended)" + @echo " - Or full stack: make docker-up-full" @echo " 2. Run the app: make run" @echo " 3. Run tests: make test" @echo "" @@ -86,20 +90,31 @@ check-poetry: exit 1; \ fi -# Create PostgreSQL databases -setup-db: - @echo "🗄️ Creating PostgreSQL databases..." - @if command -v createdb >/dev/null 2>&1; then \ - createdb app_db 2>/dev/null && echo "✅ Created app_db" || echo "⚠️ app_db already exists or failed"; \ - createdb test_db 2>/dev/null && echo "✅ Created test_db" || echo "⚠️ test_db already exists or failed"; \ - else \ - echo "❌ createdb command not found. Please install PostgreSQL."; \ - echo ""; \ - echo "macOS: brew install postgresql"; \ - echo "Ubuntu/Debian: sudo apt-get install postgresql"; \ - echo ""; \ - exit 1; \ - fi +# Start development PostgreSQL database +dev-db: + @echo "🐳 Starting development PostgreSQL database..." + docker-compose -f docker-compose.dev.yml up -d + @echo "⏳ Waiting for PostgreSQL to be ready..." + @sleep 3 + @echo "✅ Development PostgreSQL is running!" + @echo "" + @echo "Connection details:" + @echo " Host: localhost" + @echo " Port: 5432" + @echo " User: dev" + @echo " Password: postgres" + @echo " Database: app_db" + @echo "" + +dev-db-down: + @echo "🛑 Stopping development PostgreSQL..." 
+	docker-compose -f docker-compose.dev.yml down
+	@echo "✅ Development database stopped!"
+
+dev-db-clean:
+	@echo "🧹 Stopping development database and removing volumes..."
+	docker-compose -f docker-compose.dev.yml down -v
+	@echo "✅ Development database cleaned!"

 # Setup pre-commit hooks
 setup-precommit:
@@ -185,8 +200,6 @@ format:

 lint:
 	poetry run ruff check app/ tests/
-	poetry run flake8 app/ tests/
-	poetry run mypy app/

 ruff:
 	poetry run ruff check app/ tests/ --fix
diff --git a/app/core/config.py b/app/core/config.py
index 90c292a..60a14a9 100644
--- a/app/core/config.py
+++ b/app/core/config.py
@@ -59,8 +59,8 @@ class DevelopmentSettings(SettingsBase):
     APP_ENV: str = "development"
     DEBUG: bool = True
     LOG_LEVEL: str = "DEBUG"
-    DATABASE_URL: str = "postgresql://postgres:postgres@localhost:5432/app_db"
-    TEST_DATABASE_URL: str = "postgresql://postgres:postgres@localhost:5432/test_db"
+    DATABASE_URL: str = "postgresql://dev:postgres@localhost:5432/app_db"
+    TEST_DATABASE_URL: str = ""
     allowed_origins_str: str = "http://localhost:3000,http://localhost:8000,http://localhost:5173"
diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml
new file mode 100644
index 0000000..8fc319f
--- /dev/null
+++ b/docker-compose.dev.yml
@@ -0,0 +1,24 @@
+version: '3.8'
+
+services:
+  postgres:
+    image: postgres:16-alpine
+    container_name: postgres-dev
+    environment:
+      POSTGRES_USER: dev
+      POSTGRES_PASSWORD: postgres
+      POSTGRES_DB: app_db
+    ports:
+      - "5432:5432"
+    volumes:
+      - postgres_dev_data:/var/lib/postgresql/data
+    healthcheck:
+      test: ["CMD-SHELL", "pg_isready -U dev"]
+      interval: 10s
+      timeout: 5s
+      retries: 5
+    restart: unless-stopped
+
+volumes:
+  postgres_dev_data:
+    driver: local

From 3cdab901dff93291150b6eae943b9cf119789aba Mon Sep 17 00:00:00 2001
From: dhuzjak
Date: Thu, 18 Dec 2025 20:00:20 +0100
Subject: [PATCH 17/17] Final touch

Separate .env loading for dev and production
Load .env.dev for local development
Load .env.production into the Docker Compose app service
---
 .env.development => .env.dev |  0
 .env.example                 |  1 +
 API-Detailed.md              |  2 ++
 API.md                       |  2 ++
 PROJECT_STRUCTURE.md         |  2 +-
 app/core/config.py           | 13 ++++++++++++-
 docker-compose.yml           |  5 ++---
 7 files changed, 20 insertions(+), 5 deletions(-)
 rename .env.development => .env.dev (100%)

diff --git a/.env.development b/.env.dev
similarity index 100%
rename from .env.development
rename to .env.dev
diff --git a/.env.example b/.env.example
index 9ee5e55..64afa32 100644
--- a/.env.example
+++ b/.env.example
@@ -1,5 +1,6 @@
 # Environment Selection
 # Options: development, testing, production
+# Loads from: .env.dev, .env.testing, or .env.production respectively
 APP_ENV=development

 # Application Settings
diff --git a/API-Detailed.md b/API-Detailed.md
index 9ab7d0f..880e6c5 100644
--- a/API-Detailed.md
+++ b/API-Detailed.md
@@ -17,6 +17,8 @@ make docker-up-full
 docker-compose up -d
 ```

+**Note:** Docker Compose loads configuration from the `.env.production` file.
+
 The API will be available at:
 - API: `http://localhost:8000`
 - API Docs (Swagger): `http://localhost:8000/docs`
 - Database: `localhost:5432`
diff --git a/API.md b/API.md
index 2c6ed77..f7e6076 100644
--- a/API.md
+++ b/API.md
@@ -12,6 +12,8 @@ docker-compose up -d
 # API docs at: http://localhost:8000/docs
 ```

+**Note:** Docker uses `.env.production` for configuration.
+
 ### Running Locally

 ```bash
diff --git a/PROJECT_STRUCTURE.md b/PROJECT_STRUCTURE.md
index b83a1ca..a781436 100644
--- a/PROJECT_STRUCTURE.md
+++ b/PROJECT_STRUCTURE.md
@@ -271,7 +271,7 @@ The project uses layered environment file loading:
 3. **Environment variables** - Highest priority, overrides all files

 **Available env files:**
-- `.env.development` - Development defaults (committed)
+- `.env.dev` - Development defaults (committed)
 - `.env.testing` - Testing configuration (committed)
 - `.env.production.example` - Production template (committed as example)
 - `.env.production` - Actual production config (NOT committed, gitignored)
diff --git a/app/core/config.py b/app/core/config.py
index 60a14a9..ad64e1c 100644
--- a/app/core/config.py
+++ b/app/core/config.py
@@ -8,6 +8,17 @@
 from pydantic_settings import BaseSettings, SettingsConfigDict


+def _get_env_file_suffix() -> str:
+    """Map APP_ENV to the correct .env file suffix."""
+    env = os.getenv("APP_ENV", "development").lower()
+    env_file_map = {
+        "development": "dev",
+        "production": "production",
+        "testing": "testing",
+    }
+    return env_file_map.get(env, env)
+
+
 class SettingsBase(BaseSettings):
     """Base settings shared across all environments."""

@@ -37,7 +48,7 @@ class SettingsBase(BaseSettings):
     EXTERNAL_API_URL: str = ""

     model_config = SettingsConfigDict(
-        env_file=(".env", f".env.{os.getenv('APP_ENV', 'development')}"),
+        env_file=(".env", f".env.{_get_env_file_suffix()}"),
         env_file_encoding="utf-8",
         case_sensitive=True,
         extra="ignore",
diff --git a/docker-compose.yml b/docker-compose.yml
index 46b8e10..4862017 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -25,9 +25,8 @@ services:
       context: .
       dockerfile: Dockerfile
     container_name: server-inventory-app
-    environment:
-      APP_ENV: production
-      DATABASE_URL: postgresql://postgres:postgres@postgres:5432/app_db
+    env_file:
+      - .env.production
     ports:
       - "8000:8000"
     depends_on: