From 61d05220a86d878b62d29fe310c15ebde0b103fe Mon Sep 17 00:00:00 2001
From: Milos Ivancevic
Date: Thu, 8 Jan 2026 02:08:26 +0100
Subject: [PATCH 1/8] move to fork

---
 .gitignore         | 73 +++++++++++++++++++++++++++++++++++
 API.md             | 93 +++++++++++++++++++++++++++++++++++++++++++++
 Dockerfile         | 11 ++++++
 app/__init__.py    |  0
 app/database.py    | 24 ++++++++++++
 app/main.py        | 10 +++++
 app/models.py      | 29 ++++++++++++++
 app/routers.py     | 95 ++++++++++++++++++++++++++++++++++++++++++++++
 cli/__init__.py    |  0
 cli/main.py        | 92 ++++++++++++++++++++++++++++++++++++++++++++
 docker-compose.yml | 35 +++++++++++++++++
 init.sql           | 15 ++++++++
 pyproject.toml     | 31 +++++++++++++++
 tests/__init__.py  |  0
 tests/test_api.py  | 90 +++++++++++++++++++++++++++++++++++++++++++
 tests/test_cli.py  | 27 +++++++++++++
 16 files changed, 625 insertions(+)
 create mode 100644 .gitignore
 create mode 100644 API.md
 create mode 100644 Dockerfile
 create mode 100644 app/__init__.py
 create mode 100644 app/database.py
 create mode 100644 app/main.py
 create mode 100644 app/models.py
 create mode 100644 app/routers.py
 create mode 100644 cli/__init__.py
 create mode 100644 cli/main.py
 create mode 100644 docker-compose.yml
 create mode 100644 init.sql
 create mode 100644 pyproject.toml
 create mode 100644 tests/__init__.py
 create mode 100644 tests/test_api.py
 create mode 100644 tests/test_cli.py

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..b34c9ad
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,73 @@
+# Python
+__pycache__/
+__pycache__
+__pycache__/*
+*.py[cod]
+*$py.class
+*.so
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# Virtual Environment
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# pyenv
+.python-version
+
+# Editor / IDE
+.vscode/
+.idea/
+*.swp
+*.swo
+
+# Docker
+.dockerignore
+
+# OS
+.DS_Store
+Thumbs.db
+
+# Project specific

diff --git a/API.md b/API.md
new file mode 100644
index 0000000..979b9e7
--- /dev/null
+++ b/API.md
@@ -0,0 +1,93 @@
+# Inventory Management System API & CLI
+
+## Overview
+This project provides a REST API and a CLI for tracking server inventory. It uses FastAPI, PostgreSQL, and Typer.
+
+## Requirements
+- Docker & Docker Compose
+- Python 3.10+ (for local CLI usage, optional)
+
+## Running the Stack
+To start the API and Database:
+```bash
+docker-compose up --build
+```
+The API will be available at `http://localhost:8000`.
+Documentation (Swagger UI) is available at `http://localhost:8000/docs`.
+
+## Running Tests
+Tests are written using `pytest`. You can run them locally if you have a valid Python environment:
+
+1. Install dependencies:
+   ```bash
+   pip install .[test]
+   ```
+2. Run tests:
+   ```bash
+   pytest
+   ```
+
+## CLI Usage
+The CLI is a Python script in `cli/main.py`.
+
+1. Ensure the required dependencies are installed, or use the same environment:
+   ```bash
+   pip install typer requests
+   ```
+2. 
Run commands: + ```bash + # Create a server + python cli/main.py create web-01 192.168.1.5 active + + # List servers + python cli/main.py list + + # Get a server + python cli/main.py get 1 + + # Update a server + python cli/main.py update 1 --state offline + + # Delete a server + python cli/main.py delete 1 + ``` + +## API Specification + +### Endpoints + +#### POST /servers +Create a new server. +- **Body**: + ```json + { + "hostname": "string", + "ip_address": "string (IPv4)", + "state": "active|offline|retired" + } + ``` +- **Response**: 201 Created + +#### GET /servers +List all servers. +- **Response**: 200 OK (List of servers) + +#### GET /servers/{id} +Get a specific server. +- **Response**: 200 OK or 404 Not Found + +#### PUT /servers/{id} +Update a server. +- **Body** (all fields optional): + ```json + { + "hostname": "string", + "ip_address": "string", + "state": "string" + } + ``` +- **Response**: 200 OK + +#### DELETE /servers/{id} +Delete a server. +- **Response**: 204 No Content diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..d12a3f7 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,11 @@ +FROM python:3.11-slim + +WORKDIR /app + +COPY pyproject.toml . +RUN pip install --no-cache-dir . + +COPY app ./app +COPY cli ./cli + +CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"] diff --git a/app/__init__.py b/app/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/database.py b/app/database.py new file mode 100644 index 0000000..1603e27 --- /dev/null +++ b/app/database.py @@ -0,0 +1,24 @@ +import os +import psycopg +from psycopg.rows import dict_row +from contextlib import asynccontextmanager + +DATABASE_URL = os.getenv("DATABASE_URL", "postgresql://admin:password@localhost:5432/inventory") + +class Database: + def __init__(self): + self.conn_str = DATABASE_URL + + @asynccontextmanager + async def get_connection(self): + conn = await psycopg.AsyncConnection.connect(self.conn_str, row_factory=dict_row) + try: + yield conn + finally: + await conn.close() + +db = Database() + +async def get_db_connection(): + async with db.get_connection() as conn: + yield conn diff --git a/app/main.py b/app/main.py new file mode 100644 index 0000000..3da23b8 --- /dev/null +++ b/app/main.py @@ -0,0 +1,10 @@ +from fastapi import FastAPI +from app.routers import router + +app = FastAPI(title="Server Inventory API") + +app.include_router(router) + +@app.get("/") +def read_root(): + return {"message": "Welcome to the Server Inventory API"} diff --git a/app/models.py b/app/models.py new file mode 100644 index 0000000..edc5597 --- /dev/null +++ b/app/models.py @@ -0,0 +1,29 @@ +from enum import Enum +from datetime import datetime +from ipaddress import IPv4Address +from typing import Optional +from pydantic import BaseModel, ConfigDict, Field + +class ServerState(str, Enum): + active = "active" + offline = "offline" + retired = "retired" + +class ServerBase(BaseModel): + hostname: str = Field(..., min_length=1, max_length=255) + ip_address: IPv4Address + state: ServerState + +class ServerCreate(ServerBase): + pass + +class ServerUpdate(BaseModel): + hostname: Optional[str] = Field(None, min_length=1, max_length=255) + ip_address: Optional[IPv4Address] = None + state: Optional[ServerState] = None + +class Server(ServerBase): + id: int + created_at: datetime + + model_config = ConfigDict(from_attributes=True) diff --git a/app/routers.py b/app/routers.py new file mode 100644 index 0000000..100726e --- /dev/null +++ b/app/routers.py @@ -0,0 +1,95 @@ +from 
fastapi import APIRouter, Depends, HTTPException, status +from psycopg import AsyncConnection +from psycopg.errors import UniqueViolation +from typing import List + +from app.database import get_db_connection +from app.models import Server, ServerCreate, ServerUpdate + +router = APIRouter(prefix="/servers", tags=["servers"]) + +@router.post("/", response_model=Server, status_code=status.HTTP_201_CREATED) +async def create_server(server: ServerCreate, conn: AsyncConnection = Depends(get_db_connection)): + try: + async with conn.cursor() as cur: + await cur.execute( + """ + INSERT INTO servers (hostname, ip_address, state) + VALUES (%s, %s, %s) + RETURNING id, hostname, ip_address, state, created_at + """, + (server.hostname, str(server.ip_address), server.state.value) + ) + new_server = await cur.fetchone() + await conn.commit() + return new_server + except UniqueViolation: + await conn.rollback() + raise HTTPException(status_code=400, detail="Server with this hostname already exists") + +@router.get("/", response_model=List[Server]) +async def list_servers(conn: AsyncConnection = Depends(get_db_connection)): + async with conn.cursor() as cur: + await cur.execute("SELECT id, hostname, ip_address, state, created_at FROM servers") + servers = await cur.fetchall() + return servers + +@router.get("/{server_id}", response_model=Server) +async def get_server(server_id: int, conn: AsyncConnection = Depends(get_db_connection)): + async with conn.cursor() as cur: + await cur.execute( + "SELECT id, hostname, ip_address, state, created_at FROM servers WHERE id = %s", + (server_id,) + ) + server = await cur.fetchone() + if not server: + raise HTTPException(status_code=404, detail="Server not found") + return server + +@router.put("/{server_id}", response_model=Server) +async def update_server(server_id: int, server_update: ServerUpdate, conn: AsyncConnection = Depends(get_db_connection)): + # Build query dynamically based on set fields + update_data = server_update.model_dump(exclude_unset=True) + if not update_data: + raise HTTPException(status_code=400, detail="No fields to update") + + set_clauses = [] + values = [] + for key, value in update_data.items(): + if key == 'ip_address': + value = str(value) + elif key == 'state': + value = value.value + + set_clauses.append(f"{key} = %s") + values.append(value) + + values.append(server_id) + + query = f""" + UPDATE servers + SET {", ".join(set_clauses)} + WHERE id = %s + RETURNING id, hostname, ip_address, state, created_at + """ + + try: + async with conn.cursor() as cur: + await cur.execute(query, values) + updated_server = await cur.fetchone() + if not updated_server: + raise HTTPException(status_code=404, detail="Server not found") + await conn.commit() + return updated_server + except UniqueViolation: + await conn.rollback() + raise HTTPException(status_code=400, detail="Server with this hostname already exists") + +@router.delete("/{server_id}", status_code=status.HTTP_204_NO_CONTENT) +async def delete_server(server_id: int, conn: AsyncConnection = Depends(get_db_connection)): + async with conn.cursor() as cur: + await cur.execute("DELETE FROM servers WHERE id = %s RETURNING id", (server_id,)) + deleted = await cur.fetchone() + if not deleted: + raise HTTPException(status_code=404, detail="Server not found") + await conn.commit() diff --git a/cli/__init__.py b/cli/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/cli/main.py b/cli/main.py new file mode 100644 index 0000000..daf6a53 --- /dev/null +++ b/cli/main.py @@ -0,0 +1,92 @@ 
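+"""Typer-based CLI client for the Server Inventory API.
+
+Each command wraps one REST endpoint under /servers and prints the JSON
+response; HTTP and connection errors are written to stderr.
+"""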
+import typer
+import requests
+import json
+from typing import Optional
+from enum import Enum
+
+app = typer.Typer()
+
+API_URL = "http://localhost:8000/servers"
+
+class ServerState(str, Enum):
+    active = "active"
+    offline = "offline"
+    retired = "retired"
+
+@app.command()
+def create(hostname: str, ip_address: str, state: ServerState):
+    """Create a new server."""
+    payload = {
+        "hostname": hostname,
+        "ip_address": ip_address,
+        "state": state.value
+    }
+    try:
+        response = requests.post(API_URL, json=payload)
+        response.raise_for_status()
+        typer.echo(json.dumps(response.json(), indent=2))
+    except requests.exceptions.HTTPError as e:
+        typer.echo(f"Error: {e.response.text}", err=True)
+    except Exception as e:
+        typer.echo(f"Error: {e}", err=True)
+
+@app.command("list")
+def list_servers():
+    """List all servers."""
+    try:
+        response = requests.get(API_URL)
+        response.raise_for_status()
+        typer.echo(json.dumps(response.json(), indent=2))
+    except Exception as e:
+        typer.echo(f"Error: {e}", err=True)
+
+@app.command()
+def get(server_id: int):
+    """Get a specific server by ID."""
+    try:
+        response = requests.get(f"{API_URL}/{server_id}")
+        response.raise_for_status()
+        typer.echo(json.dumps(response.json(), indent=2))
+    except requests.exceptions.HTTPError as e:
+        typer.echo(f"Error: {e.response.text}", err=True)
+    except Exception as e:
+        typer.echo(f"Error: {e}", err=True)
+
+@app.command()
+def update(server_id: int, hostname: Optional[str] = None, ip_address: Optional[str] = None, state: Optional[ServerState] = None):
+    """Update a server."""
+    payload = {}
+    if hostname:
+        payload["hostname"] = hostname
+    if ip_address:
+        payload["ip_address"] = ip_address
+    if state:
+        payload["state"] = state.value
+
+    if not payload:
+        typer.echo("No updates specified.")
+        return
+
+    try:
+        response = requests.put(f"{API_URL}/{server_id}", json=payload)
+        response.raise_for_status()
+        typer.echo(json.dumps(response.json(), indent=2))
+    except requests.exceptions.HTTPError as e:
+        typer.echo(f"Error: {e.response.text}", err=True)
+    except Exception as e:
+        typer.echo(f"Error: {e}", err=True)
+
+@app.command()
+def delete(server_id: int):
+    """Delete a server."""
+    try:
+        response = requests.delete(f"{API_URL}/{server_id}")
+        response.raise_for_status()
+        typer.echo(f"Server {server_id} deleted successfully.")
+    except requests.exceptions.HTTPError as e:
+        typer.echo(f"Error: {e.response.text}", err=True)
+    except Exception as e:
+        typer.echo(f"Error: {e}", err=True)
+
+if __name__ == "__main__":
+    app()

diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 0000000..18ff283
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,35 @@
+version: '3.8'
+
+services:
+  db:
+    image: postgres:15-alpine
+    environment:
+      POSTGRES_USER: admin
+      POSTGRES_PASSWORD: password
+      POSTGRES_DB: inventory
+    volumes:
+      - postgres_data:/var/lib/postgresql/data
+      - ./init.sql:/docker-entrypoint-initdb.d/init.sql
+    ports:
+      - "5432:5432"
+    healthcheck:
+      test: [ "CMD-SHELL", "pg_isready -U admin -d inventory" ]
+      interval: 5s
+      timeout: 5s
+      retries: 5
+
+  api:
+    build: .
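+    # Built from the repo-root Dockerfile; the bind mount and --reload
+    # flag below let code changes take effect without rebuilding the image.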
+ ports: + - "8000:8000" + environment: + DATABASE_URL: postgresql://admin:password@db:5432/inventory + depends_on: + db: + condition: service_healthy + volumes: + - .:/app + command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload + +volumes: + postgres_data: diff --git a/init.sql b/init.sql new file mode 100644 index 0000000..09aefbb --- /dev/null +++ b/init.sql @@ -0,0 +1,15 @@ +-- Create ENUM type for server state +DO $$ BEGIN + CREATE TYPE server_state AS ENUM ('active', 'offline', 'retired'); +EXCEPTION + WHEN duplicate_object THEN null; +END $$; + +-- Create servers table +CREATE TABLE IF NOT EXISTS servers ( + id SERIAL PRIMARY KEY, + hostname VARCHAR(255) NOT NULL UNIQUE, + ip_address INET, + state server_state NOT NULL, + created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP +); diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..b95d710 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,31 @@ +[project] +name = "server-inventory" +version = "0.1.0" +description = "Server Inventory Management System" +authors = [ + {name = "User", email = "user@example.com"}, +] +dependencies = [ + "fastapi>=0.109.0", + "uvicorn[standard]>=0.27.0", + "psycopg[binary]>=3.1.17", + "pydantic>=2.6.0", + "pydantic-settings>=2.1.0", + "typer[all]>=0.9.0", + "requests>=2.31.0", +] +requires-python = ">=3.10" + +[project.optional-dependencies] +test = [ + "pytest>=8.0.0", + "pytest-asyncio>=0.23.0", + "httpx>=0.26.0", +] + +[build-system] +requires = ["setuptools>=42", "wheel"] +build-backend = "setuptools.build_meta" + +[tool.setuptools.packages.find] +where = ["."] diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/test_api.py b/tests/test_api.py new file mode 100644 index 0000000..e176cfb --- /dev/null +++ b/tests/test_api.py @@ -0,0 +1,90 @@ +import pytest +from httpx import AsyncClient +from unittest.mock import AsyncMock, MagicMock +from app.main import app +from app.database import get_db_connection +from app.models import ServerState + +@pytest.fixture +def mock_db_cursor(): + cursor = AsyncMock() + # Mock context manager behavior for cursor + cursor.__aenter__.return_value = cursor + cursor.__aexit__.return_value = None + return cursor + +@pytest.fixture +def mock_db_connection(mock_db_cursor): + connection = AsyncMock() + connection.cursor = MagicMock(return_value=mock_db_cursor) + connection.commit = AsyncMock() + connection.rollback = AsyncMock() + return connection + +@pytest.fixture +def override_get_db(mock_db_connection): + async def _override(): + yield mock_db_connection + return _override + +from httpx import AsyncClient, ASGITransport + +@pytest.fixture +def client(override_get_db): + app.dependency_overrides[get_db_connection] = override_get_db + transport = ASGITransport(app=app) + return AsyncClient(transport=transport, base_url="http://test") + +@pytest.mark.asyncio +async def test_create_server(client, mock_db_cursor): + mock_db_cursor.fetchone.return_value = { + "id": 1, + "hostname": "web-01", + "ip_address": "192.168.1.10", + "state": "active", + "created_at": "2023-01-01T00:00:00" + } + + response = await client.post("/servers/", json={ + "hostname": "web-01", + "ip_address": "192.168.1.10", + "state": "active" + }) + + assert response.status_code == 201 + data = response.json() + assert data["hostname"] == "web-01" + assert data["state"] == "active" + +@pytest.mark.asyncio +async def test_list_servers(client, mock_db_cursor): + mock_db_cursor.fetchall.return_value = [ + { 
+ "id": 1, + "hostname": "web-01", + "ip_address": "192.168.1.10", + "state": "active", + "created_at": "2023-01-01T00:00:00" + } + ] + + response = await client.get("/servers/") + assert response.status_code == 200 + data = response.json() + assert len(data) == 1 + assert data[0]["hostname"] == "web-01" + +@pytest.mark.asyncio +async def test_get_server_not_found(client, mock_db_cursor): + mock_db_cursor.fetchone.return_value = None + response = await client.get("/servers/999") + assert response.status_code == 404 + +@pytest.mark.asyncio +async def test_create_server_invalid_ip(client): + response = await client.post("/servers/", json={ + "hostname": "test", + "ip_address": "invalid-ip", + "state": "active" + }) + assert response.status_code == 422 diff --git a/tests/test_cli.py b/tests/test_cli.py new file mode 100644 index 0000000..e2ada41 --- /dev/null +++ b/tests/test_cli.py @@ -0,0 +1,27 @@ +from typer.testing import CliRunner +from cli.main import app +from unittest.mock import patch, MagicMock + +runner = CliRunner() + +def test_list_servers(): + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.json.return_value = [{"id": 1, "hostname": "test"}] + + with patch("requests.get", return_value=mock_response) as mock_get: + result = runner.invoke(app, ["list"]) + assert result.exit_code == 0 + assert "test" in result.stdout + mock_get.assert_called_once() + +def test_create_server(): + mock_response = MagicMock() + mock_response.status_code = 201 + mock_response.json.return_value = {"id": 1, "hostname": "new", "state": "active"} + + with patch("requests.post", return_value=mock_response) as mock_post: + result = runner.invoke(app, ["create", "new", "1.1.1.1", "active"]) + assert result.exit_code == 0 + assert "new" in result.stdout + mock_post.assert_called_once() From 2f6cee50270ea074f932360f9f0a8530b2048e56 Mon Sep 17 00:00:00 2001 From: Milos Ivancevic Date: Thu, 8 Jan 2026 05:35:12 +0100 Subject: [PATCH 2/8] hardening --- .env.example | 10 ++++ Dockerfile | 41 +++++++++++++-- app/config.py | 8 +++ app/database.py | 4 +- docker-compose.yml | 10 ++-- tests/conftest.py | 67 +++++++++++++++++++++++++ tests/test_api.py | 121 +++++++++++++++++++++++---------------------- 7 files changed, 191 insertions(+), 70 deletions(-) create mode 100644 .env.example create mode 100644 app/config.py create mode 100644 tests/conftest.py diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..d213320 --- /dev/null +++ b/.env.example @@ -0,0 +1,10 @@ +# Database Configuration +POSTGRES_USER=admin +POSTGRES_PASSWORD=password +POSTGRES_DB=inventory +POSTGRES_HOST=db +POSTGRES_PORT=5432 + +# Application Configuration +# Constructed automatically in docker-compose, but if running locally: +# DATABASE_URL=postgresql://admin:password@localhost:5432/inventory diff --git a/Dockerfile b/Dockerfile index d12a3f7..86fc6e2 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,11 +1,44 @@ -FROM python:3.11-slim +# Build Stage +FROM python:3.10-slim-bullseye as builder WORKDIR /app +ENV PYTHONDONTWRITEBYTECODE=1 +ENV PYTHONUNBUFFERED=1 + +RUN pip install --upgrade pip + COPY pyproject.toml . -RUN pip install --no-cache-dir . +# Install dependencies into server_inventory.egg-info and get requirements +# We'll use a trick to install dependencies into a virtual environment or just user install +RUN pip wheel --no-cache-dir --no-deps --wheel-dir /app/wheels . +# Better approach: Generate strict requirements.txt from pyproject.toml or just install . 
+# Since we don't have a lock file, we install directly for this example but in multiple stages. + +COPY . . +RUN pip wheel --no-cache-dir --no-deps --wheel-dir /app/wheels . + +# Runtime Stage +FROM python:3.10-slim-bullseye + +WORKDIR /app + +# Create a non-root user +RUN addgroup --system app && adduser --system --group app + +# Install Runtime Dependencies +COPY --from=builder /app/wheels /wheels +COPY --from=builder /app/pyproject.toml . + +# Install dependencies from wheels +RUN pip install --no-cache /wheels/* + +COPY . /app + +# Change ownership +RUN chown -R app:app /app -COPY app ./app -COPY cli ./cli +# Switch to non-root user +USER app CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"] diff --git a/app/config.py b/app/config.py new file mode 100644 index 0000000..7a4be14 --- /dev/null +++ b/app/config.py @@ -0,0 +1,8 @@ +from pydantic_settings import BaseSettings, SettingsConfigDict + +class Settings(BaseSettings): + DATABASE_URL: str = "postgresql://admin:password@localhost:5432/inventory" + + model_config = SettingsConfigDict(env_file=".env", extra="ignore") + +settings = Settings() diff --git a/app/database.py b/app/database.py index 1603e27..343ee24 100644 --- a/app/database.py +++ b/app/database.py @@ -3,7 +3,9 @@ from psycopg.rows import dict_row from contextlib import asynccontextmanager -DATABASE_URL = os.getenv("DATABASE_URL", "postgresql://admin:password@localhost:5432/inventory") +from app.config import settings + +DATABASE_URL = settings.DATABASE_URL class Database: def __init__(self): diff --git a/docker-compose.yml b/docker-compose.yml index 18ff283..18af4bf 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -4,16 +4,16 @@ services: db: image: postgres:15-alpine environment: - POSTGRES_USER: admin - POSTGRES_PASSWORD: password - POSTGRES_DB: inventory + POSTGRES_USER: ${POSTGRES_USER:-admin} + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-password} + POSTGRES_DB: ${POSTGRES_DB:-inventory} volumes: - postgres_data:/var/lib/postgresql/data - ./init.sql:/docker-entrypoint-initdb.d/init.sql ports: - "5432:5432" healthcheck: - test: [ "CMD-SHELL", "pg_isready -U admin -d inventory" ] + test: [ "CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-admin} -d ${POSTGRES_DB:-inventory}" ] interval: 5s timeout: 5s retries: 5 @@ -23,7 +23,7 @@ services: ports: - "8000:8000" environment: - DATABASE_URL: postgresql://admin:password@db:5432/inventory + DATABASE_URL: postgresql://${POSTGRES_USER:-admin}:${POSTGRES_PASSWORD:-password}@db:5432/${POSTGRES_DB:-inventory} depends_on: db: condition: service_healthy diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..c7b9c1c --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,67 @@ +import pytest +import asyncio +from httpx import AsyncClient, ASGITransport +from app.main import app +from app.database import get_db_connection, db +from app.config import settings +import psycopg + +# Use a different DB for testing if possible, or just the same one for this simple task +# In a real world, we'd spin up a test container or create a test_db + +@pytest.fixture(scope="session") +def event_loop(): + loop = asyncio.get_event_loop_policy().new_event_loop() + yield loop + loop.close() + +@pytest.fixture(scope="session") +async def db_pool(): + # Ensure tables exist + # In a real app, we would run alembic migrations here + # For this task, we will just execute init.sql content + conn_str = settings.DATABASE_URL + async with await psycopg.AsyncConnection.connect(conn_str, autocommit=True) as conn: + 
async with conn.cursor() as cur: + # Very simple teardown/rebuild for fresh state + await cur.execute("DROP TABLE IF EXISTS servers CASCADE") + await cur.execute("DROP TYPE IF EXISTS server_state CASCADE") + + # Re-create (Read init.sql would be better, but hardcoding for simplicity of this artifact) + await cur.execute(""" + DO $$ BEGIN + CREATE TYPE server_state AS ENUM ('active', 'offline', 'retired'); + EXCEPTION + WHEN duplicate_object THEN null; + END $$; + + CREATE TABLE IF NOT EXISTS servers ( + id SERIAL PRIMARY KEY, + hostname VARCHAR(255) NOT NULL UNIQUE, + ip_address INET, + state server_state NOT NULL, + created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP + ); + """) + yield + +@pytest.fixture +async def override_get_db(db_pool): + # We want a fresh transaction for each test that rolls back + # But for simplicity with psycopg3 async, we can just TRUNCATE or similar. + # Proper transactional tests are complex to setup without TestContainers or specific libs. + # Let's go with TRUNCATE for simplicity. + async with db.get_connection() as conn: + async with conn.cursor() as cur: + await cur.execute("TRUNCATE TABLE servers RESTART IDENTITY") + await conn.commit() + + # Return the actual dependency + async for conn in get_db_connection(): + yield conn + +@pytest.fixture +async def client(override_get_db): + transport = ASGITransport(app=app) + async with AsyncClient(transport=transport, base_url="http://test") as ac: + yield ac diff --git a/tests/test_api.py b/tests/test_api.py index e176cfb..7dbc5a5 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1,50 +1,8 @@ import pytest -from httpx import AsyncClient -from unittest.mock import AsyncMock, MagicMock -from app.main import app -from app.database import get_db_connection from app.models import ServerState -@pytest.fixture -def mock_db_cursor(): - cursor = AsyncMock() - # Mock context manager behavior for cursor - cursor.__aenter__.return_value = cursor - cursor.__aexit__.return_value = None - return cursor - -@pytest.fixture -def mock_db_connection(mock_db_cursor): - connection = AsyncMock() - connection.cursor = MagicMock(return_value=mock_db_cursor) - connection.commit = AsyncMock() - connection.rollback = AsyncMock() - return connection - -@pytest.fixture -def override_get_db(mock_db_connection): - async def _override(): - yield mock_db_connection - return _override - -from httpx import AsyncClient, ASGITransport - -@pytest.fixture -def client(override_get_db): - app.dependency_overrides[get_db_connection] = override_get_db - transport = ASGITransport(app=app) - return AsyncClient(transport=transport, base_url="http://test") - @pytest.mark.asyncio -async def test_create_server(client, mock_db_cursor): - mock_db_cursor.fetchone.return_value = { - "id": 1, - "hostname": "web-01", - "ip_address": "192.168.1.10", - "state": "active", - "created_at": "2023-01-01T00:00:00" - } - +async def test_create_server(client): response = await client.post("/servers/", json={ "hostname": "web-01", "ip_address": "192.168.1.10", @@ -55,36 +13,79 @@ async def test_create_server(client, mock_db_cursor): data = response.json() assert data["hostname"] == "web-01" assert data["state"] == "active" + assert "id" in data @pytest.mark.asyncio -async def test_list_servers(client, mock_db_cursor): - mock_db_cursor.fetchall.return_value = [ - { - "id": 1, - "hostname": "web-01", - "ip_address": "192.168.1.10", - "state": "active", - "created_at": "2023-01-01T00:00:00" - } - ] +async def test_create_duplicate_hostname(client): + # Create 
first + await client.post("/servers/", json={ + "hostname": "web-01", + "ip_address": "192.168.1.10", + "state": "active" + }) + + # Try duplicate + response = await client.post("/servers/", json={ + "hostname": "web-01", + "ip_address": "10.0.0.1", + "state": "offline" + }) + assert response.status_code == 400 + +@pytest.mark.asyncio +async def test_list_servers(client): + await client.post("/servers/", json={"hostname": "s1", "ip_address": "1.1.1.1", "state": "active"}) + await client.post("/servers/", json={"hostname": "s2", "ip_address": "2.2.2.2", "state": "offline"}) response = await client.get("/servers/") assert response.status_code == 200 data = response.json() - assert len(data) == 1 - assert data[0]["hostname"] == "web-01" + assert len(data) == 2 + +@pytest.mark.asyncio +async def test_get_server(client): + r_create = await client.post("/servers/", json={"hostname": "s1", "ip_address": "1.1.1.1", "state": "active"}) + server_id = r_create.json()["id"] + + response = await client.get(f"/servers/{server_id}") + assert response.status_code == 200 + assert response.json()["hostname"] == "s1" @pytest.mark.asyncio -async def test_get_server_not_found(client, mock_db_cursor): - mock_db_cursor.fetchone.return_value = None - response = await client.get("/servers/999") +async def test_get_server_not_found(client): + response = await client.get("/servers/999999") assert response.status_code == 404 +@pytest.mark.asyncio +async def test_update_server(client): + r_create = await client.post("/servers/", json={"hostname": "s1", "ip_address": "1.1.1.1", "state": "active"}) + server_id = r_create.json()["id"] + + response = await client.put(f"/servers/{server_id}", json={"state": "retired"}) + assert response.status_code == 200 + assert response.json()["state"] == "retired" + + # Verify persistence + r_get = await client.get(f"/servers/{server_id}") + assert r_get.json()["state"] == "retired" + +@pytest.mark.asyncio +async def test_delete_server(client): + r_create = await client.post("/servers/", json={"hostname": "s1", "ip_address": "1.1.1.1", "state": "active"}) + server_id = r_create.json()["id"] + + response = await client.delete(f"/servers/{server_id}") + assert response.status_code == 204 + + # Verify gone + r_get = await client.get(f"/servers/{server_id}") + assert r_get.status_code == 404 + @pytest.mark.asyncio async def test_create_server_invalid_ip(client): response = await client.post("/servers/", json={ - "hostname": "test", - "ip_address": "invalid-ip", + "hostname": "test-invalid-ip", + "ip_address": "invalid-ip-string", "state": "active" }) assert response.status_code == 422 From 1c6c7b13ba71ebebfea0fce07540f9a24c9fc699 Mon Sep 17 00:00:00 2001 From: Milos Ivancevic Date: Thu, 8 Jan 2026 06:00:09 +0100 Subject: [PATCH 3/8] podman stuff --- API.md | 4 ++++ docker-compose.yml | 6 ++++-- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/API.md b/API.md index 979b9e7..9a8f46c 100644 --- a/API.md +++ b/API.md @@ -11,6 +11,10 @@ This project provides a REST API and a CLI for tracking server inventory. It use To start the API and Database: ```bash docker-compose up --build + +# Or using Podman +podman compose up --build + ``` The API will be available at `http://localhost:8000`. Documentation (Swagger UI) is available at `http://localhost:8000/docs`. 
diff --git a/docker-compose.yml b/docker-compose.yml index 18af4bf..03ac941 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -3,13 +3,14 @@ version: '3.8' services: db: image: postgres:15-alpine + container_name: db environment: POSTGRES_USER: ${POSTGRES_USER:-admin} POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-password} POSTGRES_DB: ${POSTGRES_DB:-inventory} volumes: - postgres_data:/var/lib/postgresql/data - - ./init.sql:/docker-entrypoint-initdb.d/init.sql + - ./init.sql:/docker-entrypoint-initdb.d/init.sql:Z ports: - "5432:5432" healthcheck: @@ -20,6 +21,7 @@ services: api: build: . + container_name: api ports: - "8000:8000" environment: @@ -28,7 +30,7 @@ services: db: condition: service_healthy volumes: - - .:/app + - .:/app:Z command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload volumes: From 2bb3fe4f8259a929b1d184dc995d7b9a08004808 Mon Sep 17 00:00:00 2001 From: Milos Ivancevic Date: Thu, 8 Jan 2026 06:01:00 +0100 Subject: [PATCH 4/8] tests fix --- Dockerfile | 7 ++----- pyproject.toml | 3 +++ tests/conftest.py | 7 ++++--- 3 files changed, 9 insertions(+), 8 deletions(-) diff --git a/Dockerfile b/Dockerfile index 86fc6e2..04910e0 100644 --- a/Dockerfile +++ b/Dockerfile @@ -11,12 +11,9 @@ RUN pip install --upgrade pip COPY pyproject.toml . # Install dependencies into server_inventory.egg-info and get requirements # We'll use a trick to install dependencies into a virtual environment or just user install -RUN pip wheel --no-cache-dir --no-deps --wheel-dir /app/wheels . -# Better approach: Generate strict requirements.txt from pyproject.toml or just install . -# Since we don't have a lock file, we install directly for this example but in multiple stages. - +# Install dependencies into wheels directory COPY . . -RUN pip wheel --no-cache-dir --no-deps --wheel-dir /app/wheels . +RUN pip wheel --no-cache-dir --wheel-dir /app/wheels ".[test]" # Runtime Stage FROM python:3.10-slim-bullseye diff --git a/pyproject.toml b/pyproject.toml index b95d710..7316b79 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,3 +29,6 @@ build-backend = "setuptools.build_meta" [tool.setuptools.packages.find] where = ["."] + +[tool.pytest.ini_options] +asyncio_mode = "auto" diff --git a/tests/conftest.py b/tests/conftest.py index c7b9c1c..92b7028 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,4 +1,5 @@ import pytest +import pytest_asyncio import asyncio from httpx import AsyncClient, ASGITransport from app.main import app @@ -15,7 +16,7 @@ def event_loop(): yield loop loop.close() -@pytest.fixture(scope="session") +@pytest_asyncio.fixture(scope="session") async def db_pool(): # Ensure tables exist # In a real app, we would run alembic migrations here @@ -45,7 +46,7 @@ async def db_pool(): """) yield -@pytest.fixture +@pytest_asyncio.fixture async def override_get_db(db_pool): # We want a fresh transaction for each test that rolls back # But for simplicity with psycopg3 async, we can just TRUNCATE or similar. 
@@ -60,7 +61,7 @@ async def override_get_db(db_pool):
     async for conn in get_db_connection():
         yield conn
 
-@pytest.fixture
+@pytest_asyncio.fixture
 async def client(override_get_db):
     transport = ASGITransport(app=app)
     async with AsyncClient(transport=transport, base_url="http://test") as ac:
         yield ac

From 9bda6f58adca772f4ae0c29a63016eadb56bb593 Mon Sep 17 00:00:00 2001
From: Milos Ivancevic
Date: Thu, 8 Jan 2026 06:05:17 +0100
Subject: [PATCH 5/8] update docs

---
 API.md | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/API.md b/API.md
index 9a8f46c..7eb071a 100644
--- a/API.md
+++ b/API.md
@@ -30,6 +30,11 @@ Tests are written using `pytest`. You can run them locally if you have a valid
    ```bash
    pytest
    ```
+or with Podman:
+   ```bash
+   podman compose up --build -d
+   podman compose exec api pytest
+   ```
 
 ## CLI Usage
 The CLI is a Python script in `cli/main.py`.

From 17af05abff3714c5f550a5b4e1d3b7c389458586 Mon Sep 17 00:00:00 2001
From: Milos Ivancevic
Date: Thu, 8 Jan 2026 06:10:01 +0100
Subject: [PATCH 6/8] cleanup

---
 .dockerignore      | 6 ++++++
 Dockerfile         | 5 ++---
 app/database.py    | 1 -
 docker-compose.yml | 2 --
 4 files changed, 8 insertions(+), 6 deletions(-)
 create mode 100644 .dockerignore

diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 0000000..941325b
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,6 @@
+.git
+__pycache__
+*.py[cod]
+*.egg-info
+.pytest_cache
+.env

diff --git a/Dockerfile b/Dockerfile
index 04910e0..6845073 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -9,10 +9,9 @@ ENV PYTHONUNBUFFERED=1
 RUN pip install --upgrade pip
 
 COPY pyproject.toml .
-# Install dependencies into server_inventory.egg-info and get requirements
-# We'll use a trick to install dependencies into a virtual environment or just user install
-# Install dependencies into wheels directory
 COPY . .
+ +# Build wheels for all dependencies including test extras RUN pip wheel --no-cache-dir --wheel-dir /app/wheels ".[test]" # Runtime Stage diff --git a/app/database.py b/app/database.py index 343ee24..112f910 100644 --- a/app/database.py +++ b/app/database.py @@ -1,4 +1,3 @@ -import os import psycopg from psycopg.rows import dict_row from contextlib import asynccontextmanager diff --git a/docker-compose.yml b/docker-compose.yml index 03ac941..33208ae 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,5 +1,3 @@ -version: '3.8' - services: db: image: postgres:15-alpine From 0d1bbf57dea9be6ae76aca87093e7b5197423369 Mon Sep 17 00:00:00 2001 From: Milos Ivancevic Date: Thu, 8 Jan 2026 06:50:49 +0100 Subject: [PATCH 7/8] game up --- .github/workflows/ci.yml | 67 +++++++++++++++++++++++++++++++++ .pre-commit-config.yaml | 15 ++++++++ Makefile | 37 ++++++++++++++++++ alembic.ini | 44 ++++++++++++++++++++++ alembic/env.py | 41 ++++++++++++++++++++ alembic/script.py.mako | 26 +++++++++++++ alembic/versions/001_initial.py | 43 +++++++++++++++++++++ app/database.py | 36 ++++++++++++++++-- app/health.py | 41 ++++++++++++++++++++ app/logging.py | 54 ++++++++++++++++++++++++++ app/main.py | 41 ++++++++++++++++++-- app/metrics.py | 43 +++++++++++++++++++++ app/routers.py | 11 +++++- app/tracing.py | 41 ++++++++++++++++++++ docker-compose.yml | 1 + pyproject.toml | 13 ++++++- 16 files changed, 543 insertions(+), 11 deletions(-) create mode 100644 .github/workflows/ci.yml create mode 100644 .pre-commit-config.yaml create mode 100644 Makefile create mode 100644 alembic.ini create mode 100644 alembic/env.py create mode 100644 alembic/script.py.mako create mode 100644 alembic/versions/001_initial.py create mode 100644 app/health.py create mode 100644 app/logging.py create mode 100644 app/metrics.py create mode 100644 app/tracing.py diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..e9aa539 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,67 @@ +name: CI + +on: + push: + branches: [main, master] + pull_request: + branches: [main, master] + +jobs: + test: + runs-on: ubuntu-latest + env: + POSTGRES_USER: ${{ secrets.POSTGRES_USER || 'testuser' }} + POSTGRES_PASSWORD: ${{ secrets.POSTGRES_PASSWORD || 'testpass' }} + POSTGRES_DB: ${{ secrets.POSTGRES_DB || 'testdb' }} + + services: + postgres: + image: postgres:15-alpine + env: + POSTGRES_USER: ${{ env.POSTGRES_USER }} + POSTGRES_PASSWORD: ${{ env.POSTGRES_PASSWORD }} + POSTGRES_DB: ${{ env.POSTGRES_DB }} + ports: + - 5432:5432 + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + + steps: + - uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.10" + cache: pip + + - name: Install dependencies + run: pip install ".[test]" + + - name: Initialize database + run: | + PGPASSWORD=${{ env.POSTGRES_PASSWORD }} psql -h localhost -U ${{ env.POSTGRES_USER }} -d ${{ env.POSTGRES_DB }} -f init.sql + + - name: Run tests + run: pytest -v + env: + DATABASE_URL: postgresql://${{ env.POSTGRES_USER }}:${{ env.POSTGRES_PASSWORD }}@localhost:5432/${{ env.POSTGRES_DB }} + + lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.10" + + - name: Install ruff + run: pip install ruff + + - name: Run linter + run: ruff check . 
--output-format=github

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000..e835e12
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,15 @@
+repos:
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.2.0
+    hooks:
+      - id: ruff
+        args: [--fix]
+      - id: ruff-format
+
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v4.5.0
+    hooks:
+      - id: trailing-whitespace
+      - id: end-of-file-fixer
+      - id: check-yaml
+      - id: check-added-large-files

diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..c536a1d
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,37 @@
+.PHONY: help install dev test lint format run clean podman-up podman-down podman-test
+
+help: ## Show this help
+	@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-15s\033[0m %s\n", $$1, $$2}'
+
+install: ## Install production dependencies
+	pip install .
+
+dev: ## Install development dependencies
+	pip install -e ".[test]"
+
+test: ## Run tests
+	pytest -v
+
+lint: ## Run linter (ruff)
+	ruff check .
+
+format: ## Format code (ruff)
+	ruff format .
+
+run: ## Run the API locally
+	uvicorn app.main:app --reload
+
+clean: ## Clean up cache files
+	find . -type d -name __pycache__ -exec rm -rf {} + 2>/dev/null || true
+	find . -type d -name .pytest_cache -exec rm -rf {} + 2>/dev/null || true
+	find . -type d -name "*.egg-info" -exec rm -rf {} + 2>/dev/null || true
+
+# Podman commands
+podman-up: ## Start the stack with Podman
+	podman compose up -d --build
+
+podman-down: ## Stop the stack
+	podman compose down
+
+podman-test: ## Run tests in container
+	podman compose exec api pytest -v

diff --git a/alembic.ini b/alembic.ini
new file mode 100644
index 0000000..5860553
--- /dev/null
+++ b/alembic.ini
@@ -0,0 +1,44 @@
+# Alembic configuration file
+
+[alembic]
+script_location = alembic
+prepend_sys_path = .
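+# The sqlalchemy.url value below is a placeholder: alembic/env.py overrides
+# it at runtime with the DATABASE_URL environment variable.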
+version_path_separator = os + +sqlalchemy.url = driver://user:pass@localhost/dbname + +[post_write_hooks] + +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/alembic/env.py b/alembic/env.py new file mode 100644 index 0000000..e072919 --- /dev/null +++ b/alembic/env.py @@ -0,0 +1,41 @@ +"""Alembic migration environment configuration.""" +import os +from logging.config import fileConfig + +from alembic import context + +config = context.config + +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# Use DATABASE_URL from environment +database_url = os.getenv("DATABASE_URL", "postgresql://admin:password@localhost:5432/inventory") + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode.""" + context.configure( + url=database_url, + target_metadata=None, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + +def run_migrations_online() -> None: + """Run migrations in 'online' mode.""" + import psycopg + + connectable = psycopg.connect(database_url, autocommit=True) + + with connectable: + context.configure(connection=connectable, target_metadata=None) + with context.begin_transaction(): + context.run_migrations() + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/alembic/script.py.mako b/alembic/script.py.mako new file mode 100644 index 0000000..fbc4b07 --- /dev/null +++ b/alembic/script.py.mako @@ -0,0 +1,26 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/alembic/versions/001_initial.py b/alembic/versions/001_initial.py new file mode 100644 index 0000000..cc7e4ea --- /dev/null +++ b/alembic/versions/001_initial.py @@ -0,0 +1,43 @@ +"""Initial schema - servers table + +Revision ID: 001_initial +Revises: +Create Date: 2024-01-01 00:00:00.000000 + +""" +from typing import Sequence, Union + +from alembic import op + +# revision identifiers, used by Alembic. 
+revision: str = '001_initial' +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # Create ENUM type + op.execute(""" + DO $$ BEGIN + CREATE TYPE server_state AS ENUM ('active', 'offline', 'retired'); + EXCEPTION + WHEN duplicate_object THEN null; + END $$; + """) + + # Create servers table + op.execute(""" + CREATE TABLE IF NOT EXISTS servers ( + id SERIAL PRIMARY KEY, + hostname VARCHAR(255) NOT NULL UNIQUE, + ip_address INET, + state server_state NOT NULL, + created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP + ); + """) + + +def downgrade() -> None: + op.execute("DROP TABLE IF EXISTS servers;") + op.execute("DROP TYPE IF EXISTS server_state;") diff --git a/app/database.py b/app/database.py index 112f910..e5770e3 100644 --- a/app/database.py +++ b/app/database.py @@ -1,14 +1,34 @@ import psycopg from psycopg.rows import dict_row +from psycopg_pool import AsyncConnectionPool from contextlib import asynccontextmanager from app.config import settings -DATABASE_URL = settings.DATABASE_URL +# Connection pool - reuses connections for better performance +pool: AsyncConnectionPool | None = None + +async def init_pool(): + """Initialize the connection pool. Call on app startup.""" + global pool + pool = AsyncConnectionPool( + conninfo=settings.DATABASE_URL, + min_size=2, + max_size=10, + kwargs={"row_factory": dict_row}, + ) + await pool.open() + +async def close_pool(): + """Close the connection pool. Call on app shutdown.""" + global pool + if pool: + await pool.close() class Database: + """Legacy class for backward compatibility.""" def __init__(self): - self.conn_str = DATABASE_URL + self.conn_str = settings.DATABASE_URL @asynccontextmanager async def get_connection(self): @@ -21,5 +41,13 @@ async def get_connection(self): db = Database() async def get_db_connection(): - async with db.get_connection() as conn: - yield conn + """Dependency that provides a pooled database connection.""" + global pool + if pool: + async with pool.connection() as conn: + yield conn + else: + # Fallback for tests or when pool isn't initialized + async with db.get_connection() as conn: + yield conn + diff --git a/app/health.py b/app/health.py new file mode 100644 index 0000000..15c1414 --- /dev/null +++ b/app/health.py @@ -0,0 +1,41 @@ +from fastapi import APIRouter +from pydantic import BaseModel +from datetime import datetime + +router = APIRouter(tags=["health"]) + +class HealthResponse(BaseModel): + status: str + timestamp: datetime + +class ReadyResponse(BaseModel): + status: str + database: str + timestamp: datetime + +@router.get("/health", response_model=HealthResponse) +async def health_check(): + """Liveness probe - is the service running?""" + return HealthResponse( + status="healthy", + timestamp=datetime.utcnow() + ) + +@router.get("/ready", response_model=ReadyResponse) +async def readiness_check(): + """Readiness probe - is the service ready to accept traffic?""" + from app.database import db + + try: + async with db.get_connection() as conn: + async with conn.cursor() as cur: + await cur.execute("SELECT 1") + db_status = "connected" + except Exception: + db_status = "disconnected" + + return ReadyResponse( + status="ready" if db_status == "connected" else "not_ready", + database=db_status, + timestamp=datetime.utcnow() + ) diff --git a/app/logging.py b/app/logging.py new file mode 100644 index 0000000..705c31b --- /dev/null +++ b/app/logging.py @@ -0,0 +1,54 @@ 
+"""Structured logging configuration using structlog.""" +import structlog +import logging +import sys + +def setup_logging(json_logs: bool = True, log_level: str = "INFO"): + """Configure structured logging for the application.""" + + shared_processors = [ + structlog.contextvars.merge_contextvars, + structlog.stdlib.add_log_level, + structlog.stdlib.add_logger_name, + structlog.stdlib.PositionalArgumentsFormatter(), + structlog.processors.TimeStamper(fmt="iso"), + structlog.processors.StackInfoRenderer(), + structlog.processors.UnicodeDecoder(), + ] + + if json_logs: + # JSON output for production + renderer = structlog.processors.JSONRenderer() + else: + # Pretty output for development + renderer = structlog.dev.ConsoleRenderer(colors=True) + + structlog.configure( + processors=shared_processors + [ + structlog.stdlib.ProcessorFormatter.wrap_for_formatter, + ], + logger_factory=structlog.stdlib.LoggerFactory(), + cache_logger_on_first_use=True, + ) + + formatter = structlog.stdlib.ProcessorFormatter( + foreign_pre_chain=shared_processors, + processors=[ + structlog.stdlib.ProcessorFormatter.remove_processors_meta, + renderer, + ], + ) + + handler = logging.StreamHandler(sys.stdout) + handler.setFormatter(formatter) + + root_logger = logging.getLogger() + root_logger.addHandler(handler) + root_logger.setLevel(getattr(logging, log_level.upper())) + + # Silence noisy loggers + logging.getLogger("uvicorn.access").setLevel(logging.WARNING) + +def get_logger(name: str = __name__): + """Get a structured logger instance.""" + return structlog.get_logger(name) diff --git a/app/main.py b/app/main.py index 3da23b8..d5a2d51 100644 --- a/app/main.py +++ b/app/main.py @@ -1,10 +1,43 @@ +from contextlib import asynccontextmanager from fastapi import FastAPI -from app.routers import router +from app.routers import router as servers_router +from app.health import router as health_router +from app.metrics import router as metrics_router +from app.database import init_pool, close_pool +from app.logging import setup_logging +from app.tracing import setup_tracing -app = FastAPI(title="Server Inventory API") +@asynccontextmanager +async def lifespan(app: FastAPI): + """Startup and shutdown events.""" + setup_logging(json_logs=False) # Set to True in production + await init_pool() + yield + await close_pool() -app.include_router(router) +app = FastAPI( + title="Server Inventory API", + description="CRUD API for managing server inventory", + version="1.0.0", + lifespan=lifespan +) + +# Setup observability +setup_tracing(app) + +# API versioning - mount servers under /v1 +app.include_router(servers_router, prefix="/v1") + +# Keep backward compatibility with non-versioned endpoints +app.include_router(servers_router) + +# Utility routers (no versioning needed) +app.include_router(health_router) +app.include_router(metrics_router) @app.get("/") def read_root(): - return {"message": "Welcome to the Server Inventory API"} + return {"message": "Welcome to the Server Inventory API", "version": "1.0.0"} + + + diff --git a/app/metrics.py b/app/metrics.py new file mode 100644 index 0000000..2ec3115 --- /dev/null +++ b/app/metrics.py @@ -0,0 +1,43 @@ +"""Prometheus metrics configuration.""" +from prometheus_client import Counter, Histogram, generate_latest, CONTENT_TYPE_LATEST +from fastapi import APIRouter, Response +import time +from functools import wraps + +router = APIRouter(tags=["metrics"]) + +# Define metrics +REQUEST_COUNT = Counter( + "http_requests_total", + "Total HTTP requests", + ["method", "endpoint", 
"status"] +) + +REQUEST_LATENCY = Histogram( + "http_request_duration_seconds", + "HTTP request latency in seconds", + ["method", "endpoint"] +) + +SERVERS_CREATED = Counter( + "servers_created_total", + "Total number of servers created" +) + +SERVERS_DELETED = Counter( + "servers_deleted_total", + "Total number of servers deleted" +) + +@router.get("/metrics") +async def metrics(): + """Prometheus metrics endpoint.""" + return Response( + content=generate_latest(), + media_type=CONTENT_TYPE_LATEST + ) + +def record_request(method: str, endpoint: str, status: int, duration: float): + """Record metrics for a request.""" + REQUEST_COUNT.labels(method=method, endpoint=endpoint, status=status).inc() + REQUEST_LATENCY.labels(method=method, endpoint=endpoint).observe(duration) diff --git a/app/routers.py b/app/routers.py index 100726e..2751fd0 100644 --- a/app/routers.py +++ b/app/routers.py @@ -28,9 +28,16 @@ async def create_server(server: ServerCreate, conn: AsyncConnection = Depends(ge raise HTTPException(status_code=400, detail="Server with this hostname already exists") @router.get("/", response_model=List[Server]) -async def list_servers(conn: AsyncConnection = Depends(get_db_connection)): +async def list_servers( + limit: int = 100, + offset: int = 0, + conn: AsyncConnection = Depends(get_db_connection) +): async with conn.cursor() as cur: - await cur.execute("SELECT id, hostname, ip_address, state, created_at FROM servers") + await cur.execute( + "SELECT id, hostname, ip_address, state, created_at FROM servers ORDER BY id LIMIT %s OFFSET %s", + (limit, offset) + ) servers = await cur.fetchall() return servers diff --git a/app/tracing.py b/app/tracing.py new file mode 100644 index 0000000..bff9a81 --- /dev/null +++ b/app/tracing.py @@ -0,0 +1,41 @@ +"""OpenTelemetry tracing configuration.""" +import os +from opentelemetry import trace +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter +from opentelemetry.sdk.resources import Resource, SERVICE_NAME +from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor + +_provider = None + +def setup_tracing(app, service_name: str = "server-inventory-api"): + """Configure OpenTelemetry tracing for the application.""" + global _provider + + resource = Resource(attributes={ + SERVICE_NAME: service_name + }) + + _provider = TracerProvider(resource=resource) + + # Only add console exporter if explicitly enabled (avoids I/O errors) + # In production, use OTLP exporter instead + if os.getenv("OTEL_CONSOLE_EXPORT", "").lower() == "true": + processor = BatchSpanProcessor(ConsoleSpanExporter()) + _provider.add_span_processor(processor) + + trace.set_tracer_provider(_provider) + + # Instrument FastAPI + FastAPIInstrumentor.instrument_app(app) + +def shutdown_tracing(): + """Shutdown tracing provider gracefully.""" + global _provider + if _provider: + _provider.shutdown() + +def get_tracer(name: str = __name__): + """Get a tracer instance for manual instrumentation.""" + return trace.get_tracer(name) + diff --git a/docker-compose.yml b/docker-compose.yml index 33208ae..a36bdfe 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -24,6 +24,7 @@ services: - "8000:8000" environment: DATABASE_URL: postgresql://${POSTGRES_USER:-admin}:${POSTGRES_PASSWORD:-password}@db:5432/${POSTGRES_DB:-inventory} + OTEL_CONSOLE_EXPORT: false depends_on: db: condition: service_healthy diff --git a/pyproject.toml b/pyproject.toml index 7316b79..6224e02 100644 --- a/pyproject.toml 
+++ b/pyproject.toml
@@ -8,11 +8,17 @@ authors = [
 dependencies = [
     "fastapi>=0.109.0",
     "uvicorn[standard]>=0.27.0",
-    "psycopg[binary]>=3.1.17",
+    "psycopg[binary,pool]>=3.1.17",
     "pydantic>=2.6.0",
     "pydantic-settings>=2.1.0",
     "typer[all]>=0.9.0",
     "requests>=2.31.0",
+    "structlog>=24.1.0",
+    "alembic>=1.13.0",
+    "opentelemetry-api>=1.22.0",
+    "opentelemetry-sdk>=1.22.0",
+    "opentelemetry-instrumentation-fastapi>=0.43b0",
+    "prometheus-client>=0.19.0",
 ]
 requires-python = ">=3.10"
 
@@ -22,6 +28,10 @@ test = [
     "pytest-asyncio>=0.23.0",
     "httpx>=0.26.0",
 ]
+dev = [
+    "ruff>=0.2.0",
+    "pre-commit>=3.6.0",
+]
 
 [build-system]
 requires = ["setuptools>=42", "wheel"]
@@ -32,3 +42,4 @@ where = ["."]
 
 [tool.pytest.ini_options]
 asyncio_mode = "auto"
+cache_dir = "/tmp/.pytest_cache"

From f45c4e5c871419df36288db20fba420f0f927f99 Mon Sep 17 00:00:00 2001
From: Milos Ivancevic
Date: Thu, 8 Jan 2026 16:21:42 +0100
Subject: [PATCH 8/8] add ETag, CLI enhancements, Request ID tracing,
 filtering/search, update docs

---
 API.md            | 215 ++++++++++++++++++++++++++--------------------
 app/etag.py       |  49 +++++++++++
 app/main.py       |   4 +
 app/middleware.py |  32 +++++++
 app/routers.py    | 133 +++++++++++++++++++++++++---
 cli/main.py       | 164 +++++++++++++++++++++++++----------
 tests/test_api.py | 124 ++++++++++++++++++++++++++
 7 files changed, 572 insertions(+), 149 deletions(-)
 create mode 100644 app/etag.py
 create mode 100644 app/middleware.py

diff --git a/API.md b/API.md
index 7eb071a..2dee0f2 100644
--- a/API.md
+++ b/API.md
@@ -1,102 +1,133 @@
-# Inventory Management System API & CLI
+# Server Inventory API & CLI
 
 ## Overview
-This project provides a REST API and a CLI for tracking server inventory. It uses FastAPI, PostgreSQL, and Typer.
+REST API and CLI for tracking server inventory. Built with FastAPI, PostgreSQL (raw SQL), and Typer.
 
-## Requirements
-- Docker & Docker Compose
-- Python 3.10+ (for local CLI usage, optional)
+## Quick Start
 
-## Running the Stack
-To start the API and Database:
 ```bash
-docker-compose up --build
+# Start with Docker/Podman
+podman compose up -d --build
 
-# Or using Podman
-podman compose up --build
+# Run tests
+podman compose exec api pytest -v
 
+# View API docs
+open http://localhost:8000/docs
 ```
-The API will be available at `http://localhost:8000`.
-Documentation (Swagger UI) is available at `http://localhost:8000/docs`.
-
-## Running Tests
-Tests are written using `pytest`. You can run them locally if you have a valid Python environment:
-
-1. Install dependencies:
-   ```bash
-   pip install .[test]
-   ```
-2. Run tests:
-   ```bash
-   pytest
-   ```
-or with Podman:
-   ```bash
-   podman compose up --build -d
-   podman compose exec api pytest
-   ```
+
+## Makefile Commands
+
+```bash
+make help        # Show all commands
+make podman-up   # Start the stack
+make podman-test # Run tests in container
+make lint        # Run linter
+make format      # Format code
+```
+
+---
+
+## API Endpoints
+
+### CRUD Operations
+
+| Method | Endpoint | Description |
+|--------|----------|-------------|
+| POST | `/servers` | Create a server |
+| GET | `/servers` | List servers (with filtering) |
+| GET | `/servers/{id}` | Get a server |
+| PUT | `/servers/{id}` | Update a server |
+| DELETE | `/servers/{id}` | Delete a server |
+
+### Filtering & Pagination
+
+```bash
+GET /servers?limit=10&offset=0                  # Pagination
+GET /servers?state=active                       # Filter by state
+GET /servers?hostname_contains=web              # Search hostname
+GET /servers?state=active&hostname_contains=web # Combined
+```
+
+### ETag Concurrency Control
+
+All responses include an `ETag` header for optimistic concurrency:
+
+```bash
+# Conditional GET (returns 304 if unchanged)
+curl -H "If-None-Match: \"abc123\"" http://localhost:8000/servers/1
+
+# Conditional PUT (returns 412 if stale)
+curl -X PUT -H "If-Match: \"abc123\"" -H "Content-Type: application/json" -d '{"state":"offline"}' http://localhost:8000/servers/1
+```
+
+### Health & Observability
+
+| Endpoint | Description |
+|----------|-------------|
+| `/health` | Liveness probe |
+| `/ready` | Readiness probe (checks DB) |
+| `/metrics` | Prometheus metrics |
+
+### API Versioning
+
+All endpoints are available at both `/servers` and `/v1/servers`.
+
+---
 
 ## CLI Usage
-The CLI is a Python script in `cli/main.py`.
-
-1. Ensure the required dependencies are installed, or use the same environment:
-   ```bash
-   pip install typer requests
-   ```
-2. 
Run tests: - ```bash - pytest - ``` -or with podman - ```bash - podman compose up --build -d - podman compose exec api pytest - ``` + +## Makefile Commands + +```bash +make help # Show all commands +make podman-up # Start the stack +make podman-test # Run tests in container +make lint # Run linter +make format # Format code +``` + +--- + +## API Endpoints + +### CRUD Operations + +| Method | Endpoint | Description | +|--------|----------|-------------| +| POST | `/servers` | Create a server | +| GET | `/servers` | List servers (with filtering) | +| GET | `/servers/{id}` | Get a server | +| PUT | `/servers/{id}` | Update a server | +| DELETE | `/servers/{id}` | Delete a server | + +### Filtering & Pagination + +```bash +GET /servers?limit=10&offset=0 # Pagination +GET /servers?state=active # Filter by state +GET /servers?hostname_contains=web # Search hostname +GET /servers?state=active&hostname_contains=web # Combined +``` + +### ETag Concurrency Control + +All responses include an `ETag` header for optimistic concurrency: + +```bash +# Conditional GET (returns 304 if unchanged) +curl -H "If-None-Match: \"abc123\"" http://localhost:8000/servers/1 + +# Conditional PUT (returns 412 if stale) +curl -X PUT -H "If-Match: \"abc123\"" -d '{"state":"offline"}' http://localhost:8000/servers/1 +``` + +### Health & Observability + +| Endpoint | Description | +|----------|-------------| +| `/health` | Liveness probe | +| `/ready` | Readiness probe (checks DB) | +| `/metrics` | Prometheus metrics | + +### API Versioning + +All endpoints are available at both `/servers` and `/v1/servers`. + +--- ## CLI Usage -The CLI is a python script in `cli/main.py`. - -1. Ensure specific dependencies are installed or use the same environment: - ```bash - pip install typer requests - ``` -2. Run commands: - ```bash - # Create a server - python cli/main.py create web-01 192.168.1.5 active - - # List servers - python cli/main.py list - - # Get a server - python cli/main.py get 1 - - # Update a server - python cli/main.py update 1 --state offline - - # Delete a server - python cli/main.py delete 1 - ``` - -## API Specification - -### Endpoints - -#### POST /servers -Create a new server. -- **Body**: - ```json - { - "hostname": "string", - "ip_address": "string (IPv4)", - "state": "active|offline|retired" - } - ``` -- **Response**: 201 Created - -#### GET /servers -List all servers. -- **Response**: 200 OK (List of servers) - -#### GET /servers/{id} -Get a specific server. -- **Response**: 200 OK or 404 Not Found - -#### PUT /servers/{id} -Update a server. -- **Body** (all fields optional): - ```json - { - "hostname": "string", - "ip_address": "string", - "state": "string" - } - ``` -- **Response**: 200 OK - -#### DELETE /servers/{id} -Delete a server. -- **Response**: 204 No Content + +```bash +# Basic commands +python cli/main.py create web-01 192.168.1.5 active +python cli/main.py list +python cli/main.py get 1 +python cli/main.py update 1 --state offline +python cli/main.py delete 1 + +# Output formats +python cli/main.py list --format json # JSON output +python cli/main.py list --format table # Table output (default) + +# Filtering +python cli/main.py list --state active +python cli/main.py list --hostname web +``` + +### CLI Features +- **Retry with backoff** - Auto-retries on connection errors +- **Format options** - `--format json` or `--format table` +- **Filtering** - `--state` and `--hostname` flags + +--- + +## Request Tracing + +All responses include `X-Request-ID` header for distributed tracing. 
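+
+For example, reading the generated ID client-side with `requests` (a minimal
+sketch, assuming the default `localhost:8000` setup):
+
+```python
+import requests
+
+r = requests.get("http://localhost:8000/servers/1")
+print(r.headers["X-Request-ID"])  # set by the middleware on every response
+```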
+Send your own `X-Request-ID` header and it will be echoed back.
+
+---
+
+## Development
+
+```bash
+# Install dev dependencies
+pip install -e ".[test,dev]"
+
+# Run pre-commit hooks
+pre-commit install
+pre-commit run --all-files
+
+# Run Alembic migrations
+alembic upgrade head
+```
+
+## Environment Variables
+
+| Variable | Default | Description |
+|----------|---------|-------------|
+| `DATABASE_URL` | `postgresql://admin:password@db:5432/inventory` | Database connection string |
+| `POSTGRES_USER` | `admin` | DB username |
+| `POSTGRES_PASSWORD` | `password` | DB password |
+| `POSTGRES_DB` | `inventory` | DB name |
+| `OTEL_CONSOLE_EXPORT` | `false` | Enable trace console output |
diff --git a/app/etag.py b/app/etag.py
new file mode 100644
index 0000000..48f3264
--- /dev/null
+++ b/app/etag.py
@@ -0,0 +1,49 @@
+"""ETag utilities for optimistic concurrency control."""
+import hashlib
+import json
+from typing import Any, Dict
+
+
+def generate_etag(data: Dict[str, Any]) -> str:
+    """Generate an ETag from server data.
+
+    The ETag is a hash of the serializable fields, ensuring it changes
+    when any field is modified.
+    """
+    # Convert datetime to ISO format string for hashing
+    serializable = {}
+    for key, value in data.items():
+        if hasattr(value, 'isoformat'):
+            serializable[key] = value.isoformat()
+        else:
+            serializable[key] = str(value) if value is not None else None
+
+    content = json.dumps(serializable, sort_keys=True)
+    return hashlib.md5(content.encode()).hexdigest()
+
+
+def etag_matches(etag: str, if_match: str | None) -> bool:
+    """Check if the provided If-Match header matches the current ETag."""
+    if if_match is None:
+        return True  # No If-Match header means proceed
+
+    # Handle weak ETags (W/"...")
+    if_match = if_match.strip()
+    if if_match.startswith('W/'):
+        if_match = if_match[2:]
+    if_match = if_match.strip('"')
+
+    return etag == if_match
+
+
+def etag_none_match(etag: str, if_none_match: str | None) -> bool:
+    """Check if the ETag matches If-None-Match (return True if NOT modified)."""
+    if if_none_match is None:
+        return False  # No header means content is considered modified
+
+    if_none_match = if_none_match.strip()
+    if if_none_match.startswith('W/'):
+        if_none_match = if_none_match[2:]
+    if_none_match = if_none_match.strip('"')
+
+    return etag == if_none_match
diff --git a/app/main.py b/app/main.py
index d5a2d51..b92cca7 100644
--- a/app/main.py
+++ b/app/main.py
@@ -6,6 +6,7 @@ from app.database import init_pool, close_pool
 from app.logging import setup_logging
 from app.tracing import setup_tracing
+from app.middleware import RequestIDMiddleware
 
 @asynccontextmanager
 async def lifespan(app: FastAPI):
@@ -22,6 +23,9 @@ async def lifespan(app: FastAPI):
     lifespan=lifespan
 )
 
+# Add middleware
+app.add_middleware(RequestIDMiddleware)
+
 # Setup observability
 setup_tracing(app)
 
diff --git a/app/middleware.py b/app/middleware.py
new file mode 100644
index 0000000..a9602bb
--- /dev/null
+++ b/app/middleware.py
@@ -0,0 +1,32 @@
+"""Request ID middleware for request tracing."""
+import uuid
+from contextvars import ContextVar
+from starlette.middleware.base import BaseHTTPMiddleware
+from starlette.requests import Request
+
+
+# Context variable to store request ID across async boundaries
+request_id_ctx: ContextVar[str] = ContextVar("request_id", default="")
+
+
+def get_request_id() -> str:
+    """Get the current request ID from context."""
+    return request_id_ctx.get()
+
+
+class RequestIDMiddleware(BaseHTTPMiddleware):
+    """Middleware that adds an X-Request-ID header to all requests/responses."""
+
+    async def dispatch(self, request: Request, call_next):
+        # Use existing X-Request-ID or generate a new one
+        request_id = request.headers.get("X-Request-ID", str(uuid.uuid4()))
+
+        # Store in context for logging
+        token = request_id_ctx.set(request_id)
+
+        try:
+            response = await call_next(request)
+            response.headers["X-Request-ID"] = request_id
+            return response
+        finally:
+            request_id_ctx.reset(token)
diff --git a/app/routers.py b/app/routers.py
index 2751fd0..7f53616 100644
--- a/app/routers.py
+++ b/app/routers.py
@@ -1,15 +1,21 @@
-from fastapi import APIRouter, Depends, HTTPException, status
+from fastapi import APIRouter, Depends, HTTPException, status, Header, Response
 from psycopg import AsyncConnection
 from psycopg.errors import UniqueViolation
-from typing import List
+from typing import List, Optional
 
 from app.database import get_db_connection
 from app.models import Server, ServerCreate, ServerUpdate
+from app.etag import generate_etag, etag_matches, etag_none_match
 
 router = APIRouter(prefix="/servers", tags=["servers"])
 
+
 @router.post("/", response_model=Server, status_code=status.HTTP_201_CREATED)
-async def create_server(server: ServerCreate, conn: AsyncConnection = Depends(get_db_connection)):
+async def create_server(
+    response: Response,
+    server: ServerCreate,
+    conn: AsyncConnection = Depends(get_db_connection)
+):
     try:
         async with conn.cursor() as cur:
             await cur.execute(
@@ -22,27 +28,71 @@ async def create_server(server: ServerCreate, conn: AsyncConnection = Depends(ge
             )
             new_server = await cur.fetchone()
             await conn.commit()
+
+            # Add ETag header to response
+            etag = generate_etag(new_server)
+            response.headers["ETag"] = f'"{etag}"'
+
             return new_server
     except UniqueViolation:
         await conn.rollback()
         raise HTTPException(status_code=400, detail="Server with this hostname already exists")
 
+
 @router.get("/", response_model=List[Server])
 async def list_servers(
     limit: int = 100,
     offset: int = 0,
+    state: Optional[str] = None,
+    hostname_contains: Optional[str] = None,
     conn: AsyncConnection = Depends(get_db_connection)
 ):
+    """List servers with optional filtering.
+
+    Args:
+        limit: Maximum number of results (default 100)
+        offset: Pagination offset (default 0)
+        state: Filter by server state (active, offline, retired)
+        hostname_contains: Filter servers whose hostname contains this string
+    """
+    # Build dynamic WHERE clause
+    conditions = []
+    params = []
+
+    if state:
+        conditions.append("state = %s")
+        params.append(state)
+
+    if hostname_contains:
+        conditions.append("hostname ILIKE %s")
+        params.append(f"%{hostname_contains}%")
+
+    where_clause = ""
+    if conditions:
+        where_clause = "WHERE " + " AND ".join(conditions)
+
+    query = f"""
+        SELECT id, hostname, ip_address, state, created_at
+        FROM servers
+        {where_clause}
+        ORDER BY id
+        LIMIT %s OFFSET %s
+    """
+    params.extend([limit, offset])
+
     async with conn.cursor() as cur:
-        await cur.execute(
-            "SELECT id, hostname, ip_address, state, created_at FROM servers ORDER BY id LIMIT %s OFFSET %s",
-            (limit, offset)
-        )
+        await cur.execute(query, params)
         servers = await cur.fetchall()
     return servers
 
+
 @router.get("/{server_id}", response_model=Server)
-async def get_server(server_id: int, conn: AsyncConnection = Depends(get_db_connection)):
+async def get_server(
+    server_id: int,
+    response: Response,
+    conn: AsyncConnection = Depends(get_db_connection),
+    if_none_match: Optional[str] = Header(None)
+):
     async with conn.cursor() as cur:
         await cur.execute(
             "SELECT id, hostname, ip_address, state, created_at FROM servers WHERE id = %s",
@@ -51,15 +101,49 @@ async def get_server(server_id: int, conn: AsyncConnection = Depends(get_db_conn
         server = await cur.fetchone()
     if not server:
         raise HTTPException(status_code=404, detail="Server not found")
+
+    # Generate ETag
+    etag = generate_etag(server)
+    response.headers["ETag"] = f'"{etag}"'
+
+    # Check If-None-Match for conditional GET (304 Not Modified)
+    if etag_none_match(etag, if_none_match):
+        return Response(status_code=status.HTTP_304_NOT_MODIFIED, headers={"ETag": f'"{etag}"'})
+
     return server
 
+
 @router.put("/{server_id}", response_model=Server)
-async def update_server(server_id: int, server_update: ServerUpdate, conn: AsyncConnection = Depends(get_db_connection)):
+async def update_server(
+    server_id: int,
+    server_update: ServerUpdate,
+    response: Response,
+    conn: AsyncConnection = Depends(get_db_connection),
+    if_match: Optional[str] = Header(None)
+):
     # Build query dynamically based on set fields
     update_data = server_update.model_dump(exclude_unset=True)
     if not update_data:
         raise HTTPException(status_code=400, detail="No fields to update")
 
+    # If If-Match is provided, verify the ETag before updating
+    if if_match:
+        async with conn.cursor() as cur:
+            await cur.execute(
+                "SELECT id, hostname, ip_address, state, created_at FROM servers WHERE id = %s",
+                (server_id,)
+            )
+            current = await cur.fetchone()
+            if not current:
+                raise HTTPException(status_code=404, detail="Server not found")
+
+            current_etag = generate_etag(current)
+            if not etag_matches(current_etag, if_match):
+                raise HTTPException(
+                    status_code=status.HTTP_412_PRECONDITION_FAILED,
+                    detail="Resource has been modified. Refresh and retry."
+                )
+
     set_clauses = []
     values = []
     for key, value in update_data.items():
@@ -87,16 +171,45 @@ async def update_server(server_id: int, server_update: ServerUpdate, conn: Async
         if not updated_server:
             raise HTTPException(status_code=404, detail="Server not found")
         await conn.commit()
+
+        # Add new ETag to response
+        etag = generate_etag(updated_server)
+        response.headers["ETag"] = f'"{etag}"'
+
         return updated_server
     except UniqueViolation:
         await conn.rollback()
         raise HTTPException(status_code=400, detail="Server with this hostname already exists")
 
+
 @router.delete("/{server_id}", status_code=status.HTTP_204_NO_CONTENT)
-async def delete_server(server_id: int, conn: AsyncConnection = Depends(get_db_connection)):
+async def delete_server(
+    server_id: int,
+    conn: AsyncConnection = Depends(get_db_connection),
+    if_match: Optional[str] = Header(None)
+):
+    # If If-Match is provided, verify the ETag before deleting
+    if if_match:
+        async with conn.cursor() as cur:
+            await cur.execute(
+                "SELECT id, hostname, ip_address, state, created_at FROM servers WHERE id = %s",
+                (server_id,)
+            )
+            current = await cur.fetchone()
+            if not current:
+                raise HTTPException(status_code=404, detail="Server not found")
+
+            current_etag = generate_etag(current)
+            if not etag_matches(current_etag, if_match):
+                raise HTTPException(
+                    status_code=status.HTTP_412_PRECONDITION_FAILED,
+                    detail="Resource has been modified. Refresh and retry."
+                )
+
     async with conn.cursor() as cur:
         await cur.execute("DELETE FROM servers WHERE id = %s RETURNING id", (server_id,))
         deleted = await cur.fetchone()
         if not deleted:
             raise HTTPException(status_code=404, detail="Server not found")
         await conn.commit()
diff --git a/cli/main.py b/cli/main.py
index daf6a53..2a08daf 100644
--- a/cli/main.py
+++ b/cli/main.py
@@ -1,59 +1,130 @@
 import typer
 import requests
 import json
+import time
 from typing import Optional
 from enum import Enum
+from functools import wraps
 
-app = typer.Typer()
+app = typer.Typer(help="Server Inventory CLI - Manage your server fleet")
 API_URL = "http://localhost:8000/servers"
 
+# Output format options
+class OutputFormat(str, Enum):
+    json = "json"
+    table = "table"
+
 class ServerState(str, Enum):
     active = "active"
     offline = "offline"
     retired = "retired"
 
+
+def retry_with_backoff(max_retries: int = 3, base_delay: float = 1.0):
+    """Decorator for exponential backoff retry on connection errors."""
+    def decorator(func):
+        @wraps(func)
+        def wrapper(*args, **kwargs):
+            last_exception = None
+            for attempt in range(max_retries):
+                try:
+                    return func(*args, **kwargs)
+                except requests.exceptions.ConnectionError as e:
+                    last_exception = e
+                    delay = base_delay * (2 ** attempt)
+                    typer.echo(f"Connection failed, retrying in {delay}s... (attempt {attempt + 1}/{max_retries})", err=True)
+                    time.sleep(delay)
+            typer.echo(f"Failed after {max_retries} attempts: {last_exception}", err=True)
+            raise typer.Exit(1)
+        return wrapper
+    return decorator
+
+
+def format_output(data, fmt: OutputFormat):
+    """Format output based on user preference."""
+    if fmt == OutputFormat.json:
+        return json.dumps(data, indent=2, default=str)
+    else:
+        # Table format
+        if isinstance(data, list):
+            if not data:
+                return "No servers found."
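+            # Build a plain-text table: header row from the dict keys,
+            # a dashed separator, then one row per record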
+            headers = list(data[0].keys())
+            lines = [" | ".join(headers)]
+            lines.append("-" * len(lines[0]))
+            for item in data:
+                lines.append(" | ".join(str(item.get(h, "")) for h in headers))
+            return "\n".join(lines)
+        else:
+            lines = [f"{k}: {v}" for k, v in data.items()]
+            return "\n".join(lines)
+
+
 @app.command()
-def create(hostname: str, ip_address: str, state: ServerState):
+@retry_with_backoff()
+def create(
+    hostname: str,
+    ip_address: str,
+    state: ServerState,
+    format: OutputFormat = typer.Option(OutputFormat.table, "--format", "-f", help="Output format")
+):
     """Create a new server."""
     payload = {
         "hostname": hostname,
         "ip_address": ip_address,
         "state": state.value
     }
-    try:
-        response = requests.post(API_URL, json=payload)
-        response.raise_for_status()
-        typer.echo(json.dumps(response.json(), indent=2))
-    except requests.exceptions.HTTPError as e:
-        typer.echo(f"Error: {e.response.text}", err=True)
-    except Exception as e:
-        typer.echo(f"Error: {e}", err=True)
+    response = requests.post(API_URL, json=payload)
+    if response.status_code >= 400:
+        typer.echo(f"Error: {response.text}", err=True)
+        raise typer.Exit(1)
+    typer.echo(format_output(response.json(), format))
+
 
 @app.command("list")
-def list_servers():
-    """List all servers."""
-    try:
-        response = requests.get(API_URL)
-        response.raise_for_status()
-        typer.echo(json.dumps(response.json(), indent=2))
-    except Exception as e:
-        typer.echo(f"Error: {e}", err=True)
+@retry_with_backoff()
+def list_servers(
+    format: OutputFormat = typer.Option(OutputFormat.table, "--format", "-f", help="Output format"),
+    state: Optional[str] = typer.Option(None, "--state", "-s", help="Filter by state"),
+    hostname: Optional[str] = typer.Option(None, "--hostname", "-h", help="Filter by hostname (contains)")
+):
+    """List all servers with optional filtering."""
+    params = {}
+    if state:
+        params["state"] = state
+    if hostname:
+        params["hostname_contains"] = hostname
+
+    response = requests.get(API_URL, params=params)
+    response.raise_for_status()
+    typer.echo(format_output(response.json(), format))
+
 
 @app.command()
-def get(server_id: int):
+@retry_with_backoff()
+def get(
+    server_id: int,
+    format: OutputFormat = typer.Option(OutputFormat.table, "--format", "-f", help="Output format")
+):
     """Get a specific server by ID."""
-    try:
-        response = requests.get(f"{API_URL}/{server_id}")
-        response.raise_for_status()
-        typer.echo(json.dumps(response.json(), indent=2))
-    except requests.exceptions.HTTPError as e:
-        typer.echo(f"Error: {e.response.text}", err=True)
-    except Exception as e:
-        typer.echo(f"Error: {e}", err=True)
+    response = requests.get(f"{API_URL}/{server_id}")
+    if response.status_code == 404:
+        typer.echo(f"Error: Server {server_id} not found", err=True)
+        raise typer.Exit(1)
+    response.raise_for_status()
+    typer.echo(format_output(response.json(), format))
+
 
 @app.command()
-def update(server_id: int, hostname: Optional[str] = None, ip_address: Optional[str] = None, state: Optional[ServerState] = None):
+@retry_with_backoff()
+def update(
+    server_id: int,
+    hostname: Optional[str] = typer.Option(None, "--hostname"),
+    ip_address: Optional[str] = typer.Option(None, "--ip"),
+    state: Optional[ServerState] = typer.Option(None, "--state"),
+    format: OutputFormat = typer.Option(OutputFormat.table, "--format", "-f", help="Output format")
+):
     """Update a server."""
     payload = {}
     if hostname:
@@ -64,29 +135,28 @@ def update(server_id: int, hostname: Optional[str] = None, ip_address: Optional[
         payload["state"] = state.value
 
     if not payload:
-        typer.echo("No updates specific.")
-        return
-
-    try:
-        response = requests.put(f"{API_URL}/{server_id}", json=payload)
-        response.raise_for_status()
-        typer.echo(json.dumps(response.json(), indent=2))
-    except requests.exceptions.HTTPError as e:
-        typer.echo(f"Error: {e.response.text}", err=True)
-    except Exception as e:
-        typer.echo(f"Error: {e}", err=True)
+        typer.echo("No updates specified. Use --hostname, --ip, or --state.", err=True)
+        raise typer.Exit(1)
+
+    response = requests.put(f"{API_URL}/{server_id}", json=payload)
+    if response.status_code >= 400:
+        typer.echo(f"Error: {response.text}", err=True)
+        raise typer.Exit(1)
+    typer.echo(format_output(response.json(), format))
+
 
 @app.command()
+@retry_with_backoff()
 def delete(server_id: int):
     """Delete a server."""
-    try:
-        response = requests.delete(f"{API_URL}/{server_id}")
-        response.raise_for_status()
-        typer.echo(f"Server {server_id} deleted successfully.")
-    except requests.exceptions.HTTPError as e:
-        typer.echo(f"Error: {e.response.text}", err=True)
-    except Exception as e:
-        typer.echo(f"Error: {e}", err=True)
+    response = requests.delete(f"{API_URL}/{server_id}")
+    if response.status_code == 404:
+        typer.echo(f"Error: Server {server_id} not found", err=True)
+        raise typer.Exit(1)
+    response.raise_for_status()
+    typer.echo(f"✓ Server {server_id} deleted successfully.")
+
 
 if __name__ == "__main__":
     app()
+
diff --git a/tests/test_api.py b/tests/test_api.py
index 7dbc5a5..38a191a 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -89,3 +89,127 @@ async def test_create_server_invalid_ip(client):
         "state": "active"
     })
     assert response.status_code == 422
+
+
+# ETag Tests
+@pytest.mark.asyncio
+async def test_etag_returned_on_create(client):
+    """Test that ETag is returned when creating a server."""
+    response = await client.post("/servers/", json={
+        "hostname": "etag-test",
+        "ip_address": "10.0.0.1",
+        "state": "active"
+    })
+    assert response.status_code == 201
+    assert "etag" in response.headers
+
+
+@pytest.mark.asyncio
+async def test_etag_returned_on_get(client):
+    """Test that ETag is returned when getting a server."""
+    r_create = await client.post("/servers/", json={
+        "hostname": "etag-get-test",
+        "ip_address": "10.0.0.2",
+        "state": "active"
+    })
+    server_id = r_create.json()["id"]
+
+    response = await client.get(f"/servers/{server_id}")
+    assert response.status_code == 200
+    assert "etag" in response.headers
+
+
+@pytest.mark.asyncio
+async def test_if_none_match_returns_304(client):
+    """Test conditional GET with If-None-Match returns 304 Not Modified."""
+    r_create = await client.post("/servers/", json={
+        "hostname": "conditional-get-test",
+        "ip_address": "10.0.0.3",
+        "state": "active"
+    })
+    server_id = r_create.json()["id"]
+
+    # First GET to get ETag
+    r_get = await client.get(f"/servers/{server_id}")
+    etag = r_get.headers["etag"]
+
+    # Conditional GET with same ETag should return 304
+    response = await client.get(f"/servers/{server_id}", headers={"If-None-Match": etag})
+    assert response.status_code == 304
+
+
+@pytest.mark.asyncio
+async def test_if_match_update_succeeds(client):
+    """Test update with correct If-Match header succeeds."""
+    r_create = await client.post("/servers/", json={
+        "hostname": "if-match-test",
+        "ip_address": "10.0.0.4",
+        "state": "active"
+    })
+    server_id = r_create.json()["id"]
+    etag = r_create.headers["etag"]
+
+    response = await client.put(
+        f"/servers/{server_id}",
+        json={"state": "offline"},
+        headers={"If-Match": etag}
+    )
+    assert response.status_code == 200
+
+
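+# Illustrative sketch: delete_server applies the same If-Match guard as
+# update_server, so a stale ETag on DELETE should also yield 412.
+# The hostname/IP below are arbitrary test fixtures.
+@pytest.mark.asyncio
+async def test_if_match_delete_fails_with_stale_etag(client):
+    r_create = await client.post("/servers/", json={
+        "hostname": "stale-delete-test",
+        "ip_address": "10.0.0.6",
+        "state": "active"
+    })
+    server_id = r_create.json()["id"]
+    old_etag = r_create.headers["etag"]
+
+    # Mutate the resource so the captured ETag becomes stale
+    await client.put(f"/servers/{server_id}", json={"state": "offline"})
+
+    response = await client.delete(f"/servers/{server_id}", headers={"If-Match": old_etag})
+    assert response.status_code == 412
+
+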
+@pytest.mark.asyncio
+async def test_if_match_update_fails_with_stale_etag(client):
+    """Test update with stale If-Match header returns 412 Precondition Failed."""
+    r_create = await client.post("/servers/", json={
+        "hostname": "stale-etag-test",
+        "ip_address": "10.0.0.5",
+        "state": "active"
+    })
+    server_id = r_create.json()["id"]
+    old_etag = r_create.headers["etag"]
+
+    # Update to change the ETag
+    await client.put(f"/servers/{server_id}", json={"state": "offline"})
+
+    # Try to update with old ETag
+    response = await client.put(
+        f"/servers/{server_id}",
+        json={"state": "retired"},
+        headers={"If-Match": old_etag}
+    )
+    assert response.status_code == 412
+
+
+# Filtering Tests
+@pytest.mark.asyncio
+async def test_filter_by_state(client):
+    """Test filtering servers by state."""
+    await client.post("/servers/", json={"hostname": "filter-active", "ip_address": "10.1.1.1", "state": "active"})
+    await client.post("/servers/", json={"hostname": "filter-offline", "ip_address": "10.1.1.2", "state": "offline"})
+
+    response = await client.get("/servers/?state=active")
+    assert response.status_code == 200
+    data = response.json()
+    assert all(s["state"] == "active" for s in data)
+
+
+@pytest.mark.asyncio
+async def test_filter_by_hostname_contains(client):
+    """Test filtering servers by hostname pattern."""
+    await client.post("/servers/", json={"hostname": "web-prod-01", "ip_address": "10.2.1.1", "state": "active"})
+    await client.post("/servers/", json={"hostname": "db-prod-01", "ip_address": "10.2.1.2", "state": "active"})
+
+    response = await client.get("/servers/?hostname_contains=web")
+    assert response.status_code == 200
+    data = response.json()
+    assert all("web" in s["hostname"] for s in data)
+
+
+# Request ID Test
+@pytest.mark.asyncio
+async def test_request_id_header(client):
+    """Test that X-Request-ID header is returned in responses."""
+    response = await client.get("/servers/")
+    assert "x-request-id" in response.headers
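+
+
+# Illustrative sketch: RequestIDMiddleware reuses a caller-supplied
+# X-Request-ID instead of generating one, so it should be echoed back
+# unchanged. The ID value below is an arbitrary fixture.
+@pytest.mark.asyncio
+async def test_request_id_echoed_back(client):
+    response = await client.get("/servers/", headers={"X-Request-ID": "trace-me-123"})
+    assert response.headers["x-request-id"] == "trace-me-123"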