diff --git a/.github/workflows/stack-tests.yml b/.github/workflows/stack-tests.yml index 8711ea9f..69ba1f69 100644 --- a/.github/workflows/stack-tests.yml +++ b/.github/workflows/stack-tests.yml @@ -185,76 +185,7 @@ jobs: path: /tmp/all-images.tar.zst retention-days: 1 - # Three parallel test jobs - backend-integration: - name: Backend Integration Tests - needs: [build-images] - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v6 - - - name: Cache and load Docker images - uses: ./.github/actions/docker-cache - with: - images: ${{ env.MONGO_IMAGE }} ${{ env.REDIS_IMAGE }} ${{ env.KAFKA_IMAGE }} ${{ env.ZOOKEEPER_IMAGE }} ${{ env.SCHEMA_REGISTRY_IMAGE }} - - - name: Download built images - uses: actions/download-artifact@v7 - with: - name: docker-images - path: /tmp - - - name: Load built images - run: zstd -d -c /tmp/all-images.tar.zst | docker load - - - name: Setup k3s - uses: ./.github/actions/k3s-setup - - - name: Use test environment config - run: cp backend/.env.test backend/.env - - - name: Start stack - run: ./deploy.sh dev --wait - - - name: Run integration tests - timeout-minutes: 10 - run: | - docker compose exec -T -e TEST_RUN_ID=integration backend \ - uv run pytest tests/integration -v -rs \ - --durations=0 \ - --cov=app \ - --cov-report=xml:coverage-integration.xml \ - --cov-report=term - - - name: Copy coverage - if: always() - run: docker compose cp backend:/app/coverage-integration.xml backend/coverage-integration.xml || true - - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v5 - if: always() - with: - token: ${{ secrets.CODECOV_TOKEN }} - files: backend/coverage-integration.xml - flags: backend-integration - name: backend-integration-coverage - fail_ci_if_error: false - - - name: Collect logs on failure - if: failure() - run: | - mkdir -p logs - docker compose logs > logs/docker-compose.log 2>&1 - docker compose logs backend > logs/backend.log 2>&1 - docker compose logs kafka > logs/kafka.log 2>&1 - - - name: Upload 
logs - if: failure() - uses: actions/upload-artifact@v6 - with: - name: backend-integration-logs - path: logs/ - + # Parallel test jobs (backend-e2e, frontend-e2e) backend-e2e: name: Backend E2E Tests needs: [build-images] @@ -289,9 +220,9 @@ jobs: run: docker compose exec -T backend uv run python scripts/seed_users.py - name: Run E2E tests - timeout-minutes: 10 + timeout-minutes: 15 run: | - docker compose exec -T -e TEST_RUN_ID=e2e backend \ + docker compose exec -T backend \ uv run pytest tests/e2e -v -rs \ --durations=0 \ --cov=app \ diff --git a/backend/app/api/routes/admin/events.py b/backend/app/api/routes/admin/events.py index 3620a8be..deb3e29d 100644 --- a/backend/app/api/routes/admin/events.py +++ b/backend/app/api/routes/admin/events.py @@ -77,7 +77,7 @@ async def export_events_csv( ) -> StreamingResponse: try: export_filter = EventFilter( - event_types=[str(et) for et in event_types] if event_types else None, + event_types=event_types, start_time=start_time, end_time=end_time, ) @@ -107,7 +107,7 @@ async def export_events_json( """Export events as JSON with comprehensive filtering.""" try: export_filter = EventFilter( - event_types=[str(et) for et in event_types] if event_types else None, + event_types=event_types, aggregate_id=aggregate_id, correlation_id=correlation_id, user_id=user_id, diff --git a/backend/app/api/routes/admin/users.py b/backend/app/api/routes/admin/users.py index 94ad2741..457c04ae 100644 --- a/backend/app/api/routes/admin/users.py +++ b/backend/app/api/routes/admin/users.py @@ -7,18 +7,19 @@ from app.api.dependencies import admin_user from app.db.repositories.admin.admin_user_repository import AdminUserRepository from app.domain.enums.user import UserRole -from app.domain.rate_limit import UserRateLimit +from app.domain.rate_limit import RateLimitRule, UserRateLimit from app.domain.user import UserUpdate as DomainUserUpdate from app.schemas_pydantic.admin_user_overview import ( AdminUserOverview, DerivedCounts, RateLimitSummary, ) 
-from app.schemas_pydantic.events import EventResponse, EventStatistics +from app.schemas_pydantic.events import EventStatistics from app.schemas_pydantic.user import ( DeleteUserResponse, MessageResponse, PasswordResetRequest, + RateLimitUpdateRequest, RateLimitUpdateResponse, UserCreate, UserListResponse, @@ -27,7 +28,6 @@ UserUpdate, ) from app.services.admin import AdminUserService -from app.services.rate_limit_service import RateLimitService router = APIRouter( prefix="/admin/users", tags=["admin", "users"], route_class=DishkaRoute, dependencies=[Depends(admin_user)] @@ -38,7 +38,6 @@ async def list_users( admin: Annotated[UserResponse, Depends(admin_user)], admin_user_service: FromDishka[AdminUserService], - rate_limit_service: FromDishka[RateLimitService], limit: int = Query(default=100, le=1000), offset: int = Query(default=0, ge=0), search: str | None = None, @@ -51,24 +50,8 @@ async def list_users( search=search, role=role, ) - - summaries = await rate_limit_service.get_user_rate_limit_summaries([u.user_id for u in result.users]) - user_responses: list[UserResponse] = [] - for user in result.users: - user_response = UserResponse.model_validate(user) - summary = summaries.get(user.user_id) - if summary: - user_response = user_response.model_copy( - update={ - "bypass_rate_limit": summary.bypass_rate_limit, - "global_multiplier": summary.global_multiplier, - "has_custom_limits": summary.has_custom_limits, - } - ) - user_responses.append(user_response) - return UserListResponse( - users=user_responses, + users=[UserResponse.model_validate(u) for u in result.users], total=result.total, offset=result.offset, limit=result.limit, @@ -119,7 +102,7 @@ async def get_user_overview( stats=EventStatistics.model_validate(domain.stats), derived_counts=DerivedCounts.model_validate(domain.derived_counts), rate_limit_summary=RateLimitSummary.model_validate(domain.rate_limit_summary), - recent_events=[EventResponse.model_validate(e).model_dump() for e in 
domain.recent_events], + recent_events=domain.recent_events, ) @@ -165,13 +148,19 @@ async def delete_user( if admin.user_id == user_id: raise HTTPException(status_code=400, detail="Cannot delete your own account") - deleted_counts = await admin_user_service.delete_user( + result = await admin_user_service.delete_user( admin_username=admin.username, user_id=user_id, cascade=cascade ) - if deleted_counts.get("user", 0) == 0: - raise HTTPException(status_code=500, detail="Failed to delete user") - - return DeleteUserResponse(message=f"User {user_id} deleted successfully", deleted_counts=deleted_counts) + return DeleteUserResponse( + message=f"User {user_id} deleted successfully", + user_deleted=result.user_deleted, + executions=result.executions, + saved_scripts=result.saved_scripts, + notifications=result.notifications, + user_settings=result.user_settings, + events=result.events, + sagas=result.sagas, + ) @router.post("/{user_id}/reset-password", response_model=MessageResponse) @@ -204,10 +193,15 @@ async def update_user_rate_limits( admin: Annotated[UserResponse, Depends(admin_user)], admin_user_service: FromDishka[AdminUserService], user_id: str, - rate_limit_config: UserRateLimit, + request: RateLimitUpdateRequest, ) -> RateLimitUpdateResponse: + config = UserRateLimit( + user_id=user_id, + rules=[RateLimitRule(**r.model_dump()) for r in request.rules], + **request.model_dump(exclude={"rules"}), + ) result = await admin_user_service.update_user_rate_limits( - admin_username=admin.username, user_id=user_id, config=rate_limit_config + admin_username=admin.username, user_id=user_id, config=config ) return RateLimitUpdateResponse.model_validate(result) diff --git a/backend/app/api/routes/events.py b/backend/app/api/routes/events.py index e27aa04a..405fd5de 100644 --- a/backend/app/api/routes/events.py +++ b/backend/app/api/routes/events.py @@ -6,13 +6,16 @@ from dishka import FromDishka from dishka.integrations.fastapi import DishkaRoute from fastapi import 
APIRouter, Depends, HTTPException, Query, Request +from pydantic import TypeAdapter from app.api.dependencies import admin_user, current_user from app.core.correlation import CorrelationContext from app.core.utils import get_client_ip from app.domain.enums.common import SortOrder +from app.domain.enums.events import EventType +from app.domain.enums.user import UserRole from app.domain.events.event_models import EventFilter -from app.domain.events.typed import BaseEvent, EventMetadata +from app.domain.events.typed import BaseEvent, DomainEvent, EventMetadata from app.schemas_pydantic.events import ( DeleteEventResponse, EventAggregationRequest, @@ -26,9 +29,12 @@ ) from app.schemas_pydantic.user import UserResponse from app.services.event_service import EventService +from app.services.execution_service import ExecutionService from app.services.kafka_event_service import KafkaEventService from app.settings import Settings +_event_response_list_adapter: TypeAdapter[list[EventResponse]] = TypeAdapter(list[EventResponse]) + router = APIRouter(prefix="/events", tags=["events"], route_class=DishkaRoute) @@ -37,10 +43,16 @@ async def get_execution_events( execution_id: str, current_user: Annotated[UserResponse, Depends(current_user)], event_service: FromDishka[EventService], + execution_service: FromDishka[ExecutionService], include_system_events: bool = Query(False, description="Include system-generated events"), limit: int = Query(100, ge=1, le=1000), skip: int = Query(0, ge=0), ) -> EventListResponse: + # Check execution ownership first (before checking events) + execution = await execution_service.get_execution_result(execution_id) + if execution.user_id and execution.user_id != current_user.user_id and current_user.role != UserRole.ADMIN: + raise HTTPException(status_code=403, detail="Access denied") + result = await event_service.get_execution_events( execution_id=execution_id, user_id=current_user.user_id, @@ -53,10 +65,8 @@ async def get_execution_events( if result 
is None: raise HTTPException(status_code=403, detail="Access denied") - event_responses = [EventResponse.model_validate(event) for event in result.events] - return EventListResponse( - events=event_responses, + events=_event_response_list_adapter.validate_python(result.events, from_attributes=True), total=result.total, limit=limit, skip=skip, @@ -68,7 +78,7 @@ async def get_execution_events( async def get_user_events( current_user: Annotated[UserResponse, Depends(current_user)], event_service: FromDishka[EventService], - event_types: List[str] | None = Query(None), + event_types: List[EventType] | None = Query(None), start_time: datetime | None = Query(None), end_time: datetime | None = Query(None), limit: int = Query(100, ge=1, le=1000), @@ -86,10 +96,12 @@ async def get_user_events( sort_order=sort_order, ) - event_responses = [EventResponse.model_validate(event) for event in result.events] - return EventListResponse( - events=event_responses, total=result.total, limit=limit, skip=skip, has_more=result.has_more + events=_event_response_list_adapter.validate_python(result.events, from_attributes=True), + total=result.total, + limit=limit, + skip=skip, + has_more=result.has_more, ) @@ -100,7 +112,7 @@ async def query_events( event_service: FromDishka[EventService], ) -> EventListResponse: event_filter = EventFilter( - event_types=[str(et) for et in filter_request.event_types] if filter_request.event_types else None, + event_types=filter_request.event_types, aggregate_id=filter_request.aggregate_id, correlation_id=filter_request.correlation_id, user_id=filter_request.user_id, @@ -121,10 +133,12 @@ async def query_events( if result is None: raise HTTPException(status_code=403, detail="Cannot query other users' events") - event_responses = [EventResponse.model_validate(event) for event in result.events] - return EventListResponse( - events=event_responses, total=result.total, limit=result.limit, skip=result.skip, has_more=result.has_more + 
events=_event_response_list_adapter.validate_python(result.events, from_attributes=True), + total=result.total, + limit=result.limit, + skip=result.skip, + has_more=result.has_more, ) @@ -146,10 +160,8 @@ async def get_events_by_correlation( skip=skip, ) - event_responses = [EventResponse.model_validate(event) for event in result.events] - return EventListResponse( - events=event_responses, + events=_event_response_list_adapter.validate_python(result.events, from_attributes=True), total=result.total, limit=limit, skip=skip, @@ -177,10 +189,8 @@ async def get_current_request_events( skip=skip, ) - event_responses = [EventResponse.model_validate(event) for event in result.events] - return EventListResponse( - events=event_responses, + events=_event_response_list_adapter.validate_python(result.events, from_attributes=True), total=result.total, limit=limit, skip=skip, @@ -212,15 +222,15 @@ async def get_event_statistics( return EventStatistics.model_validate(stats) -@router.get("/{event_id}", response_model=EventResponse) +@router.get("/{event_id}", response_model=DomainEvent) async def get_event( event_id: str, current_user: Annotated[UserResponse, Depends(current_user)], event_service: FromDishka[EventService] -) -> EventResponse: +) -> DomainEvent: """Get a specific event by ID""" event = await event_service.get_event(event_id=event_id, user_id=current_user.user_id, user_role=current_user.role) if event is None: raise HTTPException(status_code=404, detail="Event not found") - return EventResponse.model_validate(event) + return event @router.post("/publish", response_model=PublishEventResponse) diff --git a/backend/app/api/routes/execution.py b/backend/app/api/routes/execution.py index d5cf51bb..6002e3f4 100644 --- a/backend/app/api/routes/execution.py +++ b/backend/app/api/routes/execution.py @@ -5,6 +5,7 @@ from dishka import FromDishka from dishka.integrations.fastapi import DishkaRoute, inject from fastapi import APIRouter, Depends, Header, HTTPException, Path, 
Query, Request +from pydantic import TypeAdapter from app.api.dependencies import admin_user, current_user from app.core.tracing import EventAttributes, add_span_attributes @@ -14,12 +15,12 @@ from app.domain.enums.user import UserRole from app.domain.events.typed import BaseEvent, EventMetadata from app.domain.exceptions import DomainError +from app.schemas_pydantic.events import EventResponse from app.schemas_pydantic.execution import ( CancelExecutionRequest, CancelResponse, DeleteResponse, ExampleScripts, - ExecutionEventResponse, ExecutionInDB, ExecutionListResponse, ExecutionRequest, @@ -35,14 +36,16 @@ from app.services.kafka_event_service import KafkaEventService from app.settings import Settings -router = APIRouter(route_class=DishkaRoute) +_event_list_adapter: TypeAdapter[list[EventResponse]] = TypeAdapter(list[EventResponse]) + +router = APIRouter(route_class=DishkaRoute, tags=["execution"]) @inject async def get_execution_with_access( - execution_id: Annotated[str, Path()], - current_user: Annotated[UserResponse, Depends(current_user)], - execution_service: FromDishka[ExecutionService], + execution_id: Annotated[str, Path()], + current_user: Annotated[UserResponse, Depends(current_user)], + execution_service: FromDishka[ExecutionService], ) -> ExecutionInDB: domain_exec = await execution_service.get_execution_result(execution_id) @@ -54,12 +57,12 @@ async def get_execution_with_access( @router.post("/execute", response_model=ExecutionResponse) async def create_execution( - request: Request, - current_user: Annotated[UserResponse, Depends(current_user)], - execution: ExecutionRequest, - execution_service: FromDishka[ExecutionService], - idempotency_manager: FromDishka[IdempotencyManager], - idempotency_key: Annotated[str | None, Header(alias="Idempotency-Key")] = None, + request: Request, + current_user: Annotated[UserResponse, Depends(current_user)], + execution: ExecutionRequest, + execution_service: FromDishka[ExecutionService], + idempotency_manager: 
FromDishka[IdempotencyManager], + idempotency_key: Annotated[str | None, Header(alias="Idempotency-Key")] = None, ) -> ExecutionResponse: add_span_attributes( **{ @@ -148,20 +151,20 @@ async def create_execution( raise HTTPException(status_code=500, detail="Internal server error during script execution") from e -@router.get("/result/{execution_id}", response_model=ExecutionResult) +@router.get("/executions/{execution_id}/result", response_model=ExecutionResult) async def get_result( - execution: Annotated[ExecutionInDB, Depends(get_execution_with_access)], + execution: Annotated[ExecutionInDB, Depends(get_execution_with_access)], ) -> ExecutionResult: return ExecutionResult.model_validate(execution) -@router.post("/{execution_id}/cancel", response_model=CancelResponse) +@router.post("/executions/{execution_id}/cancel", response_model=CancelResponse) async def cancel_execution( - execution: Annotated[ExecutionInDB, Depends(get_execution_with_access)], - current_user: Annotated[UserResponse, Depends(current_user)], - cancel_request: CancelExecutionRequest, - event_service: FromDishka[KafkaEventService], - settings: FromDishka[Settings], + execution: Annotated[ExecutionInDB, Depends(get_execution_with_access)], + current_user: Annotated[UserResponse, Depends(current_user)], + cancel_request: CancelExecutionRequest, + event_service: FromDishka[KafkaEventService], + settings: FromDishka[Settings], ) -> CancelResponse: # Handle terminal states terminal_states = [ExecutionStatus.COMPLETED, ExecutionStatus.FAILED, ExecutionStatus.TIMEOUT] @@ -204,13 +207,13 @@ async def cancel_execution( ) -@router.post("/{execution_id}/retry", response_model=ExecutionResponse) +@router.post("/executions/{execution_id}/retry", response_model=ExecutionResponse) async def retry_execution( - original_execution: Annotated[ExecutionInDB, Depends(get_execution_with_access)], - current_user: Annotated[UserResponse, Depends(current_user)], - retry_request: RetryExecutionRequest, - request: 
Request, - execution_service: FromDishka[ExecutionService], + original_execution: Annotated[ExecutionInDB, Depends(get_execution_with_access)], + current_user: Annotated[UserResponse, Depends(current_user)], + retry_request: RetryExecutionRequest, + request: Request, + execution_service: FromDishka[ExecutionService], ) -> ExecutionResponse: """Retry a failed or completed execution.""" @@ -231,30 +234,30 @@ async def retry_execution( return ExecutionResponse.model_validate(new_result) -@router.get("/executions/{execution_id}/events", response_model=list[ExecutionEventResponse]) +@router.get("/executions/{execution_id}/events", response_model=list[EventResponse]) async def get_execution_events( - execution: Annotated[ExecutionInDB, Depends(get_execution_with_access)], - event_service: FromDishka[EventService], - event_types: list[EventType] | None = Query(None, description="Event types to filter"), - limit: int = Query(100, ge=1, le=1000), -) -> list[ExecutionEventResponse]: + execution: Annotated[ExecutionInDB, Depends(get_execution_with_access)], + event_service: FromDishka[EventService], + event_types: list[EventType] | None = Query(None, description="Event types to filter"), + limit: int = Query(100, ge=1, le=1000), +) -> list[EventResponse]: """Get all events for an execution.""" events = await event_service.get_events_by_aggregate( aggregate_id=execution.execution_id, event_types=event_types, limit=limit ) - return [ExecutionEventResponse.model_validate(e) for e in events] + return _event_list_adapter.validate_python([e.model_dump() for e in events]) @router.get("/user/executions", response_model=ExecutionListResponse) async def get_user_executions( - current_user: Annotated[UserResponse, Depends(current_user)], - execution_service: FromDishka[ExecutionService], - status: ExecutionStatus | None = Query(None), - lang: str | None = Query(None), - start_time: datetime | None = Query(None), - end_time: datetime | None = Query(None), - limit: int = Query(50, ge=1, 
le=200), - skip: int = Query(0, ge=0), + current_user: Annotated[UserResponse, Depends(current_user)], + execution_service: FromDishka[ExecutionService], + status: ExecutionStatus | None = Query(None), + lang: str | None = Query(None), + start_time: datetime | None = Query(None), + end_time: datetime | None = Query(None), + limit: int = Query(50, ge=1, le=200), + skip: int = Query(0, ge=0), ) -> ExecutionListResponse: """Get executions for the current user.""" @@ -281,7 +284,7 @@ async def get_user_executions( @router.get("/example-scripts", response_model=ExampleScripts) async def get_example_scripts( - execution_service: FromDishka[ExecutionService], + execution_service: FromDishka[ExecutionService], ) -> ExampleScripts: scripts = await execution_service.get_example_scripts() return ExampleScripts(scripts=scripts) @@ -289,7 +292,7 @@ async def get_example_scripts( @router.get("/k8s-limits", response_model=ResourceLimits) async def get_k8s_resource_limits( - execution_service: FromDishka[ExecutionService], + execution_service: FromDishka[ExecutionService], ) -> ResourceLimits: try: limits = await execution_service.get_k8s_resource_limits() @@ -298,11 +301,11 @@ async def get_k8s_resource_limits( raise HTTPException(status_code=500, detail="Failed to retrieve resource limits") from e -@router.delete("/{execution_id}", response_model=DeleteResponse) +@router.delete("/executions/{execution_id}", response_model=DeleteResponse) async def delete_execution( - execution_id: str, - admin: Annotated[UserResponse, Depends(admin_user)], - execution_service: FromDishka[ExecutionService], + execution_id: str, + admin: Annotated[UserResponse, Depends(admin_user)], + execution_service: FromDishka[ExecutionService], ) -> DeleteResponse: """Delete an execution and its associated data (admin only).""" await execution_service.delete_execution(execution_id) diff --git a/backend/app/core/container.py b/backend/app/core/container.py index b67f133a..1a82b14d 100644 --- 
a/backend/app/core/container.py +++ b/backend/app/core/container.py @@ -19,6 +19,7 @@ PodMonitorProvider, RedisProvider, RepositoryProvider, + ResourceCleanerProvider, SagaOrchestratorProvider, SettingsProvider, SSEProvider, @@ -50,7 +51,10 @@ def create_app_container(settings: Settings) -> AsyncContainer: AuthProvider(), UserServicesProvider(), AdminServicesProvider(), + EventReplayProvider(), BusinessServicesProvider(), + KubernetesProvider(), + ResourceCleanerProvider(), FastapiProvider(), context={Settings: settings}, ) diff --git a/backend/app/core/providers.py b/backend/app/core/providers.py index 6ce30a01..fdd57494 100644 --- a/backend/app/core/providers.py +++ b/backend/app/core/providers.py @@ -67,6 +67,7 @@ from app.services.pod_monitor.monitor import PodMonitor from app.services.rate_limit_service import RateLimitService from app.services.replay_service import ReplayService +from app.services.result_processor.resource_cleaner import ResourceCleaner from app.services.saga import SagaOrchestrator, create_saga_orchestrator from app.services.saga.saga_service import SagaService from app.services.saved_script_service import SavedScriptService @@ -252,6 +253,14 @@ async def get_k8s_clients(self, settings: Settings, logger: logging.Logger) -> A close_k8s_clients(clients) +class ResourceCleanerProvider(Provider): + scope = Scope.APP + + @provide + def get_resource_cleaner(self, k8s_clients: K8sClients, logger: logging.Logger) -> ResourceCleaner: + return ResourceCleaner(k8s_clients=k8s_clients, logger=logger) + + class MetricsProvider(Provider): """Provides all metrics instances via DI (no contextvars needed).""" @@ -650,21 +659,12 @@ def get_saved_script_service( return SavedScriptService(saved_script_repository, logger) @provide - async def get_replay_service( + def get_replay_service( self, replay_repository: ReplayRepository, - kafka_producer: UnifiedProducer, - event_store: EventStore, - settings: Settings, + event_replay_service: EventReplayService, 
logger: logging.Logger, ) -> ReplayService: - event_replay_service = EventReplayService( - repository=replay_repository, - producer=kafka_producer, - event_store=event_store, - settings=settings, - logger=logger, - ) return ReplayService(replay_repository, event_replay_service, logger) @provide diff --git a/backend/app/db/repositories/admin/admin_events_repository.py b/backend/app/db/repositories/admin/admin_events_repository.py index 0f8a6bb3..77f2e87f 100644 --- a/backend/app/db/repositories/admin/admin_events_repository.py +++ b/backend/app/db/repositories/admin/admin_events_repository.py @@ -75,7 +75,7 @@ async def get_event_detail(self, event_id: str) -> EventDetail | None: related_events = [ EventSummary( event_id=d.event_id, - event_type=str(d.event_type), + event_type=d.event_type, timestamp=d.timestamp, aggregate_id=d.aggregate_id, ) @@ -203,7 +203,7 @@ async def export_events_csv(self, event_filter: EventFilter) -> list[EventExport return [ EventExportRow( event_id=doc.event_id, - event_type=str(doc.event_type), + event_type=doc.event_type, timestamp=doc.timestamp, correlation_id=doc.metadata.correlation_id or "", aggregate_id=doc.aggregate_id or "", @@ -324,7 +324,7 @@ async def get_events_preview_for_replay(self, replay_filter: ReplayFilter, limit return [ EventSummary( event_id=doc.event_id, - event_type=str(doc.event_type), + event_type=doc.event_type, timestamp=doc.timestamp, aggregate_id=doc.aggregate_id, ) diff --git a/backend/app/db/repositories/admin/admin_user_repository.py b/backend/app/db/repositories/admin/admin_user_repository.py index 43e8ec5e..68add3b7 100644 --- a/backend/app/db/repositories/admin/admin_user_repository.py +++ b/backend/app/db/repositories/admin/admin_user_repository.py @@ -15,7 +15,15 @@ UserSettingsDocument, ) from app.domain.enums import UserRole -from app.domain.user import DomainUserCreate, PasswordReset, User, UserListResult, UserUpdate +from app.domain.user import ( + DomainUserCreate, + PasswordReset, + User, + 
UserDeleteResult, + UserListResult, + UserNotFoundError, + UserUpdate, +) class AdminUserRepository: @@ -69,39 +77,33 @@ async def update_user(self, user_id: str, update_data: UserUpdate) -> User | Non await doc.set(update_dict) return User.model_validate(doc, from_attributes=True) - async def delete_user(self, user_id: str, cascade: bool = True) -> dict[str, int]: - deleted_counts = {} - + async def delete_user(self, user_id: str, cascade: bool = True) -> UserDeleteResult: doc = await UserDocument.find_one(UserDocument.user_id == user_id) - if doc: - await doc.delete() - deleted_counts["user"] = 1 - else: - deleted_counts["user"] = 0 + if not doc: + raise UserNotFoundError(user_id) + + await doc.delete() if not cascade: - return deleted_counts + return UserDeleteResult(user_deleted=True) # Cascade delete related data exec_result = await ExecutionDocument.find(ExecutionDocument.user_id == user_id).delete() - deleted_counts["executions"] = exec_result.deleted_count if exec_result else 0 - scripts_result = await SavedScriptDocument.find(SavedScriptDocument.user_id == user_id).delete() - deleted_counts["saved_scripts"] = scripts_result.deleted_count if scripts_result else 0 - notif_result = await NotificationDocument.find(NotificationDocument.user_id == user_id).delete() - deleted_counts["notifications"] = notif_result.deleted_count if notif_result else 0 - settings_result = await UserSettingsDocument.find(UserSettingsDocument.user_id == user_id).delete() - deleted_counts["user_settings"] = settings_result.deleted_count if settings_result else 0 - events_result = await EventDocument.find(EventDocument.metadata.user_id == user_id).delete() - deleted_counts["events"] = events_result.deleted_count if events_result else 0 - sagas_result = await SagaDocument.find(SagaDocument.context_data["user_id"] == user_id).delete() - deleted_counts["sagas"] = sagas_result.deleted_count if sagas_result else 0 - return deleted_counts + return UserDeleteResult( + user_deleted=True, + 
executions=exec_result.deleted_count if exec_result else 0, + saved_scripts=scripts_result.deleted_count if scripts_result else 0, + notifications=notif_result.deleted_count if notif_result else 0, + user_settings=settings_result.deleted_count if settings_result else 0, + events=events_result.deleted_count if events_result else 0, + sagas=sagas_result.deleted_count if sagas_result else 0, + ) async def reset_user_password(self, reset_data: PasswordReset) -> bool: doc = await UserDocument.find_one(UserDocument.user_id == reset_data.user_id) diff --git a/backend/app/db/repositories/dlq_repository.py b/backend/app/db/repositories/dlq_repository.py index 7ef78434..6390a7b2 100644 --- a/backend/app/db/repositories/dlq_repository.py +++ b/backend/app/db/repositories/dlq_repository.py @@ -96,7 +96,7 @@ async def get_messages( conditions: list[Any] = [ DLQMessageDocument.status == status if status else None, DLQMessageDocument.original_topic == topic if topic else None, - DLQMessageDocument.event_type == event_type if event_type else None, + DLQMessageDocument.event.event_type == event_type if event_type else None, ] conditions = [c for c in conditions if c is not None] diff --git a/backend/app/db/repositories/event_repository.py b/backend/app/db/repositories/event_repository.py index d0f8daee..c55084c6 100644 --- a/backend/app/db/repositories/event_repository.py +++ b/backend/app/db/repositories/event_repository.py @@ -74,7 +74,7 @@ async def get_event(self, event_id: str) -> DomainEvent | None: async def get_events_by_type( self, - event_type: str, + event_type: EventType, start_time: datetime | None = None, end_time: datetime | None = None, limit: int = 100, @@ -122,7 +122,7 @@ async def get_events_by_correlation(self, correlation_id: str, limit: int = 100, async def get_events_by_user( self, user_id: str, - event_types: list[str] | None = None, + event_types: list[EventType] | None = None, start_time: datetime | None = None, end_time: datetime | None = None, limit: int 
= 100, @@ -240,7 +240,7 @@ async def get_event_statistics( return EventStatistics(total_events=0, events_by_type={}, events_by_service={}, events_by_hour=[]) async def cleanup_old_events( - self, older_than_days: int = 30, event_types: list[str] | None = None, dry_run: bool = False + self, older_than_days: int = 30, event_types: list[EventType] | None = None, dry_run: bool = False ) -> int: cutoff_dt = datetime.now(timezone.utc) - timedelta(days=older_than_days) conditions: list[Any] = [ @@ -262,7 +262,7 @@ async def cleanup_old_events( async def get_user_events_paginated( self, user_id: str, - event_types: list[str] | None = None, + event_types: list[EventType] | None = None, start_time: datetime | None = None, end_time: datetime | None = None, limit: int = 100, diff --git a/backend/app/dlq/manager.py b/backend/app/dlq/manager.py index 1d450a03..1e20dc23 100644 --- a/backend/app/dlq/manager.py +++ b/backend/app/dlq/manager.py @@ -453,7 +453,7 @@ def create_dlq_manager( consumer = AIOKafkaConsumer( topic_name, bootstrap_servers=settings.KAFKA_BOOTSTRAP_SERVERS, - group_id=f"{GroupId.DLQ_MANAGER}.{settings.KAFKA_GROUP_SUFFIX}", + group_id=GroupId.DLQ_MANAGER, enable_auto_commit=False, auto_offset_reset="earliest", client_id="dlq-manager-consumer", diff --git a/backend/app/domain/enums/sse.py b/backend/app/domain/enums/sse.py index 7c7c1a03..85885634 100644 --- a/backend/app/domain/enums/sse.py +++ b/backend/app/domain/enums/sse.py @@ -1,6 +1,13 @@ from app.core.utils import StringEnum +class SSEHealthStatus(StringEnum): + """Health status for SSE service.""" + + HEALTHY = "healthy" + DRAINING = "draining" + + class SSEControlEvent(StringEnum): """Control events for execution SSE streams (not from Kafka).""" diff --git a/backend/app/domain/events/event_models.py b/backend/app/domain/events/event_models.py index e3a6f913..0293e88e 100644 --- a/backend/app/domain/events/event_models.py +++ b/backend/app/domain/events/event_models.py @@ -6,6 +6,7 @@ from 
pydantic.dataclasses import dataclass from app.core.utils import StringEnum +from app.domain.enums.events import EventType from app.domain.events.typed import DomainEvent MongoQueryValue = str | dict[str, str | list[str] | float | datetime] @@ -45,7 +46,7 @@ class EventSummary: """Lightweight event summary for lists and previews.""" event_id: str - event_type: str + event_type: EventType timestamp: datetime aggregate_id: str | None = None @@ -55,7 +56,7 @@ class EventFilter(BaseModel): model_config = ConfigDict(from_attributes=True) - event_types: list[str] | None = None + event_types: list[EventType] | None = None aggregate_id: str | None = None correlation_id: str | None = None user_id: str | None = None @@ -156,7 +157,7 @@ class EventReplayInfo: events: list[DomainEvent] event_count: int - event_types: list[str] + event_types: list[EventType] start_time: datetime end_time: datetime @@ -187,7 +188,7 @@ class EventExportRow(BaseModel): model_config = ConfigDict(from_attributes=True) event_id: str - event_type: str + event_type: EventType timestamp: datetime correlation_id: str aggregate_id: str diff --git a/backend/app/domain/events/typed.py b/backend/app/domain/events/typed.py index 5157be88..d71004f3 100644 --- a/backend/app/domain/events/typed.py +++ b/backend/app/domain/events/typed.py @@ -648,5 +648,19 @@ class ArchivedEvent(AvroBase): Discriminator("event_type"), ] +# Focused union for execution-related events only (for API response typing) +ExecutionDomainEvent = Annotated[ + ExecutionRequestedEvent + | ExecutionAcceptedEvent + | ExecutionQueuedEvent + | ExecutionStartedEvent + | ExecutionRunningEvent + | ExecutionCompletedEvent + | ExecutionFailedEvent + | ExecutionTimeoutEvent + | ExecutionCancelledEvent, + Discriminator("event_type"), +] + # TypeAdapter for polymorphic loading - validates raw data to correct typed event domain_event_adapter: TypeAdapter[DomainEvent] = TypeAdapter(DomainEvent) diff --git a/backend/app/domain/sse/models.py 
b/backend/app/domain/sse/models.py index c585af05..f4452145 100644 --- a/backend/app/domain/sse/models.py +++ b/backend/app/domain/sse/models.py @@ -5,6 +5,7 @@ from pydantic import BaseModel, ConfigDict from app.domain.enums.execution import ExecutionStatus +from app.domain.enums.sse import SSEHealthStatus class ShutdownStatus(BaseModel): @@ -23,7 +24,7 @@ class ShutdownStatus(BaseModel): class SSEHealthDomain(BaseModel): model_config = ConfigDict(from_attributes=True) - status: str + status: SSEHealthStatus kafka_enabled: bool active_connections: int active_executions: int diff --git a/backend/app/domain/user/__init__.py b/backend/app/domain/user/__init__.py index 2df59ec8..0eda945a 100644 --- a/backend/app/domain/user/__init__.py +++ b/backend/app/domain/user/__init__.py @@ -23,6 +23,7 @@ PasswordReset, User, UserCreation, + UserDeleteResult, UserFields, UserFilterType, UserListResult, @@ -48,6 +49,7 @@ "TokenExpiredError", "User", "UserCreation", + "UserDeleteResult", "UserFields", "UserFilterType", "UserListResult", diff --git a/backend/app/domain/user/user_models.py b/backend/app/domain/user/user_models.py index 07c5576d..904db4a5 100644 --- a/backend/app/domain/user/user_models.py +++ b/backend/app/domain/user/user_models.py @@ -55,6 +55,10 @@ class User(BaseModel): hashed_password: str created_at: datetime updated_at: datetime + # Rate limit summary (optional, populated by admin service) + bypass_rate_limit: bool | None = None + global_multiplier: float | None = None + has_custom_limits: bool | None = None class UserUpdate(BaseModel): @@ -149,3 +153,17 @@ class DomainUserUpdate(BaseModel): role: UserRole | None = None is_active: bool | None = None hashed_password: str | None = None + + +class UserDeleteResult(BaseModel): + """Result of deleting a user and optionally cascading to related data.""" + + model_config = ConfigDict(from_attributes=True) + + user_deleted: bool + executions: int = 0 + saved_scripts: int = 0 + notifications: int = 0 + user_settings: 
int = 0 + events: int = 0 + sagas: int = 0 diff --git a/backend/app/events/event_store_consumer.py b/backend/app/events/event_store_consumer.py index 41135a95..1dbdb83c 100644 --- a/backend/app/events/event_store_consumer.py +++ b/backend/app/events/event_store_consumer.py @@ -54,7 +54,7 @@ async def _on_start(self) -> None: self._last_batch_time = asyncio.get_running_loop().time() config = ConsumerConfig( bootstrap_servers=self.settings.KAFKA_BOOTSTRAP_SERVERS, - group_id=f"{self.group_id}.{self.settings.KAFKA_GROUP_SUFFIX}", + group_id=self.group_id, enable_auto_commit=False, max_poll_records=self.batch_size, session_timeout_ms=self.settings.KAFKA_SESSION_TIMEOUT_MS, diff --git a/backend/app/schemas_pydantic/admin_user_overview.py b/backend/app/schemas_pydantic/admin_user_overview.py index 20285c50..b13db19d 100644 --- a/backend/app/schemas_pydantic/admin_user_overview.py +++ b/backend/app/schemas_pydantic/admin_user_overview.py @@ -1,9 +1,10 @@ from __future__ import annotations -from typing import Any, Dict, List +from typing import List from pydantic import BaseModel, ConfigDict +from app.domain.events.typed import DomainEvent from app.schemas_pydantic.events import EventStatistics from app.schemas_pydantic.user import UserResponse @@ -31,6 +32,6 @@ class AdminUserOverview(BaseModel): stats: EventStatistics derived_counts: DerivedCounts rate_limit_summary: RateLimitSummary - recent_events: List[Dict[str, Any]] = [] + recent_events: List[DomainEvent] = [] model_config = ConfigDict(from_attributes=True) diff --git a/backend/app/schemas_pydantic/events.py b/backend/app/schemas_pydantic/events.py index 854c6a21..6425dc08 100644 --- a/backend/app/schemas_pydantic/events.py +++ b/backend/app/schemas_pydantic/events.py @@ -1,16 +1,10 @@ -"""Event-related schemas for REST API endpoints. - -This module contains Pydantic models for event-related API requests and responses. -For Avro-based event schemas used in Kafka streaming, see app.schemas_avro.event_schemas. 
-""" - from datetime import datetime, timedelta, timezone from typing import Any, Dict, List from uuid import uuid4 from pydantic import BaseModel, ConfigDict, Field, field_validator -from app.domain.enums.common import SortOrder +from app.domain.enums.common import Environment, SortOrder from app.domain.enums.events import EventType @@ -34,22 +28,20 @@ class EventMetadataResponse(BaseModel): user_id: str | None = None ip_address: str | None = None user_agent: str | None = None - environment: str = "production" + environment: Environment = Environment.PRODUCTION class EventResponse(BaseModel): - model_config = ConfigDict(from_attributes=True) + """API response schema for events. Captures all event-specific fields via extra='allow'.""" + + model_config = ConfigDict(from_attributes=True, extra="allow") event_id: str event_type: EventType - event_version: str + event_version: str = "1.0" timestamp: datetime aggregate_id: str | None = None - correlation_id: str | None = None - causation_id: str | None = None metadata: EventMetadataResponse - payload: Dict[str, Any] - stored_at: datetime | None = None class EventListResponse(BaseModel): @@ -302,7 +294,7 @@ class ReplayAggregateResponse(BaseModel): dry_run: bool aggregate_id: str event_count: int | None = None - event_types: List[str] | None = None + event_types: List[EventType] | None = None start_time: datetime | None = None end_time: datetime | None = None replayed_count: int | None = None diff --git a/backend/app/schemas_pydantic/execution.py b/backend/app/schemas_pydantic/execution.py index 12226be0..9331baf8 100644 --- a/backend/app/schemas_pydantic/execution.py +++ b/backend/app/schemas_pydantic/execution.py @@ -1,12 +1,10 @@ from __future__ import annotations from datetime import datetime, timezone -from typing import Any from uuid import uuid4 from pydantic import BaseModel, ConfigDict, Field, model_validator -from app.domain.enums.events import EventType from app.domain.enums.execution import ExecutionStatus 
from app.domain.enums.storage import ExecutionErrorType from app.runtime_registry import SUPPORTED_RUNTIMES @@ -149,17 +147,6 @@ class RetryExecutionRequest(BaseModel): preserve_output: bool = Field(False, description="Keep output from previous attempt") -class ExecutionEventResponse(BaseModel): - """Model for execution event response.""" - - event_id: str - event_type: EventType - timestamp: datetime - payload: dict[str, Any] - - model_config = ConfigDict(from_attributes=True) - - class ExecutionListResponse(BaseModel): """Model for paginated execution list.""" diff --git a/backend/app/schemas_pydantic/sse.py b/backend/app/schemas_pydantic/sse.py index 4a4b80da..5b50c02a 100644 --- a/backend/app/schemas_pydantic/sse.py +++ b/backend/app/schemas_pydantic/sse.py @@ -6,7 +6,7 @@ from app.domain.enums.events import EventType from app.domain.enums.execution import ExecutionStatus from app.domain.enums.notification import NotificationSeverity, NotificationStatus -from app.domain.enums.sse import SSEControlEvent, SSENotificationEvent +from app.domain.enums.sse import SSEControlEvent, SSEHealthStatus, SSENotificationEvent from app.schemas_pydantic.execution import ExecutionResult, ResourceUsage # Type variable for generic Redis message parsing @@ -115,7 +115,7 @@ class ShutdownStatusResponse(BaseModel): class SSEHealthResponse(BaseModel): """Response model for SSE health check.""" - status: str = Field(description="Health status: healthy or draining") + status: SSEHealthStatus = Field(description="Health status: healthy or draining") kafka_enabled: bool = Field(True, description="Whether Kafka features are enabled") active_connections: int = Field(description="Total number of active SSE connections") active_executions: int = Field(description="Number of executions being monitored") diff --git a/backend/app/schemas_pydantic/user.py b/backend/app/schemas_pydantic/user.py index 7ef35d84..22a9c889 100644 --- a/backend/app/schemas_pydantic/user.py +++ 
b/backend/app/schemas_pydantic/user.py @@ -5,6 +5,7 @@ from pydantic import BaseModel, ConfigDict, EmailStr, Field from app.domain.enums.user import UserRole +from app.domain.rate_limit import EndpointGroup, RateLimitAlgorithm class UserBase(BaseModel): @@ -138,7 +139,13 @@ class DeleteUserResponse(BaseModel): """Response model for user deletion.""" message: str - deleted_counts: dict[str, int] + user_deleted: bool + executions: int = 0 + saved_scripts: int = 0 + notifications: int = 0 + user_settings: int = 0 + events: int = 0 + sagas: int = 0 model_config = ConfigDict(from_attributes=True) @@ -158,6 +165,28 @@ class RateLimitRuleResponse(BaseModel): model_config = ConfigDict(from_attributes=True) +class RateLimitRuleRequest(BaseModel): + """Request model for rate limit rule.""" + + endpoint_pattern: str + group: EndpointGroup + requests: int + window_seconds: int + algorithm: RateLimitAlgorithm = RateLimitAlgorithm.SLIDING_WINDOW + burst_multiplier: float = 1.5 + priority: int = 0 + enabled: bool = True + + +class RateLimitUpdateRequest(BaseModel): + """Request model for updating user rate limits.""" + + bypass_rate_limit: bool = False + global_multiplier: float = 1.0 + rules: list[RateLimitRuleRequest] = [] + notes: Optional[str] = None + + class UserRateLimitConfigResponse(BaseModel): """Response model for user rate limit config.""" diff --git a/backend/app/services/admin/admin_user_service.py b/backend/app/services/admin/admin_user_service.py index d8975de9..619b2208 100644 --- a/backend/app/services/admin/admin_user_service.py +++ b/backend/app/services/admin/admin_user_service.py @@ -9,7 +9,7 @@ from app.domain.enums.execution import ExecutionStatus from app.domain.enums.user import UserRole from app.domain.rate_limit import RateLimitUpdateResult, UserRateLimit, UserRateLimitsResult -from app.domain.user import DomainUserCreate, PasswordReset, User, UserListResult, UserUpdate +from app.domain.user import DomainUserCreate, PasswordReset, User, 
UserDeleteResult, UserListResult, UserUpdate from app.schemas_pydantic.user import UserCreate from app.services.event_service import EventService from app.services.execution_service import ExecutionService @@ -84,7 +84,7 @@ def _count(status: ExecutionStatus) -> int: ] recent_result = await self._events.get_user_events_paginated( user_id=user_id, - event_types=[str(et) for et in event_types], + event_types=event_types, start_time=start, end_time=now, limit=10, @@ -115,7 +115,20 @@ async def list_users( }, ) - return await self._users.list_users(limit=limit, offset=offset, search=search, role=role) + result = await self._users.list_users(limit=limit, offset=offset, search=search, role=role) + + # Enrich users with rate limit summaries + summaries = await self._rate_limits.get_user_rate_limit_summaries([u.user_id for u in result.users]) + enriched_users = [ + user.model_copy(update={ + "bypass_rate_limit": s.bypass_rate_limit, + "global_multiplier": s.global_multiplier, + "has_custom_limits": s.has_custom_limits, + }) if (s := summaries.get(user.user_id)) else user + for user in result.users + ] + + return UserListResult(users=enriched_users, total=result.total, offset=result.offset, limit=result.limit) async def create_user(self, *, admin_username: str, user_data: UserCreate) -> User: """Create a new user and return domain user.""" @@ -157,17 +170,17 @@ async def update_user(self, *, admin_username: str, user_id: str, update: UserUp ) return await self._users.update_user(user_id, update) - async def delete_user(self, *, admin_username: str, user_id: str, cascade: bool) -> dict[str, int]: + async def delete_user(self, *, admin_username: str, user_id: str, cascade: bool) -> UserDeleteResult: self.logger.info( "Admin deleting user", extra={"admin_username": admin_username, "target_user_id": user_id, "cascade": cascade}, ) # Reset rate limits prior to deletion await self._rate_limits.reset_user_limits(user_id) - deleted_counts = await self._users.delete_user(user_id, 
cascade=cascade) - if deleted_counts.get("user", 0) > 0: + result = await self._users.delete_user(user_id, cascade=cascade) + if result.user_deleted: self.logger.info("User deleted successfully", extra={"target_user_id": user_id}) - return deleted_counts + return result async def reset_user_password(self, *, admin_username: str, user_id: str, new_password: str) -> bool: self.logger.info( diff --git a/backend/app/services/coordinator/coordinator.py b/backend/app/services/coordinator/coordinator.py index 5f93ceb6..6e43e633 100644 --- a/backend/app/services/coordinator/coordinator.py +++ b/backend/app/services/coordinator/coordinator.py @@ -121,7 +121,7 @@ async def _on_start(self) -> None: consumer_config = ConsumerConfig( bootstrap_servers=self.kafka_servers, - group_id=f"{self.consumer_group}.{self._settings.KAFKA_GROUP_SUFFIX}", + group_id=self.consumer_group, enable_auto_commit=False, session_timeout_ms=self._settings.KAFKA_SESSION_TIMEOUT_MS, heartbeat_interval_ms=self._settings.KAFKA_HEARTBEAT_INTERVAL_MS, diff --git a/backend/app/services/event_service.py b/backend/app/services/event_service.py index d44d9d7c..43cf38bb 100644 --- a/backend/app/services/event_service.py +++ b/backend/app/services/event_service.py @@ -89,7 +89,7 @@ async def get_execution_events( async def get_user_events_paginated( self, user_id: str, - event_types: list[str] | None = None, + event_types: list[EventType] | None = None, start_time: datetime | None = None, end_time: datetime | None = None, limit: int = 100, diff --git a/backend/app/services/k8s_worker/worker.py b/backend/app/services/k8s_worker/worker.py index cd9af936..fed3afae 100644 --- a/backend/app/services/k8s_worker/worker.py +++ b/backend/app/services/k8s_worker/worker.py @@ -108,7 +108,7 @@ async def _on_start(self) -> None: # Create consumer configuration consumer_config = ConsumerConfig( bootstrap_servers=self.kafka_servers, - group_id=f"{self.config.consumer_group}.{self._settings.KAFKA_GROUP_SUFFIX}", + 
group_id=self.config.consumer_group, enable_auto_commit=False, session_timeout_ms=self._settings.KAFKA_SESSION_TIMEOUT_MS, heartbeat_interval_ms=self._settings.KAFKA_HEARTBEAT_INTERVAL_MS, diff --git a/backend/app/services/notification_service.py b/backend/app/services/notification_service.py index 780f1279..53491939 100644 --- a/backend/app/services/notification_service.py +++ b/backend/app/services/notification_service.py @@ -221,7 +221,7 @@ async def _subscribe_to_events(self) -> None: # Configure consumer for notification-relevant events consumer_config = ConsumerConfig( bootstrap_servers=self.settings.KAFKA_BOOTSTRAP_SERVERS, - group_id=f"{GroupId.NOTIFICATION_SERVICE}.{self.settings.KAFKA_GROUP_SUFFIX}", + group_id=GroupId.NOTIFICATION_SERVICE, max_poll_records=10, enable_auto_commit=True, auto_offset_reset="latest", diff --git a/backend/app/services/result_processor/processor.py b/backend/app/services/result_processor/processor.py index 3f9864db..d430804a 100644 --- a/backend/app/services/result_processor/processor.py +++ b/backend/app/services/result_processor/processor.py @@ -120,7 +120,7 @@ async def _create_consumer(self) -> IdempotentConsumerWrapper: """Create and configure idempotent Kafka consumer.""" consumer_config = ConsumerConfig( bootstrap_servers=self._settings.KAFKA_BOOTSTRAP_SERVERS, - group_id=f"{self.config.consumer_group}.{self._settings.KAFKA_GROUP_SUFFIX}", + group_id=self.config.consumer_group, max_poll_records=1, enable_auto_commit=True, auto_offset_reset="earliest", diff --git a/backend/app/services/result_processor/resource_cleaner.py b/backend/app/services/result_processor/resource_cleaner.py index db6ff518..2ce67695 100644 --- a/backend/app/services/result_processor/resource_cleaner.py +++ b/backend/app/services/result_processor/resource_cleaner.py @@ -5,10 +5,10 @@ from typing import Any from kubernetes import client as k8s_client -from kubernetes import config as k8s_config from kubernetes.client.rest import ApiException -from 
app.domain.exceptions import InfrastructureError, InvalidStateError +from app.core.k8s_clients import K8sClients +from app.domain.exceptions import InfrastructureError # Python 3.12 type aliases type ResourceDict = dict[str, list[str]] @@ -16,34 +16,15 @@ class ResourceCleaner: - """Service for cleaning up Kubernetes resources""" + """Service for cleaning up Kubernetes resources. - def __init__(self, logger: logging.Logger) -> None: - self.v1: k8s_client.CoreV1Api | None = None - self.networking_v1: k8s_client.NetworkingV1Api | None = None - self._initialized = False - self.logger = logger - - async def initialize(self) -> None: - """Initialize Kubernetes clients""" - if self._initialized: - return - - try: - try: - k8s_config.load_incluster_config() - self.logger.info("Using in-cluster Kubernetes config") - except k8s_config.ConfigException: - k8s_config.load_kube_config() - self.logger.info("Using kubeconfig") + Accepts K8sClients via dependency injection for proper configuration management. 
+ """ - self.v1 = k8s_client.CoreV1Api() - self.networking_v1 = k8s_client.NetworkingV1Api() - self._initialized = True - - except Exception as e: - self.logger.error(f"Failed to initialize Kubernetes client: {e}") - raise InfrastructureError(f"Kubernetes initialization failed: {e}") from e + def __init__(self, k8s_clients: K8sClients, logger: logging.Logger) -> None: + self.v1: k8s_client.CoreV1Api = k8s_clients.v1 + self.networking_v1: k8s_client.NetworkingV1Api = k8s_clients.networking_v1 + self.logger = logger async def cleanup_pod_resources( self, @@ -54,7 +35,6 @@ async def cleanup_pod_resources( delete_pvcs: bool = False, ) -> None: """Clean up all resources associated with a pod""" - await self.initialize() self.logger.info(f"Cleaning up resources for pod: {pod_name}") try: @@ -83,9 +63,6 @@ async def cleanup_pod_resources( async def _delete_pod(self, pod_name: str, namespace: str) -> None: """Delete a pod""" - if not self.v1: - raise InvalidStateError("Kubernetes client not initialized") - try: loop = asyncio.get_running_loop() await loop.run_in_executor(None, self.v1.read_namespaced_pod, pod_name, namespace) @@ -105,9 +82,6 @@ async def _delete_pod(self, pod_name: str, namespace: str) -> None: async def _delete_configmaps(self, execution_id: str, namespace: str) -> None: """Delete ConfigMaps for an execution""" - if not self.v1: - raise InvalidStateError("Kubernetes client not initialized") - await self._delete_labeled_resources( execution_id, namespace, @@ -118,9 +92,6 @@ async def _delete_configmaps(self, execution_id: str, namespace: str) -> None: async def _delete_pvcs(self, execution_id: str, namespace: str) -> None: """Delete PersistentVolumeClaims for an execution""" - if not self.v1: - raise InvalidStateError("Kubernetes client not initialized") - await self._delete_labeled_resources( execution_id, namespace, @@ -153,8 +124,6 @@ async def cleanup_orphaned_resources( dry_run: bool = False, ) -> ResourceDict: """Clean up orphaned resources older 
than specified age""" - await self.initialize() - cutoff_time = datetime.now(timezone.utc) - timedelta(hours=max_age_hours) cleaned: ResourceDict = { "pods": [], @@ -176,9 +145,6 @@ async def _cleanup_orphaned_pods( self, namespace: str, cutoff_time: datetime, cleaned: ResourceDict, dry_run: bool ) -> None: """Clean up orphaned pods""" - if not self.v1: - raise InvalidStateError("Kubernetes client not initialized") - loop = asyncio.get_running_loop() pods = await loop.run_in_executor( None, partial(self.v1.list_namespaced_pod, namespace, label_selector="app=integr8s") @@ -203,9 +169,6 @@ async def _cleanup_orphaned_configmaps( self, namespace: str, cutoff_time: datetime, cleaned: ResourceDict, dry_run: bool ) -> None: """Clean up orphaned ConfigMaps""" - if not self.v1: - raise InvalidStateError("Kubernetes client not initialized") - loop = asyncio.get_running_loop() configmaps = await loop.run_in_executor( None, partial(self.v1.list_namespaced_config_map, namespace, label_selector="app=integr8s") @@ -225,8 +188,6 @@ async def _cleanup_orphaned_configmaps( async def get_resource_usage(self, namespace: str = "default") -> CountDict: """Get current resource usage counts""" - await self.initialize() - loop = asyncio.get_running_loop() label_selector = "app=integr8s" @@ -235,9 +196,6 @@ async def get_resource_usage(self, namespace: str = "default") -> CountDict: try: # Get pods count try: - if not self.v1: - raise InvalidStateError("Kubernetes client not initialized") - pods = await loop.run_in_executor( None, partial(self.v1.list_namespaced_pod, namespace, label_selector=label_selector) ) @@ -248,9 +206,6 @@ async def get_resource_usage(self, namespace: str = "default") -> CountDict: # Get configmaps count try: - if not self.v1: - raise InvalidStateError("Kubernetes client not initialized") - configmaps = await loop.run_in_executor( None, partial(self.v1.list_namespaced_config_map, namespace, label_selector=label_selector) ) @@ -261,9 +216,6 @@ async def 
get_resource_usage(self, namespace: str = "default") -> CountDict: # Get network policies count try: - if not self.networking_v1: - raise InvalidStateError("Kubernetes networking client not initialized") - policies = await loop.run_in_executor( None, partial( diff --git a/backend/app/services/saga/saga_orchestrator.py b/backend/app/services/saga/saga_orchestrator.py index 194d6ac3..eee96ad0 100644 --- a/backend/app/services/saga/saga_orchestrator.py +++ b/backend/app/services/saga/saga_orchestrator.py @@ -120,7 +120,7 @@ async def _start_consumer(self) -> None: consumer_config = ConsumerConfig( bootstrap_servers=self._settings.KAFKA_BOOTSTRAP_SERVERS, - group_id=f"saga-{self.config.name}.{self._settings.KAFKA_GROUP_SUFFIX}", + group_id=f"saga-{self.config.name}", enable_auto_commit=False, session_timeout_ms=self._settings.KAFKA_SESSION_TIMEOUT_MS, heartbeat_interval_ms=self._settings.KAFKA_HEARTBEAT_INTERVAL_MS, diff --git a/backend/app/services/saved_script_service.py b/backend/app/services/saved_script_service.py index adedd344..8c8343ed 100644 --- a/backend/app/services/saved_script_service.py +++ b/backend/app/services/saved_script_service.py @@ -94,7 +94,13 @@ async def delete_saved_script(self, script_id: str, user_id: str) -> None: }, ) - await self.saved_script_repo.delete_saved_script(script_id, user_id) + deleted = await self.saved_script_repo.delete_saved_script(script_id, user_id) + if not deleted: + self.logger.warning( + "Script not found for user", + extra={"user_id": user_id, "script_id": script_id}, + ) + raise SavedScriptNotFoundError(script_id) self.logger.info( "Successfully deleted script", diff --git a/backend/app/services/sse/kafka_redis_bridge.py b/backend/app/services/sse/kafka_redis_bridge.py index 07e03c44..0a4eb780 100644 --- a/backend/app/services/sse/kafka_redis_bridge.py +++ b/backend/app/services/sse/kafka_redis_bridge.py @@ -63,11 +63,10 @@ async def _on_stop(self) -> None: def _build_consumer(self, consumer_index: int) -> 
UnifiedConsumer: """Build a consumer instance without starting it.""" - suffix = self.settings.KAFKA_GROUP_SUFFIX config = ConsumerConfig( bootstrap_servers=self.settings.KAFKA_BOOTSTRAP_SERVERS, - group_id=f"sse-bridge-pool.{suffix}", - client_id=f"sse-bridge-{consumer_index}.{suffix}", + group_id="sse-bridge-pool", + client_id=f"sse-bridge-{consumer_index}", enable_auto_commit=True, auto_offset_reset="latest", max_poll_interval_ms=self.settings.KAFKA_MAX_POLL_INTERVAL_MS, diff --git a/backend/app/services/sse/sse_service.py b/backend/app/services/sse/sse_service.py index e474fc41..3d993a38 100644 --- a/backend/app/services/sse/sse_service.py +++ b/backend/app/services/sse/sse_service.py @@ -7,7 +7,7 @@ from app.core.metrics import ConnectionMetrics from app.db.repositories.sse_repository import SSERepository from app.domain.enums.events import EventType -from app.domain.enums.sse import SSEControlEvent, SSENotificationEvent +from app.domain.enums.sse import SSEControlEvent, SSEHealthStatus, SSENotificationEvent from app.domain.sse import SSEHealthDomain from app.schemas_pydantic.execution import ExecutionResult from app.schemas_pydantic.sse import ( @@ -261,7 +261,7 @@ async def create_notification_stream(self, user_id: str) -> AsyncGenerator[Dict[ async def get_health_status(self) -> SSEHealthDomain: router_stats = self.router.get_stats() return SSEHealthDomain( - status="draining" if self.shutdown_manager.is_shutting_down() else "healthy", + status=SSEHealthStatus.DRAINING if self.shutdown_manager.is_shutting_down() else SSEHealthStatus.HEALTHY, kafka_enabled=True, active_connections=router_stats["active_executions"], active_executions=router_stats["active_executions"], diff --git a/backend/app/settings.py b/backend/app/settings.py index fd510051..0a5047dd 100644 --- a/backend/app/settings.py +++ b/backend/app/settings.py @@ -51,7 +51,6 @@ class Settings(BaseSettings): # Event-Driven Design Configuration KAFKA_BOOTSTRAP_SERVERS: str = "kafka:29092" - 
KAFKA_GROUP_SUFFIX: str = "suff" # Suffix to append to consumer group IDs for test/parallel isolation SCHEMA_REGISTRY_URL: str = "http://schema-registry:8081" SCHEMA_REGISTRY_AUTH: str = Field( default="", diff --git a/backend/pyproject.toml b/backend/pyproject.toml index febd8c01..375b8c95 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -198,7 +198,6 @@ python_files = ["test_*.py"] python_classes = ["Test*"] python_functions = ["test_*"] markers = [ - "integration: marks tests as integration tests", "e2e: marks tests as end-to-end tests requiring full system", "unit: marks tests as unit tests", "slow: marks tests as slow running", @@ -206,7 +205,8 @@ markers = [ "mongodb: marks tests as requiring MongoDB", "redis: marks tests as requiring Redis", "k8s: marks tests as requiring Kubernetes", - "performance: marks tests as performance tests" + "performance: marks tests as performance tests", + "admin: marks tests as admin-only functionality tests", ] asyncio_mode = "auto" asyncio_default_fixture_loop_scope = "session" diff --git a/backend/tests/conftest.py b/backend/tests/conftest.py index aeadecd4..bb96bf9d 100644 --- a/backend/tests/conftest.py +++ b/backend/tests/conftest.py @@ -1,7 +1,6 @@ -import asyncio import os import uuid -from contextlib import asynccontextmanager +from collections.abc import Iterable from typing import AsyncGenerator import httpx @@ -9,72 +8,30 @@ import pytest_asyncio import redis.asyncio as redis from app.core.database_context import Database +from app.domain.events.typed import EventMetadata, ExecutionRequestedEvent from app.main import create_app from app.settings import Settings from dishka import AsyncContainer from fastapi import FastAPI from httpx import ASGITransport -from scripts.create_topics import create_topics - -# ===== Worker-specific isolation for pytest-xdist ===== -# Supports both xdist workers AND multiple independent pytest processes. 
-# -# TEST_RUN_ID: Unique identifier for this pytest process (set by CI or auto-generated). -# Allows running backend-integration, backend-e2e, frontend-e2e in parallel. -# PYTEST_XDIST_WORKER: Worker ID within a single pytest-xdist run (gw0, gw1, etc.) -# -# Combined, these give full isolation: each test worker in each pytest process is unique. -_RUN_ID = os.environ.get("TEST_RUN_ID") or uuid.uuid4().hex[:8] -_WORKER_ID = os.environ.get("PYTEST_XDIST_WORKER", "gw0") -_WORKER_NUM = int(_WORKER_ID.removeprefix("gw") or "0") -_ISOLATION_KEY = f"{_RUN_ID}_{_WORKER_ID}" - - -# ===== Pytest hooks ===== -@pytest.hookimpl(trylast=True) -def pytest_configure() -> None: - """Create Kafka topics once in master process before xdist workers spawn.""" - # PYTEST_XDIST_WORKER is only set in workers, not master - if os.environ.get("PYTEST_XDIST_WORKER"): - return - try: - asyncio.run(create_topics(Settings(_env_file=".env.test"))) - except Exception: - pass # Kafka unavailable (unit tests) - -# ===== Settings fixture ===== -@pytest.fixture(scope="session") -def test_settings() -> Settings: - """Provide test settings with per-worker isolation where needed. - Uses pydantic-settings _env_file parameter to load .env.test at instantiation, - overriding the class-level default of .env. 
+def _get_worker_num() -> int: + """Get numeric pytest-xdist worker ID for Redis DB selection (0-15).""" + wid = os.environ.get("PYTEST_XDIST_WORKER", "main") + return 0 if wid == "main" else int(wid.removeprefix("gw")) - What gets isolated per worker (to prevent interference): - - DATABASE_NAME: Each worker gets its own MongoDB database - - REDIS_DB: Each worker gets its own Redis database (0-15, hash-distributed) - - KAFKA_GROUP_SUFFIX: Each worker gets unique consumer groups - What's SHARED (from env, no per-worker suffix): - - KAFKA_TOPIC_PREFIX: Topics created once by CI/scripts - - SCHEMA_SUBJECT_PREFIX: Schemas shared across workers +@pytest.fixture(scope="session") +def test_settings() -> Settings: + """Test settings with per-worker Redis DB isolation. - Isolation works across: - - xdist workers within a single pytest process (gw0, gw1, ...) - - Multiple independent pytest processes (via TEST_RUN_ID or auto-UUID) + - MongoDB: Shared database, tests use UUIDs for entity isolation + - Kafka: Tests with consumers use xdist_group markers for serial execution + - Redis: Per-worker DB number (0-15) to avoid key collisions """ base = Settings(_env_file=".env.test") - # Deterministic Redis DB: worker number + ASCII sum of RUN_ID (no hash randomization) - redis_db = (_WORKER_NUM + sum(ord(c) for c in _RUN_ID)) % 16 - return base.model_copy( - update={ - # Per-worker isolation - uses _ISOLATION_KEY which includes RUN_ID + WORKER_ID - "DATABASE_NAME": f"integr8scode_test_{_ISOLATION_KEY}", - "REDIS_DB": redis_db, - "KAFKA_GROUP_SUFFIX": _ISOLATION_KEY, - } - ) + return base.model_copy(update={"REDIS_DB": _get_worker_num() % 16}) # ===== App fixture ===== @@ -88,25 +45,14 @@ async def app(test_settings: Settings) -> AsyncGenerator[FastAPI, None]: Uses lifespan_context to trigger startup/shutdown events, which initializes Beanie, metrics, and other services through the normal DI flow. - Cleanup: Best-effort drop of test database. 
May not always succeed due to - known MongoDB driver behavior when client stays connected, but ulimits on - MongoDB container (65536) prevent file descriptor exhaustion regardless. + Note: Database is shared across all tests and workers. Tests use unique IDs + so they don't conflict. Periodic cleanup of stale test data can be done + outside of tests if needed. """ application = create_app(settings=test_settings) async with application.router.lifespan_context(application): yield application - # Best-effort cleanup (may fail silently due to MongoDB driver behavior) - container: AsyncContainer = application.state.dishka_container - db: Database = await container.get(Database) - await db.client.drop_database(test_settings.DATABASE_NAME) - - -@pytest_asyncio.fixture(scope="session") -async def app_container(app: FastAPI) -> AsyncContainer: - """Expose the Dishka container attached to the app.""" - container: AsyncContainer = app.state.dishka_container - return container @pytest_asyncio.fixture @@ -121,15 +67,11 @@ async def client(app: FastAPI) -> AsyncGenerator[httpx.AsyncClient, None]: yield c -@asynccontextmanager -async def _container_scope(container: AsyncContainer) -> AsyncGenerator[AsyncContainer, None]: - async with container() as scope: - yield scope - - @pytest_asyncio.fixture -async def scope(app_container: AsyncContainer) -> AsyncGenerator[AsyncContainer, None]: - async with _container_scope(app_container) as s: +async def scope(app: FastAPI) -> AsyncGenerator[AsyncContainer, None]: + """Create a Dishka scope for resolving dependencies in tests.""" + container: AsyncContainer = app.state.dishka_container + async with container() as s: yield s @@ -235,3 +177,52 @@ async def another_user(app: FastAPI) -> AsyncGenerator[httpx.AsyncClient, None]: ) yield c await c.aclose() + + +# ===== Event factories ===== + + +def make_execution_requested_event( + *, + execution_id: str | None = None, + script: str = "print('hello')", + language: str = "python", + 
language_version: str = "3.11", + runtime_image: str = "python:3.11-slim", + runtime_command: Iterable[str] = ("python",), + runtime_filename: str = "main.py", + timeout_seconds: int = 5, + cpu_limit: str = "100m", + memory_limit: str = "128Mi", + cpu_request: str = "50m", + memory_request: str = "64Mi", + priority: int = 5, + service_name: str = "tests", + service_version: str = "1.0.0", + user_id: str | None = None, +) -> ExecutionRequestedEvent: + """Factory for ExecutionRequestedEvent with sensible defaults. + + Override any field via keyword args. If no execution_id is provided, a random one is generated. + """ + if execution_id is None: + execution_id = f"exec-{uuid.uuid4().hex[:8]}" + + metadata = EventMetadata(service_name=service_name, service_version=service_version, user_id=user_id) + return ExecutionRequestedEvent( + execution_id=execution_id, + aggregate_id=execution_id, # Match production: aggregate_id == execution_id for execution events + script=script, + language=language, + language_version=language_version, + runtime_image=runtime_image, + runtime_command=list(runtime_command), + runtime_filename=runtime_filename, + timeout_seconds=timeout_seconds, + cpu_limit=cpu_limit, + memory_limit=memory_limit, + cpu_request=cpu_request, + memory_request=memory_request, + priority=priority, + metadata=metadata, + ) diff --git a/backend/tests/integration/app/__init__.py b/backend/tests/e2e/app/__init__.py similarity index 100% rename from backend/tests/integration/app/__init__.py rename to backend/tests/e2e/app/__init__.py diff --git a/backend/tests/e2e/app/test_main_app.py b/backend/tests/e2e/app/test_main_app.py new file mode 100644 index 00000000..83aff9c8 --- /dev/null +++ b/backend/tests/e2e/app/test_main_app.py @@ -0,0 +1,327 @@ +import logging +from importlib import import_module +from typing import Any + +import pytest +import redis.asyncio as aioredis +from app.core.database_context import Database +from app.domain.exceptions import DomainError +from 
app.events.schema.schema_registry import SchemaRegistryManager +from app.settings import Settings +from dishka import AsyncContainer +from fastapi import FastAPI +from starlette.routing import Mount, Route + +pytestmark = pytest.mark.e2e + + +class TestAppInstance: + """Tests for FastAPI app instance creation.""" + + def test_app_is_fastapi_instance(self, app: FastAPI) -> None: + """App is a FastAPI instance.""" + assert isinstance(app, FastAPI) + + def test_app_title_matches_settings( + self, app: FastAPI, test_settings: Settings + ) -> None: + """App title matches PROJECT_NAME from settings.""" + assert app.title == test_settings.PROJECT_NAME + + def test_openapi_disabled_for_security(self, app: FastAPI) -> None: + """OpenAPI/docs endpoints are disabled in production mode.""" + # OpenAPI is disabled in create_app for security + assert app.openapi_url is None + assert app.docs_url is None + assert app.redoc_url is None + + +class TestRouterConfiguration: + """Tests for API router registration.""" + + def test_api_routes_registered(self, app: FastAPI) -> None: + """API routes are registered under /api/ prefix.""" + paths = {r.path for r in app.router.routes if isinstance(r, Route)} + assert any(p.startswith("/api/") for p in paths) + + def test_health_routes_registered(self, app: FastAPI) -> None: + """Health check routes are registered.""" + paths = self._get_all_paths(app) + assert "/api/v1/health/live" in paths + assert "/api/v1/health/ready" in paths + + def test_auth_routes_registered(self, app: FastAPI) -> None: + """Authentication routes are registered.""" + paths = self._get_all_paths(app) + assert "/api/v1/auth/login" in paths + assert "/api/v1/auth/register" in paths + assert "/api/v1/auth/logout" in paths + assert "/api/v1/auth/me" in paths + + def test_execution_routes_registered(self, app: FastAPI) -> None: + """Execution routes are registered.""" + paths = self._get_all_paths(app) + assert "/api/v1/execute" in paths + assert "/api/v1/user/executions" 
in paths + assert "/api/v1/k8s-limits" in paths + assert "/api/v1/example-scripts" in paths + + def test_saved_scripts_routes_registered(self, app: FastAPI) -> None: + """Saved scripts routes are registered.""" + paths = self._get_all_paths(app) + assert "/api/v1/scripts" in paths + + def test_user_settings_routes_registered(self, app: FastAPI) -> None: + """User settings routes are registered.""" + paths = self._get_all_paths(app) + assert "/api/v1/user/settings" in paths + + def test_notifications_routes_registered(self, app: FastAPI) -> None: + """Notification routes are registered.""" + paths = self._get_all_paths(app) + assert "/api/v1/notifications" in paths + + def test_saga_routes_registered(self, app: FastAPI) -> None: + """Saga routes are registered.""" + paths = self._get_all_paths(app) + assert "/api/v1/sagas" in paths + + def test_replay_routes_registered(self, app: FastAPI) -> None: + """Replay routes are registered (admin only).""" + paths = self._get_all_paths(app) + assert "/api/v1/replay/sessions" in paths + + def test_dlq_routes_registered(self, app: FastAPI) -> None: + """DLQ routes are registered.""" + paths = self._get_all_paths(app) + assert "/api/v1/dlq/messages" in paths + + def test_events_routes_registered(self, app: FastAPI) -> None: + """Events routes are registered.""" + paths = self._get_all_paths(app) + # SSE endpoint + assert any("/api/v1/events" in p for p in paths) + + def test_admin_routes_registered(self, app: FastAPI) -> None: + """Admin routes are registered.""" + paths = self._get_all_paths(app) + assert "/api/v1/admin/users" in paths + assert "/api/v1/admin/settings" in paths + assert "/api/v1/admin/events" in paths + + def _get_all_paths(self, app: FastAPI) -> set[str]: + """Extract all route paths from app, including mounted routers.""" + paths: set[str] = set() + for route in app.router.routes: + if isinstance(route, Route): + paths.add(route.path) + elif isinstance(route, Mount) and route.routes is not None: + # For 
mounted routers, combine mount path with route paths + for sub_route in route.routes: + if isinstance(sub_route, Route): + paths.add(f"{route.path}{sub_route.path}") + return paths + + +class TestMiddlewareStack: + """Tests for middleware configuration.""" + + def test_cors_middleware_configured(self, app: FastAPI) -> None: + """CORS middleware is configured.""" + middleware_classes = self._get_middleware_class_names(app) + assert "CORSMiddleware" in middleware_classes + + def test_correlation_middleware_configured(self, app: FastAPI) -> None: + """Correlation ID middleware is configured.""" + middleware_classes = self._get_middleware_class_names(app) + assert "CorrelationMiddleware" in middleware_classes + + def test_request_size_limit_middleware_configured(self, app: FastAPI) -> None: + """Request size limit middleware is configured.""" + middleware_classes = self._get_middleware_class_names(app) + assert "RequestSizeLimitMiddleware" in middleware_classes + + def test_cache_control_middleware_configured(self, app: FastAPI) -> None: + """Cache control middleware is configured.""" + middleware_classes = self._get_middleware_class_names(app) + assert "CacheControlMiddleware" in middleware_classes + + def test_metrics_middleware_configured(self, app: FastAPI) -> None: + """Metrics middleware is configured.""" + middleware_classes = self._get_middleware_class_names(app) + assert "MetricsMiddleware" in middleware_classes + + def test_rate_limit_middleware_configured(self, app: FastAPI) -> None: + """Rate limit middleware is configured.""" + middleware_classes = self._get_middleware_class_names(app) + assert "RateLimitMiddleware" in middleware_classes + + def test_csrf_middleware_configured(self, app: FastAPI) -> None: + """CSRF middleware is configured.""" + middleware_classes = self._get_middleware_class_names(app) + assert "CSRFMiddleware" in middleware_classes + + def test_middleware_count(self, app: FastAPI) -> None: + """Expected number of middlewares are 
configured.""" + # CORS, Correlation, RequestSizeLimit, CacheControl, Metrics, RateLimit, CSRF + middleware_classes = self._get_middleware_class_names(app) + expected_middlewares = { + "CORSMiddleware", + "CorrelationMiddleware", + "RequestSizeLimitMiddleware", + "CacheControlMiddleware", + "MetricsMiddleware", + "RateLimitMiddleware", + "CSRFMiddleware", + } + assert expected_middlewares.issubset(middleware_classes) + + def _get_middleware_class_names(self, app: FastAPI) -> set[str]: + """Get set of middleware class names from app.""" + return { + getattr(m.cls, "__name__", str(m.cls)) for m in app.user_middleware + } + + +class TestCorsConfiguration: + """Tests for CORS middleware configuration.""" + + def test_cors_allows_localhost_origins(self, app: FastAPI) -> None: + """CORS allows localhost origins for development.""" + cors_kwargs = self._get_cors_kwargs(app) + assert cors_kwargs is not None + + # Check allowed origins + allowed = cors_kwargs.get("allow_origins", []) + assert "https://localhost:5001" in allowed + assert "https://127.0.0.1:5001" in allowed + assert "https://localhost" in allowed + + def test_cors_allows_credentials(self, app: FastAPI) -> None: + """CORS allows credentials for cookie-based auth.""" + cors_kwargs = self._get_cors_kwargs(app) + assert cors_kwargs is not None + assert cors_kwargs.get("allow_credentials") is True + + def test_cors_allows_required_methods(self, app: FastAPI) -> None: + """CORS allows required HTTP methods.""" + cors_kwargs = self._get_cors_kwargs(app) + assert cors_kwargs is not None + + methods = cors_kwargs.get("allow_methods", []) + assert "GET" in methods + assert "POST" in methods + assert "PUT" in methods + assert "DELETE" in methods + + def test_cors_allows_required_headers(self, app: FastAPI) -> None: + """CORS allows required headers.""" + cors_kwargs = self._get_cors_kwargs(app) + assert cors_kwargs is not None + + headers = cors_kwargs.get("allow_headers", []) + assert "Authorization" in headers + 
assert "Content-Type" in headers + assert "X-CSRF-Token" in headers + assert "X-Correlation-ID" in headers + + def test_cors_exposes_correlation_header(self, app: FastAPI) -> None: + """CORS exposes X-Correlation-ID header to clients.""" + cors_kwargs = self._get_cors_kwargs(app) + assert cors_kwargs is not None + + exposed = cors_kwargs.get("expose_headers", []) + assert "X-Correlation-ID" in exposed + + def _get_cors_kwargs(self, app: FastAPI) -> dict[str, Any] | None: + """Get CORS middleware kwargs from app.""" + for m in app.user_middleware: + if getattr(m.cls, "__name__", "") == "CORSMiddleware": + return dict(m.kwargs) + return None + + +class TestDishkaContainer: + """Tests for Dishka DI container configuration.""" + + def test_container_attached_to_app_state(self, app: FastAPI) -> None: + """Dishka container is attached to app.state.""" + assert hasattr(app.state, "dishka_container") + assert app.state.dishka_container is not None + + def test_container_is_async_container(self, app: FastAPI) -> None: + """Dishka container is an AsyncContainer.""" + assert isinstance(app.state.dishka_container, AsyncContainer) + + @pytest.mark.asyncio + async def test_container_resolves_settings(self, scope: AsyncContainer) -> None: + """Container can resolve Settings.""" + settings = await scope.get(Settings) + assert isinstance(settings, Settings) + + @pytest.mark.asyncio + async def test_container_resolves_logger(self, scope: AsyncContainer) -> None: + """Container can resolve Logger.""" + logger = await scope.get(logging.Logger) + assert isinstance(logger, logging.Logger) + + +class TestExceptionHandlers: + """Tests for exception handler configuration.""" + + def test_domain_error_handler_registered(self, app: FastAPI) -> None: + """DomainError exception handler is registered.""" + # Exception handlers are stored in app.exception_handlers + assert DomainError in app.exception_handlers + + +class TestLifespanInitialization: + """Tests for app state after lifespan 
initialization.""" + + @pytest.mark.asyncio + async def test_beanie_initialized(self, scope: AsyncContainer) -> None: + """Beanie ODM is initialized with document models.""" + database = await scope.get(Database) + assert database is not None + # Beanie stores document models after init + # If not initialized, getting a collection would fail + + @pytest.mark.asyncio + async def test_redis_connected(self, scope: AsyncContainer) -> None: + """Redis client is connected and functional.""" + redis_client = await scope.get(aioredis.Redis) + # Ping returns a coroutine for async client + pong = await redis_client.ping() # type: ignore[misc] + assert pong is True + + @pytest.mark.asyncio + async def test_schema_registry_initialized(self, scope: AsyncContainer) -> None: + """Schema registry manager is initialized.""" + schema_registry = await scope.get(SchemaRegistryManager) + assert schema_registry is not None + + +class TestCreateAppFunction: + """Tests for create_app factory function.""" + + def test_create_app_returns_fastapi(self, test_settings: Settings) -> None: + """create_app returns a FastAPI instance.""" + create_app = import_module("app.main").create_app + instance = create_app(settings=test_settings) + assert isinstance(instance, FastAPI) + + def test_create_app_uses_provided_settings( + self, test_settings: Settings + ) -> None: + """create_app uses provided settings instead of loading from env.""" + create_app = import_module("app.main").create_app + instance = create_app(settings=test_settings) + assert instance.title == test_settings.PROJECT_NAME + + def test_create_app_without_settings_uses_defaults(self) -> None: + """create_app without settings argument creates default Settings.""" + create_app = import_module("app.main").create_app + # This will create a Settings() from env/defaults + # Just verify it doesn't crash + instance = create_app() + assert isinstance(instance, FastAPI) diff --git a/backend/tests/e2e/conftest.py b/backend/tests/e2e/conftest.py 
index 648dfaef..29b7a9e9 100644 --- a/backend/tests/e2e/conftest.py +++ b/backend/tests/e2e/conftest.py @@ -1,19 +1,100 @@ -from collections.abc import AsyncGenerator +import uuid +import pytest import pytest_asyncio -import redis.asyncio as redis -from app.core.database_context import Database +from app.domain.enums.user import UserRole +from app.schemas_pydantic.execution import ExecutionRequest, ExecutionResponse +from app.schemas_pydantic.saved_script import SavedScriptCreateRequest +from app.schemas_pydantic.user import UserCreate +from httpx import AsyncClient -from tests.helpers.cleanup import cleanup_db_and_redis +# --- Request fixtures --- -@pytest_asyncio.fixture(autouse=True) -async def _cleanup(db: Database, redis_client: redis.Redis) -> AsyncGenerator[None, None]: - """Clean DB and Redis before each E2E test. +@pytest.fixture +def simple_execution_request() -> ExecutionRequest: + """Simple python print execution.""" + return ExecutionRequest(script="print('test')", lang="python", lang_version="3.11") - Only pre-test cleanup - post-test cleanup causes event loop issues - when SSE/streaming tests hold connections across loop boundaries. 
- """ - await cleanup_db_and_redis(db, redis_client) - yield - # No post-test cleanup to avoid "Event loop is closed" errors + +@pytest.fixture +def long_running_execution_request() -> ExecutionRequest: + """30 second sleep execution.""" + return ExecutionRequest( + script="import time; time.sleep(30); print('done')", + lang="python", + lang_version="3.11", + ) + + +@pytest.fixture +def error_execution_request() -> ExecutionRequest: + """Execution that raises an error.""" + return ExecutionRequest( + script="raise ValueError('test error')", + lang="python", + lang_version="3.11", + ) + + +@pytest.fixture +def new_user_request() -> UserCreate: + """Unique user registration request.""" + uid = uuid.uuid4().hex[:8] + return UserCreate( + username=f"user_{uid}", + email=f"user_{uid}@test.com", + password="SecurePass123!", + role=UserRole.USER, + ) + + +@pytest.fixture +def new_admin_request() -> UserCreate: + """Unique admin registration request.""" + uid = uuid.uuid4().hex[:8] + return UserCreate( + username=f"admin_{uid}", + email=f"admin_{uid}@test.com", + password="SecurePass123!", + role=UserRole.ADMIN, + ) + + +@pytest.fixture +def new_script_request() -> SavedScriptCreateRequest: + """Unique saved script request.""" + uid = uuid.uuid4().hex[:8] + return SavedScriptCreateRequest( + name=f"Script {uid}", + script="print('hello')", + lang="python", + lang_version="3.11", + ) + + +# --- Created resource fixtures --- + + +@pytest_asyncio.fixture +async def created_execution( + test_user: AsyncClient, simple_execution_request: ExecutionRequest +) -> ExecutionResponse: + """Execution created by test_user.""" + resp = await test_user.post( + "/api/v1/execute", json=simple_execution_request.model_dump() + ) + assert resp.status_code == 200 + return ExecutionResponse.model_validate(resp.json()) + + +@pytest_asyncio.fixture +async def created_execution_admin( + test_admin: AsyncClient, simple_execution_request: ExecutionRequest +) -> ExecutionResponse: + """Execution 
created by test_admin.""" + resp = await test_admin.post( + "/api/v1/execute", json=simple_execution_request.model_dump() + ) + assert resp.status_code == 200 + return ExecutionResponse.model_validate(resp.json()) diff --git a/backend/tests/e2e/core/test_container.py b/backend/tests/e2e/core/test_container.py new file mode 100644 index 00000000..1e0ce4b5 --- /dev/null +++ b/backend/tests/e2e/core/test_container.py @@ -0,0 +1,207 @@ +import logging + +import pytest +import redis.asyncio as aioredis +from app.core.database_context import Database +from app.core.security import SecurityService +from app.events.schema.schema_registry import SchemaRegistryManager +from app.services.event_service import EventService +from app.services.execution_service import ExecutionService +from app.services.notification_service import NotificationService +from app.services.rate_limit_service import RateLimitService +from app.services.replay_service import ReplayService +from app.services.saved_script_service import SavedScriptService +from app.services.user_service import UserService +from app.services.user_settings_service import UserSettingsService +from app.settings import Settings +from dishka import AsyncContainer + +pytestmark = [pytest.mark.e2e, pytest.mark.mongodb] + + +class TestCoreInfrastructure: + """Tests for core infrastructure dependency resolution.""" + + @pytest.mark.asyncio + async def test_resolves_settings(self, scope: AsyncContainer) -> None: + """Container resolves Settings.""" + settings = await scope.get(Settings) + + assert isinstance(settings, Settings) + assert settings.PROJECT_NAME is not None + + @pytest.mark.asyncio + async def test_resolves_logger(self, scope: AsyncContainer) -> None: + """Container resolves Logger.""" + logger = await scope.get(logging.Logger) + + assert isinstance(logger, logging.Logger) + assert logger.name == "integr8scode" + + @pytest.mark.asyncio + async def test_resolves_database(self, scope: AsyncContainer) -> None: + 
"""Container resolves Database.""" + database = await scope.get(Database) + + assert database is not None + assert database.name is not None + assert isinstance(database.name, str) + + @pytest.mark.asyncio + async def test_resolves_redis(self, scope: AsyncContainer) -> None: + """Container resolves Redis client.""" + redis_client = await scope.get(aioredis.Redis) + + assert redis_client is not None + # Verify connection works + pong = await redis_client.ping() # type: ignore[misc] + assert pong is True + + +class TestSecurityServices: + """Tests for security-related service resolution.""" + + @pytest.mark.asyncio + async def test_resolves_security_service( + self, scope: AsyncContainer + ) -> None: + """Container resolves SecurityService.""" + security = await scope.get(SecurityService) + + assert isinstance(security, SecurityService) + assert security.settings is not None + + +class TestEventServices: + """Tests for event-related service resolution.""" + + @pytest.mark.asyncio + async def test_resolves_event_service(self, scope: AsyncContainer) -> None: + """Container resolves EventService.""" + service = await scope.get(EventService) + + assert isinstance(service, EventService) + + @pytest.mark.asyncio + async def test_resolves_schema_registry( + self, scope: AsyncContainer + ) -> None: + """Container resolves SchemaRegistryManager.""" + registry = await scope.get(SchemaRegistryManager) + + assert isinstance(registry, SchemaRegistryManager) + + +class TestBusinessServices: + """Tests for business service resolution.""" + + @pytest.mark.asyncio + async def test_resolves_execution_service( + self, scope: AsyncContainer + ) -> None: + """Container resolves ExecutionService.""" + service = await scope.get(ExecutionService) + + assert isinstance(service, ExecutionService) + + @pytest.mark.asyncio + async def test_resolves_user_service(self, scope: AsyncContainer) -> None: + """Container resolves UserService.""" + service = await scope.get(UserService) + + assert 
isinstance(service, UserService) + + @pytest.mark.asyncio + async def test_resolves_saved_script_service( + self, scope: AsyncContainer + ) -> None: + """Container resolves SavedScriptService.""" + service = await scope.get(SavedScriptService) + + assert isinstance(service, SavedScriptService) + + @pytest.mark.asyncio + async def test_resolves_notification_service( + self, scope: AsyncContainer + ) -> None: + """Container resolves NotificationService.""" + service = await scope.get(NotificationService) + + assert isinstance(service, NotificationService) + + @pytest.mark.asyncio + async def test_resolves_user_settings_service( + self, scope: AsyncContainer + ) -> None: + """Container resolves UserSettingsService.""" + service = await scope.get(UserSettingsService) + + assert isinstance(service, UserSettingsService) + + @pytest.mark.asyncio + async def test_resolves_rate_limit_service( + self, scope: AsyncContainer + ) -> None: + """Container resolves RateLimitService.""" + service = await scope.get(RateLimitService) + + assert isinstance(service, RateLimitService) + + @pytest.mark.asyncio + async def test_resolves_replay_service( + self, scope: AsyncContainer + ) -> None: + """Container resolves ReplayService.""" + service = await scope.get(ReplayService) + + assert isinstance(service, ReplayService) + + +class TestServiceDependencies: + """Tests that services have their dependencies correctly injected.""" + + @pytest.mark.asyncio + async def test_execution_service_has_dependencies( + self, scope: AsyncContainer + ) -> None: + """ExecutionService has required dependencies.""" + service = await scope.get(ExecutionService) + + # Check that key dependencies are present + assert service.settings is not None + assert service.execution_repo is not None + assert service.event_store is not None + + @pytest.mark.asyncio + async def test_security_service_uses_settings( + self, scope: AsyncContainer + ) -> None: + """SecurityService uses injected settings.""" + settings = 
await scope.get(Settings) + security = await scope.get(SecurityService) + + # Both should reference same settings + assert security.settings.SECRET_KEY == settings.SECRET_KEY + assert security.settings.ALGORITHM == settings.ALGORITHM + + +class TestContainerScoping: + """Tests for container scope behavior.""" + + @pytest.mark.asyncio + async def test_same_scope_returns_same_instance( + self, scope: AsyncContainer + ) -> None: + """Same scope returns same service instance.""" + service1 = await scope.get(ExecutionService) + service2 = await scope.get(ExecutionService) + + assert service1 is service2 + + @pytest.mark.asyncio + async def test_settings_is_singleton(self, scope: AsyncContainer) -> None: + """Settings is a singleton across the scope.""" + settings1 = await scope.get(Settings) + settings2 = await scope.get(Settings) + + assert settings1 is settings2 diff --git a/backend/tests/e2e/core/test_dishka_lifespan.py b/backend/tests/e2e/core/test_dishka_lifespan.py new file mode 100644 index 00000000..39aada74 --- /dev/null +++ b/backend/tests/e2e/core/test_dishka_lifespan.py @@ -0,0 +1,99 @@ +from importlib import import_module + +import pytest +from app.settings import Settings +from dishka import AsyncContainer +from fastapi import FastAPI + +pytestmark = pytest.mark.e2e + + +class TestLifespanContainerSetup: + """Tests for DI container setup during lifespan.""" + + def test_lifespan_container_attached(self, app: FastAPI) -> None: + """Container is attached to app.state after lifespan starts.""" + assert isinstance(app, FastAPI) + assert hasattr(app.state, "dishka_container") + assert app.state.dishka_container is not None + + def test_container_is_async_container(self, app: FastAPI) -> None: + """Attached container is an AsyncContainer.""" + assert isinstance(app.state.dishka_container, AsyncContainer) + + +class TestCreateAppAttachesContainer: + """Tests for create_app container attachment.""" + + def test_create_app_attaches_container( + self, test_settings: 
Settings + ) -> None: + """create_app attaches DI container to app.state.""" + create_app = import_module("app.main").create_app + app = create_app(settings=test_settings) + + assert isinstance(app, FastAPI) + assert hasattr(app.state, "dishka_container") + assert app.state.dishka_container is not None + + def test_create_app_uses_provided_settings( + self, test_settings: Settings + ) -> None: + """create_app uses provided settings in container context.""" + create_app = import_module("app.main").create_app + app = create_app(settings=test_settings) + + # App title should match settings + assert app.title == test_settings.PROJECT_NAME + + +class TestLifespanInitialization: + """Tests for services initialized during lifespan.""" + + @pytest.mark.asyncio + async def test_beanie_initialized(self, scope: AsyncContainer) -> None: + """Beanie ODM is initialized during lifespan.""" + from app.core.database_context import Database + + database = await scope.get(Database) + assert database is not None + # Database name should be set + assert database.name is not None + + @pytest.mark.asyncio + async def test_redis_connected(self, scope: AsyncContainer) -> None: + """Redis client is connected during lifespan.""" + import redis.asyncio as aioredis + + redis_client = await scope.get(aioredis.Redis) + # Should be able to ping + pong = await redis_client.ping() # type: ignore[misc] + assert pong is True + + @pytest.mark.asyncio + async def test_schema_registry_initialized( + self, scope: AsyncContainer + ) -> None: + """Schema registry is initialized during lifespan.""" + from app.events.schema.schema_registry import SchemaRegistryManager + + registry = await scope.get(SchemaRegistryManager) + assert registry is not None + + @pytest.mark.asyncio + async def test_sse_bridge_available(self, scope: AsyncContainer) -> None: + """SSE Kafka bridge is available after lifespan.""" + from app.services.sse.kafka_redis_bridge import SSEKafkaRedisBridge + + bridge = await 
scope.get(SSEKafkaRedisBridge) + assert bridge is not None + + @pytest.mark.asyncio + async def test_event_store_consumer_available( + self, scope: AsyncContainer + ) -> None: + """Event store consumer is available after lifespan.""" + from app.events.event_store_consumer import EventStoreConsumer + + consumer = await scope.get(EventStoreConsumer) + assert consumer is not None diff --git a/backend/tests/e2e/core/test_exception_handlers.py b/backend/tests/e2e/core/test_exception_handlers.py new file mode 100644 index 00000000..e8cd072a --- /dev/null +++ b/backend/tests/e2e/core/test_exception_handlers.py @@ -0,0 +1,93 @@ +import httpx +import pytest +from app.domain.exceptions import DomainError +from fastapi import FastAPI + +pytestmark = pytest.mark.e2e + + +class TestExceptionHandlerRegistration: + """Tests that exception handlers are properly registered.""" + + def test_domain_error_handler_registered(self, app: FastAPI) -> None: + """DomainError handler is registered on app.""" + assert DomainError in app.exception_handlers + + +class TestExceptionHandlerBehavior: + """Tests for exception handler behavior via HTTP requests.""" + + @pytest.mark.asyncio + async def test_not_found_returns_404( + self, client: httpx.AsyncClient + ) -> None: + """Nonexistent execution returns 404.""" + response = await client.get( + "/api/v1/executions/nonexistent-id-12345/result" + ) + + assert response.status_code == 404 + body = response.json() + assert "detail" in body + + @pytest.mark.asyncio + async def test_unauthorized_returns_401( + self, client: httpx.AsyncClient + ) -> None: + """Invalid credentials return 401.""" + response = await client.post( + "/api/v1/auth/login", + data={"username": "nonexistent", "password": "wrongpass"}, + ) + + assert response.status_code == 401 + + @pytest.mark.asyncio + async def test_forbidden_returns_403( + self, test_user: httpx.AsyncClient + ) -> None: + """Accessing admin endpoint as user returns 403.""" + response = await 
test_user.get("/api/v1/admin/users") + + assert response.status_code == 403 + + @pytest.mark.asyncio + async def test_validation_error_format( + self, test_user: httpx.AsyncClient + ) -> None: + """Validation errors return proper format.""" + # Send invalid data (empty script) + response = await test_user.post( + "/api/v1/execute", + json={ + "script": "", # Empty script should fail validation + "lang": "python", + "lang_version": "3.11", + }, + ) + + # Should get 422 for validation error + assert response.status_code == 422 + + @pytest.mark.asyncio + async def test_conflict_error_on_duplicate( + self, client: httpx.AsyncClient + ) -> None: + """Duplicate registration returns 409.""" + # First registration + import uuid + + unique_suffix = uuid.uuid4().hex[:8] + user_data = { + "username": f"duplicate_test_{unique_suffix}", + "email": f"duplicate_{unique_suffix}@example.com", + "password": "TestPass123!", + } + + response1 = await client.post("/api/v1/auth/register", json=user_data) + assert response1.status_code == 200 + + # Second registration with same email + response2 = await client.post("/api/v1/auth/register", json=user_data) + # Should be 409 or 400 depending on implementation + assert response2.status_code in (400, 409) diff --git a/backend/tests/e2e/core/test_middlewares.py b/backend/tests/e2e/core/test_middlewares.py new file mode 100644 index 00000000..3975f18b --- /dev/null +++ b/backend/tests/e2e/core/test_middlewares.py @@ -0,0 +1,283 @@ +import httpx +import pytest + +pytestmark = pytest.mark.e2e + + +class TestCorrelationMiddleware: + """Tests for CorrelationMiddleware.""" + + @pytest.mark.asyncio + async def test_generates_correlation_id(self, client: httpx.AsyncClient) -> None: + """Middleware generates correlation ID when not provided.""" + response = await client.get("/api/v1/health/live") + + assert response.status_code == 200 + assert "X-Correlation-ID" in response.headers + correlation_id = response.headers["X-Correlation-ID"] + assert 
correlation_id.startswith("req_") + + @pytest.mark.asyncio + async def test_passes_through_correlation_id( + self, client: httpx.AsyncClient + ) -> None: + """Middleware uses provided correlation ID.""" + custom_id = "custom-correlation-12345" + + response = await client.get( + "/api/v1/health/live", + headers={"X-Correlation-ID": custom_id}, + ) + + assert response.status_code == 200 + assert response.headers["X-Correlation-ID"] == custom_id + + @pytest.mark.asyncio + async def test_accepts_request_id_header( + self, client: httpx.AsyncClient + ) -> None: + """Middleware accepts X-Request-ID as alternative header.""" + request_id = "request-id-67890" + + response = await client.get( + "/api/v1/health/live", + headers={"X-Request-ID": request_id}, + ) + + assert response.status_code == 200 + # Should use request ID as correlation ID + assert response.headers["X-Correlation-ID"] == request_id + + +class TestCSRFMiddleware: + """Tests for CSRFMiddleware.""" + + @pytest.mark.asyncio + async def test_get_requests_skip_csrf( + self, client: httpx.AsyncClient + ) -> None: + """GET requests skip CSRF validation.""" + response = await client.get("/api/v1/health/live") + + assert response.status_code == 200 + + @pytest.mark.asyncio + async def test_auth_endpoints_skip_csrf( + self, client: httpx.AsyncClient + ) -> None: + """Auth endpoints skip CSRF validation.""" + # Login endpoint should work without CSRF token + response = await client.post( + "/api/v1/auth/login", + data={"username": "nonexistent", "password": "wrong"}, + ) + + # 401 means auth failed but CSRF didn't block it + assert response.status_code == 401 + + @pytest.mark.asyncio + async def test_authenticated_post_requires_csrf( + self, test_user: httpx.AsyncClient + ) -> None: + """Authenticated POST requires CSRF token.""" + # Remove CSRF header temporarily + csrf_token = test_user.headers.get("X-CSRF-Token") + del test_user.headers["X-CSRF-Token"] + + response = await test_user.post( + "/api/v1/execute", + 
json={"script": "print('test')", "lang": "python"}, + ) + + # Restore header + test_user.headers["X-CSRF-Token"] = csrf_token + + # Should be rejected for missing CSRF + assert response.status_code == 403 + + @pytest.mark.asyncio + async def test_authenticated_post_with_csrf_succeeds( + self, test_user: httpx.AsyncClient + ) -> None: + """Authenticated POST with valid CSRF token succeeds.""" + response = await test_user.post( + "/api/v1/execute", + json={ + "script": "print('hello')", + "lang": "python", + "lang_version": "3.11", + }, + ) + + # Should succeed (or at least not be blocked by CSRF) + assert response.status_code != 403 + + +class TestRequestSizeLimitMiddleware: + """Tests for RequestSizeLimitMiddleware.""" + + @pytest.mark.asyncio + async def test_small_request_allowed( + self, client: httpx.AsyncClient + ) -> None: + """Small requests are allowed through.""" + response = await client.post( + "/api/v1/auth/register", + json={ + "username": "testuser", + "email": "test@example.com", + "password": "TestPass123!", + }, + ) + + # Not 413 (request too large) + assert response.status_code != 413 + + @pytest.mark.asyncio + async def test_large_request_rejected( + self, client: httpx.AsyncClient + ) -> None: + """Requests exceeding size limit are rejected.""" + # Create a payload larger than 10MB + large_payload = "x" * (11 * 1024 * 1024) # 11MB + + response = await client.post( + "/api/v1/auth/register", + content=large_payload, + headers={"Content-Type": "text/plain"}, + ) + + assert response.status_code == 413 + assert "too large" in response.json()["detail"].lower() + + +class TestCacheControlMiddleware: + """Tests for CacheControlMiddleware.""" + + @pytest.mark.asyncio + async def test_k8s_limits_cached(self, client: httpx.AsyncClient) -> None: + """K8s limits endpoint has cache headers.""" + response = await client.get("/api/v1/k8s-limits") + + assert response.status_code == 200 + cache_control = response.headers.get("Cache-Control", "") + assert 
"public" in cache_control + assert "max-age=300" in cache_control + + @pytest.mark.asyncio + async def test_example_scripts_cached( + self, client: httpx.AsyncClient + ) -> None: + """Example scripts endpoint has cache headers.""" + response = await client.get("/api/v1/example-scripts") + + assert response.status_code == 200 + cache_control = response.headers.get("Cache-Control", "") + assert "public" in cache_control + assert "max-age=600" in cache_control + + @pytest.mark.asyncio + async def test_notifications_no_cache( + self, test_user: httpx.AsyncClient + ) -> None: + """Notifications endpoint has no-cache headers.""" + response = await test_user.get("/api/v1/notifications") + + assert response.status_code == 200 + cache_control = response.headers.get("Cache-Control", "") + assert "no-cache" in cache_control + + @pytest.mark.asyncio + async def test_post_no_cache_headers( + self, test_user: httpx.AsyncClient + ) -> None: + """POST requests don't get cache headers.""" + response = await test_user.post( + "/api/v1/execute", + json={ + "script": "print('test')", + "lang": "python", + "lang_version": "3.11", + }, + ) + + # POST should not have cache-control set by CacheControlMiddleware + cache_control = response.headers.get("Cache-Control", "") + # If cache-control is set, it shouldn't be the public caching type + if cache_control: + assert "public" not in cache_control or "max-age=300" not in cache_control + + +class TestRateLimitMiddleware: + """Tests for RateLimitMiddleware.""" + + @pytest.mark.asyncio + async def test_rate_limit_headers_present( + self, client: httpx.AsyncClient + ) -> None: + """Rate limit headers are added to responses.""" + response = await client.get("/api/v1/k8s-limits") + + assert response.status_code == 200 + # Rate limit headers should be present + assert "X-RateLimit-Limit" in response.headers + assert "X-RateLimit-Remaining" in response.headers + assert "X-RateLimit-Reset" in response.headers + + @pytest.mark.asyncio + async def 
test_health_endpoint_exempt( + self, client: httpx.AsyncClient + ) -> None: + """Health endpoints are exempt from rate limiting.""" + # Make many requests to health endpoint + for _ in range(20): + response = await client.get("/api/v1/health/live") + # Should never get rate limited + assert response.status_code == 200 + + @pytest.mark.asyncio + async def test_auth_endpoints_exempt( + self, client: httpx.AsyncClient + ) -> None: + """Auth endpoints are exempt from rate limiting.""" + # Make many login attempts + for _ in range(10): + response = await client.post( + "/api/v1/auth/login", + data={"username": "test", "password": "wrong"}, + ) + # Should be 401 (auth failed), not 429 (rate limited) + assert response.status_code == 401 + + +class TestMiddlewareOrder: + """Tests for middleware execution order.""" + + @pytest.mark.asyncio + async def test_correlation_id_before_other_processing( + self, client: httpx.AsyncClient + ) -> None: + """Correlation ID is set before other middleware runs.""" + # Even on error responses, correlation ID should be present + response = await client.get("/nonexistent-path") + + assert "X-Correlation-ID" in response.headers + + @pytest.mark.asyncio + async def test_all_middlewares_work_together( + self, test_user: httpx.AsyncClient + ) -> None: + """All middlewares work correctly in combination.""" + response = await test_user.get("/api/v1/notifications") + + # Correlation middleware ran + assert "X-Correlation-ID" in response.headers + + # Cache control middleware ran + assert "Cache-Control" in response.headers + + # Rate limit middleware ran + assert "X-RateLimit-Limit" in response.headers + + # Request completed successfully + assert response.status_code == 200 diff --git a/backend/tests/integration/__init__.py b/backend/tests/e2e/db/repositories/__init__.py similarity index 100% rename from backend/tests/integration/__init__.py rename to backend/tests/e2e/db/repositories/__init__.py diff --git 
a/backend/tests/integration/db/repositories/test_admin_settings_repository.py b/backend/tests/e2e/db/repositories/test_admin_settings_repository.py similarity index 97% rename from backend/tests/integration/db/repositories/test_admin_settings_repository.py rename to backend/tests/e2e/db/repositories/test_admin_settings_repository.py index 1f61ce95..e573ad6e 100644 --- a/backend/tests/integration/db/repositories/test_admin_settings_repository.py +++ b/backend/tests/e2e/db/repositories/test_admin_settings_repository.py @@ -4,7 +4,7 @@ from app.domain.admin import SystemSettings from dishka import AsyncContainer -pytestmark = pytest.mark.integration +pytestmark = pytest.mark.e2e @pytest.fixture() diff --git a/backend/tests/integration/db/repositories/test_dlq_repository.py b/backend/tests/e2e/db/repositories/test_dlq_repository.py similarity index 98% rename from backend/tests/integration/db/repositories/test_dlq_repository.py rename to backend/tests/e2e/db/repositories/test_dlq_repository.py index b016f7f3..9464d087 100644 --- a/backend/tests/integration/db/repositories/test_dlq_repository.py +++ b/backend/tests/e2e/db/repositories/test_dlq_repository.py @@ -7,7 +7,7 @@ from app.dlq import DLQMessageStatus from app.domain.enums.events import EventType -pytestmark = pytest.mark.integration +pytestmark = pytest.mark.e2e _test_logger = logging.getLogger("test.db.repositories.dlq_repository") diff --git a/backend/tests/integration/db/repositories/test_execution_repository.py b/backend/tests/e2e/db/repositories/test_execution_repository.py similarity index 97% rename from backend/tests/integration/db/repositories/test_execution_repository.py rename to backend/tests/e2e/db/repositories/test_execution_repository.py index eb3bf2cb..ce701bd8 100644 --- a/backend/tests/integration/db/repositories/test_execution_repository.py +++ b/backend/tests/e2e/db/repositories/test_execution_repository.py @@ -8,7 +8,7 @@ _test_logger = 
logging.getLogger("test.db.repositories.execution_repository") -pytestmark = pytest.mark.integration +pytestmark = pytest.mark.e2e @pytest.mark.asyncio diff --git a/backend/tests/integration/db/repositories/test_saved_script_repository.py b/backend/tests/e2e/db/repositories/test_saved_script_repository.py similarity index 97% rename from backend/tests/integration/db/repositories/test_saved_script_repository.py rename to backend/tests/e2e/db/repositories/test_saved_script_repository.py index 58ebfd90..5be6a5f6 100644 --- a/backend/tests/integration/db/repositories/test_saved_script_repository.py +++ b/backend/tests/e2e/db/repositories/test_saved_script_repository.py @@ -3,7 +3,7 @@ from app.domain.saved_script import DomainSavedScriptCreate, DomainSavedScriptUpdate from dishka import AsyncContainer -pytestmark = pytest.mark.integration +pytestmark = pytest.mark.e2e @pytest.fixture() diff --git a/backend/tests/integration/db/repositories/__init__.py b/backend/tests/e2e/dlq/__init__.py similarity index 100% rename from backend/tests/integration/db/repositories/__init__.py rename to backend/tests/e2e/dlq/__init__.py diff --git a/backend/tests/integration/dlq/test_dlq_discard.py b/backend/tests/e2e/dlq/test_dlq_discard.py similarity index 97% rename from backend/tests/integration/dlq/test_dlq_discard.py rename to backend/tests/e2e/dlq/test_dlq_discard.py index 8932fa51..2c4650f4 100644 --- a/backend/tests/integration/dlq/test_dlq_discard.py +++ b/backend/tests/e2e/dlq/test_dlq_discard.py @@ -9,9 +9,9 @@ from app.domain.enums.kafka import KafkaTopic from dishka import AsyncContainer -from tests.helpers import make_execution_requested_event +from tests.conftest import make_execution_requested_event -pytestmark = [pytest.mark.integration, pytest.mark.mongodb] +pytestmark = [pytest.mark.e2e, pytest.mark.mongodb] _test_logger = logging.getLogger("test.dlq.discard") diff --git a/backend/tests/integration/dlq/test_dlq_manager.py b/backend/tests/e2e/dlq/test_dlq_manager.py 
similarity index 96% rename from backend/tests/integration/dlq/test_dlq_manager.py rename to backend/tests/e2e/dlq/test_dlq_manager.py index 6af47303..381f90e2 100644 --- a/backend/tests/integration/dlq/test_dlq_manager.py +++ b/backend/tests/e2e/dlq/test_dlq_manager.py @@ -15,12 +15,12 @@ from app.settings import Settings from dishka import AsyncContainer -from tests.helpers import make_execution_requested_event +from tests.conftest import make_execution_requested_event # xdist_group: DLQ tests share a Kafka consumer group. When running in parallel, # different workers' managers consume each other's messages and apply wrong policies. # Serial execution ensures each test's manager processes only its own messages. -pytestmark = [pytest.mark.integration, pytest.mark.kafka, pytest.mark.mongodb, pytest.mark.xdist_group("dlq")] +pytestmark = [pytest.mark.e2e, pytest.mark.kafka, pytest.mark.mongodb, pytest.mark.xdist_group("dlq")] _test_logger = logging.getLogger("test.dlq.manager") diff --git a/backend/tests/integration/dlq/test_dlq_retry.py b/backend/tests/e2e/dlq/test_dlq_retry.py similarity index 98% rename from backend/tests/integration/dlq/test_dlq_retry.py rename to backend/tests/e2e/dlq/test_dlq_retry.py index f82765cd..d01fefe7 100644 --- a/backend/tests/integration/dlq/test_dlq_retry.py +++ b/backend/tests/e2e/dlq/test_dlq_retry.py @@ -9,9 +9,9 @@ from app.domain.enums.kafka import KafkaTopic from dishka import AsyncContainer -from tests.helpers import make_execution_requested_event +from tests.conftest import make_execution_requested_event -pytestmark = [pytest.mark.integration, pytest.mark.mongodb] +pytestmark = [pytest.mark.e2e, pytest.mark.mongodb] _test_logger = logging.getLogger("test.dlq.retry") diff --git a/backend/tests/integration/dlq/__init__.py b/backend/tests/e2e/events/__init__.py similarity index 100% rename from backend/tests/integration/dlq/__init__.py rename to backend/tests/e2e/events/__init__.py diff --git 
a/backend/tests/integration/events/test_consume_roundtrip.py b/backend/tests/e2e/events/test_consume_roundtrip.py similarity index 93% rename from backend/tests/integration/events/test_consume_roundtrip.py rename to backend/tests/e2e/events/test_consume_roundtrip.py index 94193247..3b7d969b 100644 --- a/backend/tests/integration/events/test_consume_roundtrip.py +++ b/backend/tests/e2e/events/test_consume_roundtrip.py @@ -14,11 +14,11 @@ from app.settings import Settings from dishka import AsyncContainer -from tests.helpers import make_execution_requested_event +from tests.conftest import make_execution_requested_event # xdist_group: Kafka consumer creation can crash librdkafka when multiple workers # instantiate Consumer() objects simultaneously. Serial execution prevents this. -pytestmark = [pytest.mark.integration, pytest.mark.kafka, pytest.mark.xdist_group("kafka_consumers")] +pytestmark = [pytest.mark.e2e, pytest.mark.kafka, pytest.mark.xdist_group("kafka_consumers")] _test_logger = logging.getLogger("test.events.consume_roundtrip") diff --git a/backend/tests/integration/events/test_consumer_group_monitor.py b/backend/tests/e2e/events/test_consumer_group_monitor.py similarity index 96% rename from backend/tests/integration/events/test_consumer_group_monitor.py rename to backend/tests/e2e/events/test_consumer_group_monitor.py index 11d535dd..97d45c21 100644 --- a/backend/tests/integration/events/test_consumer_group_monitor.py +++ b/backend/tests/e2e/events/test_consumer_group_monitor.py @@ -7,7 +7,7 @@ _test_logger = logging.getLogger("test.events.consumer_group_monitor") -@pytest.mark.integration +@pytest.mark.e2e @pytest.mark.kafka @pytest.mark.asyncio async def test_list_groups_and_error_status(test_settings: Settings) -> None: diff --git a/backend/tests/integration/events/test_consumer_group_monitor_real.py b/backend/tests/e2e/events/test_consumer_group_monitor_real.py similarity index 98% rename from 
backend/tests/integration/events/test_consumer_group_monitor_real.py rename to backend/tests/e2e/events/test_consumer_group_monitor_real.py index adcead33..1c08dfa0 100644 --- a/backend/tests/integration/events/test_consumer_group_monitor_real.py +++ b/backend/tests/e2e/events/test_consumer_group_monitor_real.py @@ -10,7 +10,7 @@ ) from app.settings import Settings -pytestmark = [pytest.mark.integration, pytest.mark.kafka] +pytestmark = [pytest.mark.e2e, pytest.mark.kafka] _test_logger = logging.getLogger("test.events.consumer_group_monitor_real") diff --git a/backend/tests/integration/events/test_consumer_lifecycle.py b/backend/tests/e2e/events/test_consumer_lifecycle.py similarity index 94% rename from backend/tests/integration/events/test_consumer_lifecycle.py rename to backend/tests/e2e/events/test_consumer_lifecycle.py index 5374e152..98c53a08 100644 --- a/backend/tests/integration/events/test_consumer_lifecycle.py +++ b/backend/tests/e2e/events/test_consumer_lifecycle.py @@ -11,7 +11,7 @@ # xdist_group: Kafka consumer creation can crash librdkafka when multiple workers # instantiate Consumer() objects simultaneously. Serial execution prevents this. 
-pytestmark = [pytest.mark.integration, pytest.mark.kafka, pytest.mark.xdist_group("kafka_consumers")] +pytestmark = [pytest.mark.e2e, pytest.mark.kafka, pytest.mark.xdist_group("kafka_consumers")] _test_logger = logging.getLogger("test.events.consumer_lifecycle") diff --git a/backend/tests/integration/events/test_dlq_handler.py b/backend/tests/e2e/events/test_dlq_handler.py similarity index 97% rename from backend/tests/integration/events/test_dlq_handler.py rename to backend/tests/e2e/events/test_dlq_handler.py index 16536256..d96dde5e 100644 --- a/backend/tests/integration/events/test_dlq_handler.py +++ b/backend/tests/e2e/events/test_dlq_handler.py @@ -5,7 +5,7 @@ from app.events.core import UnifiedProducer, create_dlq_error_handler, create_immediate_dlq_handler from dishka import AsyncContainer -pytestmark = [pytest.mark.integration, pytest.mark.kafka] +pytestmark = [pytest.mark.e2e, pytest.mark.kafka] _test_logger = logging.getLogger("test.events.dlq_handler") diff --git a/backend/tests/integration/events/test_event_dispatcher.py b/backend/tests/e2e/events/test_event_dispatcher.py similarity index 94% rename from backend/tests/integration/events/test_event_dispatcher.py rename to backend/tests/e2e/events/test_event_dispatcher.py index 3d166cec..2ead3aa3 100644 --- a/backend/tests/integration/events/test_event_dispatcher.py +++ b/backend/tests/e2e/events/test_event_dispatcher.py @@ -14,11 +14,11 @@ from app.settings import Settings from dishka import AsyncContainer -from tests.helpers import make_execution_requested_event +from tests.conftest import make_execution_requested_event # xdist_group: Kafka consumer creation can crash librdkafka when multiple workers # instantiate Consumer() objects simultaneously. Serial execution prevents this. 
-pytestmark = [pytest.mark.integration, pytest.mark.kafka, pytest.mark.xdist_group("kafka_consumers")] +pytestmark = [pytest.mark.e2e, pytest.mark.kafka, pytest.mark.xdist_group("kafka_consumers")] _test_logger = logging.getLogger("test.events.event_dispatcher") diff --git a/backend/tests/integration/events/test_event_store.py b/backend/tests/e2e/events/test_event_store.py similarity index 97% rename from backend/tests/integration/events/test_event_store.py rename to backend/tests/e2e/events/test_event_store.py index 470297c5..8bd82173 100644 --- a/backend/tests/integration/events/test_event_store.py +++ b/backend/tests/e2e/events/test_event_store.py @@ -9,9 +9,9 @@ from app.events.event_store import EventStore from dishka import AsyncContainer -from tests.helpers import make_execution_requested_event +from tests.conftest import make_execution_requested_event -pytestmark = [pytest.mark.integration, pytest.mark.mongodb] +pytestmark = [pytest.mark.e2e, pytest.mark.mongodb] _test_logger = logging.getLogger("test.events.event_store") diff --git a/backend/tests/integration/events/test_producer_roundtrip.py b/backend/tests/e2e/events/test_producer_roundtrip.py similarity index 91% rename from backend/tests/integration/events/test_producer_roundtrip.py rename to backend/tests/e2e/events/test_producer_roundtrip.py index cb91df15..8340610b 100644 --- a/backend/tests/integration/events/test_producer_roundtrip.py +++ b/backend/tests/e2e/events/test_producer_roundtrip.py @@ -9,9 +9,9 @@ from app.settings import Settings from dishka import AsyncContainer -from tests.helpers import make_execution_requested_event +from tests.conftest import make_execution_requested_event -pytestmark = [pytest.mark.integration, pytest.mark.kafka] +pytestmark = [pytest.mark.e2e, pytest.mark.kafka] _test_logger = logging.getLogger("test.events.producer_roundtrip") diff --git a/backend/tests/integration/events/test_schema_registry_real.py b/backend/tests/e2e/events/test_schema_registry_real.py 
similarity index 94% rename from backend/tests/integration/events/test_schema_registry_real.py rename to backend/tests/e2e/events/test_schema_registry_real.py index 3e9da631..d6c182de 100644 --- a/backend/tests/integration/events/test_schema_registry_real.py +++ b/backend/tests/e2e/events/test_schema_registry_real.py @@ -6,7 +6,7 @@ from app.infrastructure.kafka.mappings import get_topic_for_event from app.settings import Settings -pytestmark = [pytest.mark.integration, pytest.mark.kafka] +pytestmark = [pytest.mark.e2e, pytest.mark.kafka] _test_logger = logging.getLogger("test.events.schema_registry_real") diff --git a/backend/tests/integration/events/test_schema_registry_roundtrip.py b/backend/tests/e2e/events/test_schema_registry_roundtrip.py similarity index 93% rename from backend/tests/integration/events/test_schema_registry_roundtrip.py rename to backend/tests/e2e/events/test_schema_registry_roundtrip.py index f23b2fe6..c15faca2 100644 --- a/backend/tests/integration/events/test_schema_registry_roundtrip.py +++ b/backend/tests/e2e/events/test_schema_registry_roundtrip.py @@ -6,9 +6,9 @@ from app.settings import Settings from dishka import AsyncContainer -from tests.helpers import make_execution_requested_event +from tests.conftest import make_execution_requested_event -pytestmark = [pytest.mark.integration] +pytestmark = [pytest.mark.e2e] _test_logger = logging.getLogger("test.events.schema_registry_roundtrip") diff --git a/backend/tests/integration/events/__init__.py b/backend/tests/e2e/idempotency/__init__.py similarity index 100% rename from backend/tests/integration/events/__init__.py rename to backend/tests/e2e/idempotency/__init__.py diff --git a/backend/tests/integration/idempotency/test_consumer_idempotent.py b/backend/tests/e2e/idempotency/test_consumer_idempotent.py similarity index 97% rename from backend/tests/integration/idempotency/test_consumer_idempotent.py rename to backend/tests/e2e/idempotency/test_consumer_idempotent.py index 
19d4b05f..a2c37f90 100644 --- a/backend/tests/integration/idempotency/test_consumer_idempotent.py +++ b/backend/tests/e2e/idempotency/test_consumer_idempotent.py @@ -15,12 +15,12 @@ from app.settings import Settings from dishka import AsyncContainer -from tests.helpers import make_execution_requested_event +from tests.conftest import make_execution_requested_event # xdist_group: Kafka consumer creation can crash librdkafka when multiple workers # instantiate Consumer() objects simultaneously. Serial execution prevents this. pytestmark = [ - pytest.mark.integration, + pytest.mark.e2e, pytest.mark.kafka, pytest.mark.redis, pytest.mark.xdist_group("kafka_consumers"), diff --git a/backend/tests/integration/idempotency/test_decorator_idempotent.py b/backend/tests/e2e/idempotency/test_decorator_idempotent.py similarity index 94% rename from backend/tests/integration/idempotency/test_decorator_idempotent.py rename to backend/tests/e2e/idempotency/test_decorator_idempotent.py index 65e5b8b8..ec0c94f2 100644 --- a/backend/tests/integration/idempotency/test_decorator_idempotent.py +++ b/backend/tests/e2e/idempotency/test_decorator_idempotent.py @@ -6,12 +6,12 @@ from app.services.idempotency.middleware import idempotent_handler from dishka import AsyncContainer -from tests.helpers import make_execution_requested_event +from tests.conftest import make_execution_requested_event _test_logger = logging.getLogger("test.idempotency.decorator_idempotent") -pytestmark = [pytest.mark.integration] +pytestmark = [pytest.mark.e2e] @pytest.mark.asyncio diff --git a/backend/tests/integration/idempotency/test_idempotency.py b/backend/tests/e2e/idempotency/test_idempotency.py similarity index 99% rename from backend/tests/integration/idempotency/test_idempotency.py rename to backend/tests/e2e/idempotency/test_idempotency.py index 032a7f46..460cf3c8 100644 --- a/backend/tests/integration/idempotency/test_idempotency.py +++ b/backend/tests/e2e/idempotency/test_idempotency.py @@ -16,9 +16,9 @@ 
from app.services.idempotency.redis_repository import RedisIdempotencyRepository from app.settings import Settings -from tests.helpers import make_execution_requested_event +from tests.conftest import make_execution_requested_event -pytestmark = [pytest.mark.integration, pytest.mark.redis] +pytestmark = [pytest.mark.e2e, pytest.mark.redis] # Test logger for all tests _test_logger = logging.getLogger("test.idempotency") diff --git a/backend/tests/integration/idempotency/test_idempotent_handler.py b/backend/tests/e2e/idempotency/test_idempotent_handler.py similarity index 94% rename from backend/tests/integration/idempotency/test_idempotent_handler.py rename to backend/tests/e2e/idempotency/test_idempotent_handler.py index c7ef5730..bfcea09c 100644 --- a/backend/tests/integration/idempotency/test_idempotent_handler.py +++ b/backend/tests/e2e/idempotency/test_idempotent_handler.py @@ -6,9 +6,9 @@ from app.services.idempotency.middleware import IdempotentEventHandler from dishka import AsyncContainer -from tests.helpers import make_execution_requested_event +from tests.conftest import make_execution_requested_event -pytestmark = [pytest.mark.integration] +pytestmark = [pytest.mark.e2e] _test_logger = logging.getLogger("test.idempotency.idempotent_handler") diff --git a/backend/tests/integration/idempotency/__init__.py b/backend/tests/e2e/notifications/__init__.py similarity index 100% rename from backend/tests/integration/idempotency/__init__.py rename to backend/tests/e2e/notifications/__init__.py diff --git a/backend/tests/integration/notifications/test_notification_sse.py b/backend/tests/e2e/notifications/test_notification_sse.py similarity index 96% rename from backend/tests/integration/notifications/test_notification_sse.py rename to backend/tests/e2e/notifications/test_notification_sse.py index 10327629..9e922be8 100644 --- a/backend/tests/integration/notifications/test_notification_sse.py +++ b/backend/tests/e2e/notifications/test_notification_sse.py @@ -8,7 
+8,7 @@ from app.services.sse.redis_bus import SSERedisBus from dishka import AsyncContainer -pytestmark = [pytest.mark.integration, pytest.mark.redis] +pytestmark = [pytest.mark.e2e, pytest.mark.redis] @pytest.mark.asyncio diff --git a/backend/tests/integration/notifications/__init__.py b/backend/tests/e2e/result_processor/__init__.py similarity index 100% rename from backend/tests/integration/notifications/__init__.py rename to backend/tests/e2e/result_processor/__init__.py diff --git a/backend/tests/integration/result_processor/test_result_processor.py b/backend/tests/e2e/result_processor/test_result_processor.py similarity index 99% rename from backend/tests/integration/result_processor/test_result_processor.py rename to backend/tests/e2e/result_processor/test_result_processor.py index de2546d6..705daa84 100644 --- a/backend/tests/integration/result_processor/test_result_processor.py +++ b/backend/tests/e2e/result_processor/test_result_processor.py @@ -24,7 +24,7 @@ # xdist_group: Kafka consumer creation can crash librdkafka when multiple workers # instantiate Consumer() objects simultaneously. Serial execution prevents this. 
pytestmark = [ - pytest.mark.integration, + pytest.mark.e2e, pytest.mark.kafka, pytest.mark.mongodb, pytest.mark.xdist_group("kafka_consumers"), diff --git a/backend/tests/integration/result_processor/__init__.py b/backend/tests/e2e/services/__init__.py similarity index 100% rename from backend/tests/integration/result_processor/__init__.py rename to backend/tests/e2e/services/__init__.py diff --git a/backend/tests/integration/services/admin/test_admin_user_service.py b/backend/tests/e2e/services/admin/test_admin_user_service.py similarity index 94% rename from backend/tests/integration/services/admin/test_admin_user_service.py rename to backend/tests/e2e/services/admin/test_admin_user_service.py index b9ea3d98..7c8afea3 100644 --- a/backend/tests/integration/services/admin/test_admin_user_service.py +++ b/backend/tests/e2e/services/admin/test_admin_user_service.py @@ -6,7 +6,7 @@ from app.services.admin import AdminUserService from dishka import AsyncContainer -pytestmark = [pytest.mark.integration, pytest.mark.mongodb] +pytestmark = [pytest.mark.e2e, pytest.mark.mongodb] @pytest.mark.asyncio diff --git a/backend/tests/e2e/services/coordinator/test_execution_coordinator.py b/backend/tests/e2e/services/coordinator/test_execution_coordinator.py new file mode 100644 index 00000000..5406c7b4 --- /dev/null +++ b/backend/tests/e2e/services/coordinator/test_execution_coordinator.py @@ -0,0 +1,150 @@ +import pytest +from app.services.coordinator.coordinator import ExecutionCoordinator +from dishka import AsyncContainer +from tests.conftest import make_execution_requested_event + +pytestmark = [pytest.mark.e2e, pytest.mark.kafka] + + +class TestHandleExecutionRequested: + """Tests for _handle_execution_requested method.""" + + @pytest.mark.asyncio + async def test_handle_requested_schedules_execution( + self, scope: AsyncContainer + ) -> None: + """Handler schedules execution immediately.""" + coord: ExecutionCoordinator = await scope.get(ExecutionCoordinator) + ev = 
make_execution_requested_event(execution_id="e-sched-1") + + await coord._handle_execution_requested(ev) # noqa: SLF001 + + assert "e-sched-1" in coord._active_executions # noqa: SLF001 + + @pytest.mark.asyncio + async def test_handle_requested_with_priority( + self, scope: AsyncContainer + ) -> None: + """Handler respects execution priority.""" + coord: ExecutionCoordinator = await scope.get(ExecutionCoordinator) + ev = make_execution_requested_event( + execution_id="e-priority-1", + priority=10, # High priority + ) + + await coord._handle_execution_requested(ev) # noqa: SLF001 + + assert "e-priority-1" in coord._active_executions # noqa: SLF001 + + @pytest.mark.asyncio + async def test_handle_requested_unique_executions( + self, scope: AsyncContainer + ) -> None: + """Each execution gets unique tracking.""" + coord: ExecutionCoordinator = await scope.get(ExecutionCoordinator) + + ev1 = make_execution_requested_event(execution_id="e-unique-1") + ev2 = make_execution_requested_event(execution_id="e-unique-2") + + await coord._handle_execution_requested(ev1) # noqa: SLF001 + await coord._handle_execution_requested(ev2) # noqa: SLF001 + + assert "e-unique-1" in coord._active_executions # noqa: SLF001 + assert "e-unique-2" in coord._active_executions # noqa: SLF001 + + +class TestGetStatus: + """Tests for get_status method.""" + + @pytest.mark.asyncio + async def test_get_status_returns_dict(self, scope: AsyncContainer) -> None: + """Get status returns dictionary with coordinator info.""" + coord: ExecutionCoordinator = await scope.get(ExecutionCoordinator) + + status = await coord.get_status() + + assert isinstance(status, dict) + assert "running" in status + assert "active_executions" in status + assert "queue_stats" in status + assert "resource_stats" in status + + @pytest.mark.asyncio + async def test_get_status_tracks_active_executions( + self, scope: AsyncContainer + ) -> None: + """Status tracks number of active executions.""" + coord: ExecutionCoordinator = 
await scope.get(ExecutionCoordinator) + + initial_status = await coord.get_status() + initial_active = initial_status.get("active_executions", 0) + + # Add execution + ev = make_execution_requested_event(execution_id="e-status-track-1") + await coord._handle_execution_requested(ev) # noqa: SLF001 + + new_status = await coord.get_status() + new_active = new_status.get("active_executions", 0) + + assert new_active == initial_active + 1, ( + f"Expected exactly one more active execution: {initial_active} -> {new_active}" + ) + + +class TestQueueManager: + """Tests for queue manager integration.""" + + @pytest.mark.asyncio + async def test_queue_manager_initialized(self, scope: AsyncContainer) -> None: + """Queue manager is properly initialized.""" + coord: ExecutionCoordinator = await scope.get(ExecutionCoordinator) + + assert coord.queue_manager is not None + assert hasattr(coord.queue_manager, "add_execution") + assert hasattr(coord.queue_manager, "get_next_execution") + + +class TestResourceManager: + """Tests for resource manager integration.""" + + @pytest.mark.asyncio + async def test_resource_manager_initialized( + self, scope: AsyncContainer + ) -> None: + """Resource manager is properly initialized.""" + coord: ExecutionCoordinator = await scope.get(ExecutionCoordinator) + + assert coord.resource_manager is not None + assert hasattr(coord.resource_manager, "request_allocation") + assert hasattr(coord.resource_manager, "release_allocation") + + @pytest.mark.asyncio + async def test_resource_manager_has_pool( + self, scope: AsyncContainer + ) -> None: + """Resource manager has resource pool configured.""" + coord: ExecutionCoordinator = await scope.get(ExecutionCoordinator) + + # Check resource manager has pool with capacity + assert coord.resource_manager.pool is not None + assert coord.resource_manager.pool.total_cpu_cores > 0 + assert coord.resource_manager.pool.total_memory_mb > 0 + + +class TestCoordinatorLifecycle: + """Tests for coordinator lifecycle.""" 
+ + @pytest.mark.asyncio + async def test_coordinator_has_consumer(self, scope: AsyncContainer) -> None: + """Coordinator has Kafka consumer configured.""" + coord: ExecutionCoordinator = await scope.get(ExecutionCoordinator) + + # Consumer is set up during start, may be None before + assert hasattr(coord, "consumer") + + @pytest.mark.asyncio + async def test_coordinator_has_producer(self, scope: AsyncContainer) -> None: + """Coordinator has Kafka producer configured.""" + coord: ExecutionCoordinator = await scope.get(ExecutionCoordinator) + + assert coord.producer is not None diff --git a/backend/tests/integration/services/events/test_event_bus.py b/backend/tests/e2e/services/events/test_event_bus.py similarity index 98% rename from backend/tests/integration/services/events/test_event_bus.py rename to backend/tests/e2e/services/events/test_event_bus.py index 6f17670b..9a728b39 100644 --- a/backend/tests/integration/services/events/test_event_bus.py +++ b/backend/tests/e2e/services/events/test_event_bus.py @@ -9,7 +9,7 @@ from app.settings import Settings from dishka import AsyncContainer -pytestmark = pytest.mark.integration +pytestmark = pytest.mark.e2e @pytest.mark.asyncio diff --git a/backend/tests/integration/services/events/test_kafka_event_service.py b/backend/tests/e2e/services/events/test_kafka_event_service.py similarity index 96% rename from backend/tests/integration/services/events/test_kafka_event_service.py rename to backend/tests/e2e/services/events/test_kafka_event_service.py index 2463d5c4..1a02e800 100644 --- a/backend/tests/integration/services/events/test_kafka_event_service.py +++ b/backend/tests/e2e/services/events/test_kafka_event_service.py @@ -5,7 +5,7 @@ from app.services.kafka_event_service import KafkaEventService from dishka import AsyncContainer -pytestmark = [pytest.mark.integration, pytest.mark.kafka, pytest.mark.mongodb] +pytestmark = [pytest.mark.e2e, pytest.mark.kafka, pytest.mark.mongodb] @pytest.mark.asyncio diff --git 
a/backend/tests/e2e/services/execution/test_execution_service.py b/backend/tests/e2e/services/execution/test_execution_service.py new file mode 100644 index 00000000..8ae06e85 --- /dev/null +++ b/backend/tests/e2e/services/execution/test_execution_service.py @@ -0,0 +1,401 @@ +import uuid + +import pytest +from app.domain.enums.events import EventType +from app.domain.enums.execution import ExecutionStatus +from app.domain.execution import ResourceLimitsDomain +from app.domain.execution.exceptions import ExecutionNotFoundError +from app.services.execution_service import ExecutionService +from dishka import AsyncContainer + +pytestmark = [pytest.mark.e2e, pytest.mark.mongodb] + + +class TestGetResourceLimits: + """Tests for get_k8s_resource_limits method.""" + + @pytest.mark.asyncio + async def test_get_k8s_resource_limits(self, scope: AsyncContainer) -> None: + """Get K8s resource limits returns valid configuration.""" + svc: ExecutionService = await scope.get(ExecutionService) + limits = await svc.get_k8s_resource_limits() + + assert isinstance(limits, ResourceLimitsDomain) + assert limits.cpu_limit is not None + assert limits.memory_limit is not None + assert limits.cpu_request is not None + assert limits.memory_request is not None + assert limits.execution_timeout > 0 + assert isinstance(limits.supported_runtimes, dict) + assert "python" in limits.supported_runtimes + + +class TestGetExampleScripts: + """Tests for get_example_scripts method.""" + + @pytest.mark.asyncio + async def test_get_example_scripts(self, scope: AsyncContainer) -> None: + """Get example scripts returns dictionary with python.""" + svc: ExecutionService = await scope.get(ExecutionService) + examples = await svc.get_example_scripts() + + assert isinstance(examples, dict) + assert "python" in examples + assert isinstance(examples["python"], str) + assert len(examples["python"]) > 0 + + +class TestExecuteScript: + """Tests for execute_script method.""" + + @pytest.mark.asyncio + async def 
test_execute_simple_script(self, scope: AsyncContainer) -> None: + """Execute simple script creates execution and returns response.""" + svc: ExecutionService = await scope.get(ExecutionService) + user_id = f"test_user_{uuid.uuid4().hex[:8]}" + + result = await svc.execute_script( + script="print('hello world')", + user_id=user_id, + client_ip="127.0.0.1", + user_agent="pytest", + lang="python", + lang_version="3.11", + ) + + assert result.execution_id is not None + assert result.lang == "python" + assert result.lang_version == "3.11" + assert result.status in [ + ExecutionStatus.QUEUED, + ExecutionStatus.SCHEDULED, + ExecutionStatus.RUNNING, + ] + + @pytest.mark.asyncio + async def test_execute_script_with_custom_timeout( + self, scope: AsyncContainer + ) -> None: + """Execute script with custom timeout override.""" + svc: ExecutionService = await scope.get(ExecutionService) + user_id = f"test_user_{uuid.uuid4().hex[:8]}" + + result = await svc.execute_script( + script="import time; time.sleep(1); print('done')", + user_id=user_id, + client_ip="127.0.0.1", + user_agent="pytest", + lang="python", + lang_version="3.11", + timeout_override=30, + ) + + assert result.execution_id is not None + assert result.status in [ + ExecutionStatus.QUEUED, + ExecutionStatus.SCHEDULED, + ExecutionStatus.RUNNING, + ] + + @pytest.mark.asyncio + async def test_execute_script_returns_unique_ids( + self, scope: AsyncContainer + ) -> None: + """Each execution gets unique ID.""" + svc: ExecutionService = await scope.get(ExecutionService) + user_id = f"test_user_{uuid.uuid4().hex[:8]}" + + result1 = await svc.execute_script( + script="print(1)", + user_id=user_id, + client_ip="127.0.0.1", + user_agent="pytest", + lang="python", + lang_version="3.11", + ) + + result2 = await svc.execute_script( + script="print(2)", + user_id=user_id, + client_ip="127.0.0.1", + user_agent="pytest", + lang="python", + lang_version="3.11", + ) + + assert result1.execution_id != result2.execution_id + + +class 
TestGetExecutionResult: + """Tests for get_execution_result method.""" + + @pytest.mark.asyncio + async def test_get_execution_result(self, scope: AsyncContainer) -> None: + """Get execution result for existing execution.""" + svc: ExecutionService = await scope.get(ExecutionService) + user_id = f"test_user_{uuid.uuid4().hex[:8]}" + + # Create execution + exec_result = await svc.execute_script( + script="print('test')", + user_id=user_id, + client_ip="127.0.0.1", + user_agent="pytest", + lang="python", + lang_version="3.11", + ) + + # Get result + result = await svc.get_execution_result(exec_result.execution_id) + + assert result.execution_id == exec_result.execution_id + assert result.lang == "python" + assert result.user_id == user_id + + @pytest.mark.asyncio + async def test_get_execution_result_not_found( + self, scope: AsyncContainer + ) -> None: + """Get nonexistent execution raises error.""" + svc: ExecutionService = await scope.get(ExecutionService) + + with pytest.raises(ExecutionNotFoundError): + await svc.get_execution_result("nonexistent-execution-id") + + +class TestGetExecutionEvents: + """Tests for get_execution_events method.""" + + @pytest.mark.asyncio + async def test_get_execution_events(self, scope: AsyncContainer) -> None: + """Get events for execution returns list.""" + svc: ExecutionService = await scope.get(ExecutionService) + user_id = f"test_user_{uuid.uuid4().hex[:8]}" + + # Create execution + exec_result = await svc.execute_script( + script="print('events test')", + user_id=user_id, + client_ip="127.0.0.1", + user_agent="pytest", + lang="python", + lang_version="3.11", + ) + + # Get events + events = await svc.get_execution_events(exec_result.execution_id) + + assert isinstance(events, list) + # Should have at least EXECUTION_REQUESTED event + if events: + event_types = {e.event_type for e in events} + assert EventType.EXECUTION_REQUESTED in event_types + + @pytest.mark.asyncio + async def test_get_execution_events_with_filter( + self, 
scope: AsyncContainer + ) -> None: + """Get events filtered by type.""" + svc: ExecutionService = await scope.get(ExecutionService) + user_id = f"test_user_{uuid.uuid4().hex[:8]}" + + exec_result = await svc.execute_script( + script="print('filter test')", + user_id=user_id, + client_ip="127.0.0.1", + user_agent="pytest", + lang="python", + lang_version="3.11", + ) + + events = await svc.get_execution_events( + exec_result.execution_id, + event_types=[EventType.EXECUTION_REQUESTED], + ) + + assert isinstance(events, list) + for event in events: + assert event.event_type == EventType.EXECUTION_REQUESTED + + +class TestGetUserExecutions: + """Tests for get_user_executions method.""" + + @pytest.mark.asyncio + async def test_get_user_executions(self, scope: AsyncContainer) -> None: + """Get user executions returns list.""" + svc: ExecutionService = await scope.get(ExecutionService) + user_id = f"test_user_{uuid.uuid4().hex[:8]}" + + # Create some executions + for i in range(3): + await svc.execute_script( + script=f"print({i})", + user_id=user_id, + client_ip="127.0.0.1", + user_agent="pytest", + lang="python", + lang_version="3.11", + ) + + # Get user executions + executions = await svc.get_user_executions(user_id=user_id, limit=10, skip=0) + + assert isinstance(executions, list) + assert len(executions) >= 3 + for execution in executions: + assert execution.user_id == user_id + + @pytest.mark.asyncio + async def test_get_user_executions_pagination( + self, scope: AsyncContainer + ) -> None: + """Pagination works for user executions.""" + svc: ExecutionService = await scope.get(ExecutionService) + user_id = f"test_user_{uuid.uuid4().hex[:8]}" + + # Create executions + for i in range(5): + await svc.execute_script( + script=f"print({i})", + user_id=user_id, + client_ip="127.0.0.1", + user_agent="pytest", + lang="python", + lang_version="3.11", + ) + + # Get first page + page1 = await svc.get_user_executions(user_id=user_id, limit=2, skip=0) + assert len(page1) == 2 + 
+ # Get second page + page2 = await svc.get_user_executions(user_id=user_id, limit=2, skip=2) + assert len(page2) == 2 + + # Ensure different results + page1_ids = {e.execution_id for e in page1} + page2_ids = {e.execution_id for e in page2} + assert page1_ids.isdisjoint(page2_ids) + + @pytest.mark.asyncio + async def test_get_user_executions_filter_by_language( + self, scope: AsyncContainer + ) -> None: + """Filter executions by language.""" + svc: ExecutionService = await scope.get(ExecutionService) + user_id = f"test_user_{uuid.uuid4().hex[:8]}" + + await svc.execute_script( + script="print('python')", + user_id=user_id, + client_ip="127.0.0.1", + user_agent="pytest", + lang="python", + lang_version="3.11", + ) + + executions = await svc.get_user_executions( + user_id=user_id, lang="python", limit=10, skip=0 + ) + + assert isinstance(executions, list) + for execution in executions: + assert execution.lang == "python" + + +class TestCountUserExecutions: + """Tests for count_user_executions method.""" + + @pytest.mark.asyncio + async def test_count_user_executions(self, scope: AsyncContainer) -> None: + """Count user executions returns correct count.""" + svc: ExecutionService = await scope.get(ExecutionService) + user_id = f"test_user_{uuid.uuid4().hex[:8]}" + + # Get initial count + initial_count = await svc.count_user_executions(user_id=user_id) + + # Create executions + for _ in range(3): + await svc.execute_script( + script="print('count')", + user_id=user_id, + client_ip="127.0.0.1", + user_agent="pytest", + lang="python", + lang_version="3.11", + ) + + # Count again + new_count = await svc.count_user_executions(user_id=user_id) + + assert new_count == initial_count + 3 + + +class TestDeleteExecution: + """Tests for delete_execution method.""" + + @pytest.mark.asyncio + async def test_delete_execution(self, scope: AsyncContainer) -> None: + """Delete execution removes it from database.""" + svc: ExecutionService = await scope.get(ExecutionService) + user_id 
= f"test_user_{uuid.uuid4().hex[:8]}" + + # Create execution + exec_result = await svc.execute_script( + script="print('to delete')", + user_id=user_id, + client_ip="127.0.0.1", + user_agent="pytest", + lang="python", + lang_version="3.11", + ) + + # Delete it + deleted = await svc.delete_execution(exec_result.execution_id) + assert deleted is True + + # Verify it's gone + with pytest.raises(ExecutionNotFoundError): + await svc.get_execution_result(exec_result.execution_id) + + @pytest.mark.asyncio + async def test_delete_nonexistent_execution( + self, scope: AsyncContainer + ) -> None: + """Delete nonexistent execution raises error.""" + svc: ExecutionService = await scope.get(ExecutionService) + + with pytest.raises(ExecutionNotFoundError): + await svc.delete_execution("nonexistent-id") + + +class TestGetExecutionStats: + """Tests for get_execution_stats method.""" + + @pytest.mark.asyncio + async def test_get_execution_stats(self, scope: AsyncContainer) -> None: + """Get execution statistics for user.""" + svc: ExecutionService = await scope.get(ExecutionService) + user_id = f"test_user_{uuid.uuid4().hex[:8]}" + + # Create some executions + for i in range(2): + await svc.execute_script( + script=f"print({i})", + user_id=user_id, + client_ip="127.0.0.1", + user_agent="pytest", + lang="python", + lang_version="3.11", + ) + + # Get stats + stats = await svc.get_execution_stats(user_id=user_id) + + assert isinstance(stats, dict) + assert "total" in stats + assert stats["total"] >= 2 + assert "by_status" in stats + assert "by_language" in stats diff --git a/backend/tests/integration/services/idempotency/test_redis_repository.py b/backend/tests/e2e/services/idempotency/test_redis_repository.py similarity index 99% rename from backend/tests/integration/services/idempotency/test_redis_repository.py rename to backend/tests/e2e/services/idempotency/test_redis_repository.py index 6537ee8e..99f305d3 100644 --- 
a/backend/tests/integration/services/idempotency/test_redis_repository.py +++ b/backend/tests/e2e/services/idempotency/test_redis_repository.py @@ -12,7 +12,7 @@ ) from pymongo.errors import DuplicateKeyError -pytestmark = [pytest.mark.integration, pytest.mark.redis] +pytestmark = [pytest.mark.e2e, pytest.mark.redis] class TestHelperFunctions: diff --git a/backend/tests/e2e/services/notifications/test_notification_service.py b/backend/tests/e2e/services/notifications/test_notification_service.py new file mode 100644 index 00000000..c7bb0da3 --- /dev/null +++ b/backend/tests/e2e/services/notifications/test_notification_service.py @@ -0,0 +1,550 @@ +import uuid + +import pytest +from app.db.repositories import NotificationRepository +from app.domain.enums.notification import ( + NotificationChannel, + NotificationSeverity, +) +from app.domain.notification import ( + DomainNotificationListResult, + NotificationNotFoundError, + NotificationValidationError, +) +from app.services.notification_service import NotificationService +from dishka import AsyncContainer + +pytestmark = [pytest.mark.e2e, pytest.mark.mongodb] + + +def _unique_user_id() -> str: + return f"notif_user_{uuid.uuid4().hex[:8]}" + + +class TestCreateNotification: + """Tests for create_notification method.""" + + @pytest.mark.asyncio + async def test_create_notification_basic(self, scope: AsyncContainer) -> None: + """Create basic in-app notification.""" + svc: NotificationService = await scope.get(NotificationService) + user_id = _unique_user_id() + + notification = await svc.create_notification( + user_id=user_id, + subject="Test Subject", + body="Test body content", + tags=["test", "basic"], + severity=NotificationSeverity.MEDIUM, + channel=NotificationChannel.IN_APP, + ) + + assert notification.notification_id is not None + assert notification.user_id == user_id + assert notification.subject == "Test Subject" + assert notification.body == "Test body content" + assert notification.severity == 
NotificationSeverity.MEDIUM + assert notification.channel == NotificationChannel.IN_APP + assert "test" in notification.tags + assert "basic" in notification.tags + + @pytest.mark.asyncio + async def test_create_notification_with_metadata( + self, scope: AsyncContainer + ) -> None: + """Create notification with metadata.""" + svc: NotificationService = await scope.get(NotificationService) + user_id = _unique_user_id() + + notification = await svc.create_notification( + user_id=user_id, + subject="With Metadata", + body="Body", + tags=["meta"], + metadata={"execution_id": "exec-123", "duration": 45.5}, + ) + + assert notification.metadata is not None + assert notification.metadata.get("execution_id") == "exec-123" + assert notification.metadata.get("duration") == 45.5 + + @pytest.mark.asyncio + async def test_create_notification_with_action_url( + self, scope: AsyncContainer + ) -> None: + """Create notification with action URL.""" + svc: NotificationService = await scope.get(NotificationService) + user_id = _unique_user_id() + + notification = await svc.create_notification( + user_id=user_id, + subject="Action Required", + body="Click to view", + tags=["action"], + action_url="/executions/exec-123", + ) + + assert notification.action_url == "/executions/exec-123" + + @pytest.mark.asyncio + async def test_create_notification_all_severities( + self, scope: AsyncContainer + ) -> None: + """Create notifications with all severity levels.""" + svc: NotificationService = await scope.get(NotificationService) + + severities = [ + NotificationSeverity.LOW, + NotificationSeverity.MEDIUM, + NotificationSeverity.HIGH, + NotificationSeverity.URGENT, + ] + + for severity in severities: + user_id = _unique_user_id() + notification = await svc.create_notification( + user_id=user_id, + subject=f"Severity {severity}", + body="Body", + tags=["severity-test"], + severity=severity, + ) + assert notification.severity == severity + + @pytest.mark.asyncio + async def 
test_create_notification_empty_tags_raises( + self, scope: AsyncContainer + ) -> None: + """Create notification with empty tags raises validation error.""" + svc: NotificationService = await scope.get(NotificationService) + user_id = _unique_user_id() + + with pytest.raises(NotificationValidationError): + await svc.create_notification( + user_id=user_id, + subject="No Tags", + body="Body", + tags=[], + ) + + +class TestMarkAsRead: + """Tests for mark_as_read method.""" + + @pytest.mark.asyncio + async def test_mark_as_read_success(self, scope: AsyncContainer) -> None: + """Mark notification as read successfully.""" + svc: NotificationService = await scope.get(NotificationService) + user_id = _unique_user_id() + + # Create notification + notification = await svc.create_notification( + user_id=user_id, + subject="To Read", + body="Body", + tags=["read-test"], + ) + + # Mark as read + result = await svc.mark_as_read(user_id, notification.notification_id) + assert result is True + + @pytest.mark.asyncio + async def test_mark_as_read_nonexistent_raises( + self, scope: AsyncContainer + ) -> None: + """Mark nonexistent notification as read raises error.""" + svc: NotificationService = await scope.get(NotificationService) + user_id = _unique_user_id() + + with pytest.raises(NotificationNotFoundError): + await svc.mark_as_read(user_id, "nonexistent-notification-id") + + +class TestMarkAllAsRead: + """Tests for mark_all_as_read method.""" + + @pytest.mark.asyncio + async def test_mark_all_as_read(self, scope: AsyncContainer) -> None: + """Mark all notifications as read.""" + svc: NotificationService = await scope.get(NotificationService) + user_id = _unique_user_id() + + # Create multiple notifications + for i in range(3): + await svc.create_notification( + user_id=user_id, + subject=f"Notification {i}", + body="Body", + tags=["bulk-read"], + ) + + # Mark all as read + count = await svc.mark_all_as_read(user_id) + assert count >= 3 + + # Verify unread count is 0 + unread = 
await svc.get_unread_count(user_id) + assert unread == 0 + + +class TestGetUnreadCount: + """Tests for get_unread_count method.""" + + @pytest.mark.asyncio + async def test_get_unread_count(self, scope: AsyncContainer) -> None: + """Get unread notification count.""" + svc: NotificationService = await scope.get(NotificationService) + user_id = _unique_user_id() + + # Initial count should be 0 + initial = await svc.get_unread_count(user_id) + assert initial == 0 + + # Create notifications + for _ in range(2): + await svc.create_notification( + user_id=user_id, + subject="Unread", + body="Body", + tags=["count-test"], + ) + + # Count should increase + count = await svc.get_unread_count(user_id) + assert count >= 2 + + +class TestListNotifications: + """Tests for list_notifications method.""" + + @pytest.mark.asyncio + async def test_list_notifications_basic(self, scope: AsyncContainer) -> None: + """List notifications with pagination.""" + svc: NotificationService = await scope.get(NotificationService) + user_id = _unique_user_id() + + # Create notifications + for i in range(5): + await svc.create_notification( + user_id=user_id, + subject=f"List Test {i}", + body="Body", + tags=["list-test"], + ) + + # List with pagination + result = await svc.list_notifications(user_id, limit=3, offset=0) + + assert isinstance(result, DomainNotificationListResult) + assert len(result.notifications) <= 3 + assert result.total >= 5 + + @pytest.mark.asyncio + async def test_list_notifications_with_tag_filter( + self, scope: AsyncContainer + ) -> None: + """List notifications filtered by tags.""" + svc: NotificationService = await scope.get(NotificationService) + user_id = _unique_user_id() + + # Create with different tags + await svc.create_notification( + user_id=user_id, + subject="Tagged A", + body="Body", + tags=["filter-a"], + ) + await svc.create_notification( + user_id=user_id, + subject="Tagged B", + body="Body", + tags=["filter-b"], + ) + + # Filter by tag + result = await 
svc.list_notifications( + user_id, include_tags=["filter-a"] + ) + + assert isinstance(result, DomainNotificationListResult) + for notif in result.notifications: + assert "filter-a" in notif.tags + + @pytest.mark.asyncio + async def test_list_notifications_exclude_tags( + self, scope: AsyncContainer + ) -> None: + """List notifications excluding specific tags.""" + svc: NotificationService = await scope.get(NotificationService) + user_id = _unique_user_id() + + # Create notifications + await svc.create_notification( + user_id=user_id, + subject="Include Me", + body="Body", + tags=["include"], + ) + await svc.create_notification( + user_id=user_id, + subject="Exclude Me", + body="Body", + tags=["exclude"], + ) + + # List excluding 'exclude' tag + result = await svc.list_notifications( + user_id, exclude_tags=["exclude"] + ) + + assert isinstance(result, DomainNotificationListResult) + for notif in result.notifications: + assert "exclude" not in notif.tags + + +class TestDeleteNotification: + """Tests for delete_notification method.""" + + @pytest.mark.asyncio + async def test_delete_notification_success(self, scope: AsyncContainer) -> None: + """Delete notification successfully.""" + svc: NotificationService = await scope.get(NotificationService) + user_id = _unique_user_id() + + # Create notification + notification = await svc.create_notification( + user_id=user_id, + subject="To Delete", + body="Body", + tags=["delete-test"], + ) + + # Delete it + result = await svc.delete_notification(user_id, notification.notification_id) + assert result is True + + @pytest.mark.asyncio + async def test_delete_nonexistent_notification_raises( + self, scope: AsyncContainer + ) -> None: + """Delete nonexistent notification raises error.""" + svc: NotificationService = await scope.get(NotificationService) + user_id = _unique_user_id() + + with pytest.raises(NotificationNotFoundError): + await svc.delete_notification(user_id, "nonexistent-id") + + +class TestSubscriptions: + 
"""Tests for subscription management.""" + + @pytest.mark.asyncio + async def test_update_subscription_in_app(self, scope: AsyncContainer) -> None: + """Update in-app subscription.""" + svc: NotificationService = await scope.get(NotificationService) + repo: NotificationRepository = await scope.get(NotificationRepository) + user_id = _unique_user_id() + + # Update subscription + subscription = await svc.update_subscription( + user_id=user_id, + channel=NotificationChannel.IN_APP, + enabled=True, + ) + + assert subscription.channel == NotificationChannel.IN_APP + assert subscription.enabled is True + + # Verify via repo + stored = await repo.get_subscription(user_id, NotificationChannel.IN_APP) + assert stored is not None + assert stored.enabled is True + + @pytest.mark.asyncio + async def test_update_subscription_disable(self, scope: AsyncContainer) -> None: + """Disable subscription channel.""" + svc: NotificationService = await scope.get(NotificationService) + user_id = _unique_user_id() + + # Enable first + await svc.update_subscription( + user_id=user_id, + channel=NotificationChannel.IN_APP, + enabled=True, + ) + + # Then disable + subscription = await svc.update_subscription( + user_id=user_id, + channel=NotificationChannel.IN_APP, + enabled=False, + ) + + assert subscription.enabled is False + + @pytest.mark.asyncio + async def test_update_subscription_with_severities( + self, scope: AsyncContainer + ) -> None: + """Update subscription with severity filter.""" + svc: NotificationService = await scope.get(NotificationService) + user_id = _unique_user_id() + + subscription = await svc.update_subscription( + user_id=user_id, + channel=NotificationChannel.IN_APP, + enabled=True, + severities=[NotificationSeverity.HIGH, NotificationSeverity.URGENT], + ) + + assert subscription.severities is not None + assert NotificationSeverity.HIGH in subscription.severities + assert NotificationSeverity.URGENT in subscription.severities + + @pytest.mark.asyncio + async def 
test_update_webhook_requires_url(self, scope: AsyncContainer) -> None: + """Webhook subscription requires URL when enabled.""" + svc: NotificationService = await scope.get(NotificationService) + user_id = _unique_user_id() + + with pytest.raises(NotificationValidationError): + await svc.update_subscription( + user_id=user_id, + channel=NotificationChannel.WEBHOOK, + enabled=True, + # No webhook_url provided + ) + + @pytest.mark.asyncio + async def test_update_webhook_with_url(self, scope: AsyncContainer) -> None: + """Webhook subscription with valid URL.""" + svc: NotificationService = await scope.get(NotificationService) + user_id = _unique_user_id() + + subscription = await svc.update_subscription( + user_id=user_id, + channel=NotificationChannel.WEBHOOK, + enabled=True, + webhook_url="https://example.com/webhook", + ) + + assert subscription.enabled is True + assert subscription.webhook_url == "https://example.com/webhook" + + @pytest.mark.asyncio + async def test_get_all_subscriptions(self, scope: AsyncContainer) -> None: + """Get all subscriptions for user.""" + svc: NotificationService = await scope.get(NotificationService) + user_id = _unique_user_id() + + # Update a subscription + await svc.update_subscription( + user_id=user_id, + channel=NotificationChannel.IN_APP, + enabled=True, + ) + + # Get all + subscriptions = await svc.get_subscriptions(user_id) + + assert isinstance(subscriptions, dict) + assert NotificationChannel.IN_APP in subscriptions + + +class TestSystemNotification: + """Tests for create_system_notification method.""" + + @pytest.mark.asyncio + async def test_create_system_notification_to_users( + self, scope: AsyncContainer + ) -> None: + """Create system notification targeting specific users.""" + svc: NotificationService = await scope.get(NotificationService) + target_users = [_unique_user_id(), _unique_user_id()] + + stats = await svc.create_system_notification( + title="System Alert", + message="Important system message", + 
severity=NotificationSeverity.HIGH, + tags=["system", "alert"], + target_users=target_users, + ) + + assert isinstance(stats, dict) + assert stats["total_users"] == 2 + assert "created" in stats + assert "failed" in stats + assert "throttled" in stats + + @pytest.mark.asyncio + async def test_create_system_notification_empty_targets( + self, scope: AsyncContainer + ) -> None: + """System notification with no targets returns zero stats.""" + svc: NotificationService = await scope.get(NotificationService) + + stats = await svc.create_system_notification( + title="No Targets", + message="Message", + target_users=[], + ) + + assert stats["total_users"] == 0 + assert stats["created"] == 0 + + +class TestNotificationIntegration: + """Integration tests for notification workflow.""" + + @pytest.mark.asyncio + async def test_full_notification_lifecycle(self, scope: AsyncContainer) -> None: + """Test complete notification lifecycle: create -> read -> delete.""" + svc: NotificationService = await scope.get(NotificationService) + user_id = _unique_user_id() + + # Create + notification = await svc.create_notification( + user_id=user_id, + subject="Lifecycle Test", + body="Testing full lifecycle", + tags=["lifecycle"], + ) + assert notification.notification_id is not None + + # Verify unread count increased + unread = await svc.get_unread_count(user_id) + assert unread >= 1 + + # Mark as read + await svc.mark_as_read(user_id, notification.notification_id) + + # Delete + deleted = await svc.delete_notification(user_id, notification.notification_id) + assert deleted is True + + @pytest.mark.asyncio + async def test_notification_with_subscription_filter( + self, scope: AsyncContainer + ) -> None: + """Notification delivery respects subscription filters.""" + svc: NotificationService = await scope.get(NotificationService) + user_id = _unique_user_id() + + # Set up subscription with severity filter + await svc.update_subscription( + user_id=user_id, + 
channel=NotificationChannel.IN_APP, + enabled=True, + severities=[NotificationSeverity.HIGH], + ) + + # Create HIGH severity notification - should be delivered + high_notif = await svc.create_notification( + user_id=user_id, + subject="High Priority", + body="Body", + tags=["filter-test"], + severity=NotificationSeverity.HIGH, + ) + assert high_notif.notification_id is not None diff --git a/backend/tests/integration/services/rate_limit/test_rate_limit_service.py b/backend/tests/e2e/services/rate_limit/test_rate_limit_service.py similarity index 99% rename from backend/tests/integration/services/rate_limit/test_rate_limit_service.py rename to backend/tests/e2e/services/rate_limit/test_rate_limit_service.py index 0476f048..80ad2d03 100644 --- a/backend/tests/integration/services/rate_limit/test_rate_limit_service.py +++ b/backend/tests/e2e/services/rate_limit/test_rate_limit_service.py @@ -15,7 +15,7 @@ from app.services.rate_limit_service import RateLimitService from dishka import AsyncContainer -pytestmark = [pytest.mark.integration, pytest.mark.redis] +pytestmark = [pytest.mark.e2e, pytest.mark.redis] @pytest.mark.asyncio diff --git a/backend/tests/e2e/services/replay/test_replay_service.py b/backend/tests/e2e/services/replay/test_replay_service.py new file mode 100644 index 00000000..5dccec5f --- /dev/null +++ b/backend/tests/e2e/services/replay/test_replay_service.py @@ -0,0 +1,173 @@ +import pytest +from app.domain.enums.replay import ReplayStatus, ReplayTarget, ReplayType +from app.domain.replay.exceptions import ReplaySessionNotFoundError +from app.services.event_replay import ReplayConfig, ReplayFilter +from app.services.replay_service import ReplayService +from dishka import AsyncContainer + +pytestmark = [pytest.mark.e2e, pytest.mark.kafka] + + +class TestCreateSession: + """Tests for create_session_from_config method.""" + + @pytest.mark.asyncio + async def test_create_session_execution_type( + self, scope: AsyncContainer + ) -> None: + """Create replay 
session for execution events.""" + svc: ReplayService = await scope.get(ReplayService) + + cfg = ReplayConfig( + replay_type=ReplayType.EXECUTION, + target=ReplayTarget.TEST, + filter=ReplayFilter(), + max_events=1, + ) + result = await svc.create_session_from_config(cfg) + + assert result.session_id is not None + assert result.status in [ + ReplayStatus.CREATED, + ReplayStatus.RUNNING, + ReplayStatus.COMPLETED, + ] + assert result.message is not None + + @pytest.mark.asyncio + async def test_create_session_with_max_events( + self, scope: AsyncContainer + ) -> None: + """Create session with event limit.""" + svc: ReplayService = await scope.get(ReplayService) + + cfg = ReplayConfig( + replay_type=ReplayType.EXECUTION, + target=ReplayTarget.TEST, + filter=ReplayFilter(), + max_events=100, + ) + result = await svc.create_session_from_config(cfg) + + assert result.session_id is not None + assert result.status == ReplayStatus.CREATED + + @pytest.mark.asyncio + async def test_create_session_with_filter( + self, scope: AsyncContainer + ) -> None: + """Create session with event filter.""" + svc: ReplayService = await scope.get(ReplayService) + + replay_filter = ReplayFilter( + aggregate_id="exec-1", + ) + cfg = ReplayConfig( + replay_type=ReplayType.EXECUTION, + target=ReplayTarget.TEST, + filter=replay_filter, + max_events=10, + ) + result = await svc.create_session_from_config(cfg) + + assert result.session_id is not None + + +class TestListSessions: + """Tests for list_sessions method.""" + + @pytest.mark.asyncio + async def test_list_sessions(self, scope: AsyncContainer) -> None: + """List replay sessions.""" + svc: ReplayService = await scope.get(ReplayService) + + # Create a session first + cfg = ReplayConfig( + replay_type=ReplayType.EXECUTION, + target=ReplayTarget.TEST, + filter=ReplayFilter(), + max_events=1, + ) + created = await svc.create_session_from_config(cfg) + + # List sessions + sessions = svc.list_sessions(limit=10) + + assert isinstance(sessions, 
list) + assert any(s.session_id == created.session_id for s in sessions) + + @pytest.mark.asyncio + async def test_list_sessions_with_limit(self, scope: AsyncContainer) -> None: + """List sessions respects limit.""" + svc: ReplayService = await scope.get(ReplayService) + + sessions = svc.list_sessions(limit=5) + + assert isinstance(sessions, list) + assert len(sessions) <= 5 + + +class TestGetSession: + """Tests for get_session method.""" + + @pytest.mark.asyncio + async def test_get_session_by_id(self, scope: AsyncContainer) -> None: + """Get session by ID.""" + svc: ReplayService = await scope.get(ReplayService) + + # Create a session + cfg = ReplayConfig( + replay_type=ReplayType.EXECUTION, + target=ReplayTarget.TEST, + filter=ReplayFilter(), + max_events=1, + ) + created = await svc.create_session_from_config(cfg) + + # Get session + session = svc.get_session(created.session_id) + + assert session is not None + assert session.session_id == created.session_id + + @pytest.mark.asyncio + async def test_get_session_not_found(self, scope: AsyncContainer) -> None: + """Get nonexistent session raises error.""" + svc: ReplayService = await scope.get(ReplayService) + + with pytest.raises(ReplaySessionNotFoundError): + svc.get_session("nonexistent-session-id") + + +class TestCancelSession: + """Tests for cancel_session method.""" + + @pytest.mark.asyncio + async def test_cancel_session(self, scope: AsyncContainer) -> None: + """Cancel a replay session.""" + svc: ReplayService = await scope.get(ReplayService) + + # Create a session + cfg = ReplayConfig( + replay_type=ReplayType.EXECUTION, + target=ReplayTarget.TEST, + filter=ReplayFilter(), + max_events=1000, # Large limit so it doesn't complete immediately + ) + created = await svc.create_session_from_config(cfg) + + # Cancel session + result = await svc.cancel_session(created.session_id) + + assert result.session_id == created.session_id + assert result.status == ReplayStatus.CANCELLED + + @pytest.mark.asyncio + async 
def test_cancel_nonexistent_session( + self, scope: AsyncContainer + ) -> None: + """Cancel nonexistent session raises error.""" + svc: ReplayService = await scope.get(ReplayService) + + with pytest.raises(ReplaySessionNotFoundError): + await svc.cancel_session("nonexistent-session-id") diff --git a/backend/tests/e2e/services/saga/test_saga_service.py b/backend/tests/e2e/services/saga/test_saga_service.py new file mode 100644 index 00000000..1d936625 --- /dev/null +++ b/backend/tests/e2e/services/saga/test_saga_service.py @@ -0,0 +1,362 @@ +from datetime import datetime, timezone +from uuid import uuid4 + +import pytest +from app.db.repositories import ExecutionRepository, SagaRepository +from app.domain.enums import SagaState +from app.domain.enums.user import UserRole +from app.domain.execution import DomainExecutionCreate +from app.domain.saga.exceptions import SagaAccessDeniedError, SagaNotFoundError +from app.domain.saga.models import Saga, SagaListResult +from app.schemas_pydantic.user import User +from app.services.execution_service import ExecutionService +from app.services.saga.saga_service import SagaService +from dishka import AsyncContainer + +pytestmark = [pytest.mark.e2e, pytest.mark.mongodb] + + +def make_test_user( + user_id: str = "test_user_1", + role: UserRole = UserRole.USER, +) -> User: + """Create a test user for saga access checks.""" + return User( + user_id=user_id, + username=user_id, + email=f"{user_id}@example.com", + role=role, + is_active=True, + is_superuser=role == UserRole.ADMIN, + created_at=datetime.now(timezone.utc), + updated_at=datetime.now(timezone.utc), + ) + + +async def create_execution_for_user( + exec_repo: ExecutionRepository, + user_id: str, +) -> str: + """Create an execution record for a user and return its ID.""" + execution = await exec_repo.create_execution( + DomainExecutionCreate( + script="print('test')", + lang="python", + lang_version="3.11", + user_id=user_id, + ) + ) + return execution.execution_id + + 
+async def create_saga_for_execution( + saga_repo: SagaRepository, + execution_id: str, + state: SagaState = SagaState.CREATED, + saga_name: str = "test_saga", +) -> Saga: + """Create a saga for an execution with the given state.""" + saga = Saga( + saga_id=str(uuid4()), + saga_name=saga_name, + execution_id=execution_id, + state=state, + current_step="step1", + completed_steps=[], + compensated_steps=[], + context_data={}, + created_at=datetime.now(timezone.utc), + updated_at=datetime.now(timezone.utc), + ) + await saga_repo.upsert_saga(saga) + return saga + + +class TestListUserSagas: + """Tests for list_user_sagas method.""" + + @pytest.mark.asyncio + async def test_list_user_sagas_empty(self, scope: AsyncContainer) -> None: + """List sagas for user with no sagas returns empty list.""" + svc: SagaService = await scope.get(SagaService) + # Use a unique user ID that has never been used + user = make_test_user(user_id=f"empty_user_{uuid4().hex[:8]}") + + result = await svc.list_user_sagas(user) + + assert isinstance(result, SagaListResult) + assert result.total == 0 + assert len(result.sagas) == 0 + + @pytest.mark.asyncio + async def test_list_user_sagas_with_limit(self, scope: AsyncContainer) -> None: + """List sagas respects limit parameter.""" + svc: SagaService = await scope.get(SagaService) + exec_repo: ExecutionRepository = await scope.get(ExecutionRepository) + saga_repo: SagaRepository = await scope.get(SagaRepository) + + # Create unique user + user_id = f"limit_user_{uuid4().hex[:8]}" + user = make_test_user(user_id=user_id) + + # Create 7 sagas for this user (more than limit of 5) + created_count = 7 + for i in range(created_count): + exec_id = await create_execution_for_user(exec_repo, user_id) + await create_saga_for_execution(saga_repo, exec_id, saga_name=f"saga_{i}") + + result = await svc.list_user_sagas(user, limit=5) + + assert isinstance(result, SagaListResult) + assert len(result.sagas) == 5 + assert result.total == created_count + + 
@pytest.mark.asyncio + async def test_list_user_sagas_with_skip(self, scope: AsyncContainer) -> None: + """List sagas respects skip parameter.""" + svc: SagaService = await scope.get(SagaService) + exec_repo: ExecutionRepository = await scope.get(ExecutionRepository) + saga_repo: SagaRepository = await scope.get(SagaRepository) + + # Create unique user + user_id = f"skip_user_{uuid4().hex[:8]}" + user = make_test_user(user_id=user_id) + + # Create 5 sagas for this user + created_count = 5 + for i in range(created_count): + exec_id = await create_execution_for_user(exec_repo, user_id) + await create_saga_for_execution(saga_repo, exec_id, saga_name=f"saga_{i}") + + # Get all sagas (skip=0) + result_all = await svc.list_user_sagas(user, skip=0, limit=10) + assert result_all.total == created_count + assert len(result_all.sagas) == created_count + + # Skip first 2 sagas + result_skip = await svc.list_user_sagas(user, skip=2, limit=10) + assert result_skip.total == created_count + assert len(result_skip.sagas) == created_count - 2 + + @pytest.mark.asyncio + async def test_list_user_sagas_filter_by_state( + self, scope: AsyncContainer + ) -> None: + """List sagas filtered by state returns only matching state.""" + svc: SagaService = await scope.get(SagaService) + exec_repo: ExecutionRepository = await scope.get(ExecutionRepository) + saga_repo: SagaRepository = await scope.get(SagaRepository) + + # Create unique user + user_id = f"state_user_{uuid4().hex[:8]}" + user = make_test_user(user_id=user_id) + + # Create sagas with different states + created_count = 3 + for i in range(created_count): + exec_id = await create_execution_for_user(exec_repo, user_id) + await create_saga_for_execution( + saga_repo, exec_id, state=SagaState.CREATED, saga_name=f"created_{i}" + ) + + # Create 2 sagas with RUNNING state + running_count = 2 + for i in range(running_count): + exec_id = await create_execution_for_user(exec_repo, user_id) + await create_saga_for_execution( + saga_repo, 
exec_id, state=SagaState.RUNNING, saga_name=f"running_{i}" + ) + + # Filter by CREATED state + result = await svc.list_user_sagas(user, state=SagaState.CREATED) + + assert isinstance(result, SagaListResult) + assert result.total == created_count + assert len(result.sagas) == created_count + for saga in result.sagas: + assert saga.state == SagaState.CREATED + + @pytest.mark.asyncio + async def test_admin_can_list_all_sagas(self, scope: AsyncContainer) -> None: + """Admin user can list all sagas.""" + svc: SagaService = await scope.get(SagaService) + admin = make_test_user(user_id="admin_user", role=UserRole.ADMIN) + + result = await svc.list_user_sagas(admin) + + assert isinstance(result, SagaListResult) + assert isinstance(result.sagas, list) + + +class TestGetSagaWithAccessCheck: + """Tests for get_saga_with_access_check method.""" + + @pytest.mark.asyncio + async def test_get_saga_not_found(self, scope: AsyncContainer) -> None: + """Get nonexistent saga raises SagaNotFoundError.""" + svc: SagaService = await scope.get(SagaService) + user = make_test_user() + + with pytest.raises(SagaNotFoundError): + await svc.get_saga_with_access_check("nonexistent-saga-id", user) + + +class TestCheckExecutionAccess: + """Tests for check_execution_access method.""" + + @pytest.mark.asyncio + async def test_admin_has_access_to_any_execution( + self, scope: AsyncContainer + ) -> None: + """Admin has access to any execution.""" + svc: SagaService = await scope.get(SagaService) + exec_svc: ExecutionService = await scope.get(ExecutionService) + admin = make_test_user(user_id="admin_user", role=UserRole.ADMIN) + + # Create execution as different user + exec_result = await exec_svc.execute_script( + script="print('admin access test')", + user_id="other_user", + client_ip="127.0.0.1", + user_agent="pytest", + lang="python", + lang_version="3.11", + ) + + has_access = await svc.check_execution_access( + exec_result.execution_id, admin + ) + assert has_access is True + + 
@pytest.mark.asyncio + async def test_user_has_access_to_own_execution( + self, scope: AsyncContainer + ) -> None: + """User has access to their own execution.""" + svc: SagaService = await scope.get(SagaService) + exec_svc: ExecutionService = await scope.get(ExecutionService) + user_id = "saga_owner_user" + user = make_test_user(user_id=user_id) + + exec_result = await exec_svc.execute_script( + script="print('owner access test')", + user_id=user_id, + client_ip="127.0.0.1", + user_agent="pytest", + lang="python", + lang_version="3.11", + ) + + has_access = await svc.check_execution_access( + exec_result.execution_id, user + ) + assert has_access is True + + @pytest.mark.asyncio + async def test_user_no_access_to_other_execution( + self, scope: AsyncContainer + ) -> None: + """User does not have access to other user's execution.""" + svc: SagaService = await scope.get(SagaService) + exec_svc: ExecutionService = await scope.get(ExecutionService) + other_user = make_test_user(user_id="different_user") + + exec_result = await exec_svc.execute_script( + script="print('no access test')", + user_id="owner_user", + client_ip="127.0.0.1", + user_agent="pytest", + lang="python", + lang_version="3.11", + ) + + has_access = await svc.check_execution_access( + exec_result.execution_id, other_user + ) + assert has_access is False + + @pytest.mark.asyncio + async def test_access_to_nonexistent_execution( + self, scope: AsyncContainer + ) -> None: + """Access check for nonexistent execution returns False.""" + svc: SagaService = await scope.get(SagaService) + user = make_test_user() + + has_access = await svc.check_execution_access("nonexistent-id", user) + assert has_access is False + + +class TestGetExecutionSagas: + """Tests for get_execution_sagas method.""" + + @pytest.mark.asyncio + async def test_get_execution_sagas_access_denied( + self, scope: AsyncContainer + ) -> None: + """Get sagas for execution without access raises error.""" + svc: SagaService = await 
scope.get(SagaService) + exec_svc: ExecutionService = await scope.get(ExecutionService) + other_user = make_test_user(user_id="no_access_user") + + exec_result = await exec_svc.execute_script( + script="print('saga access denied')", + user_id="owner_user", + client_ip="127.0.0.1", + user_agent="pytest", + lang="python", + lang_version="3.11", + ) + + with pytest.raises(SagaAccessDeniedError): + await svc.get_execution_sagas(exec_result.execution_id, other_user) + + @pytest.mark.asyncio + async def test_get_execution_sagas_owner_access( + self, scope: AsyncContainer + ) -> None: + """Owner can get sagas for their execution.""" + svc: SagaService = await scope.get(SagaService) + exec_svc: ExecutionService = await scope.get(ExecutionService) + user_id = "saga_exec_owner" + user = make_test_user(user_id=user_id) + + exec_result = await exec_svc.execute_script( + script="print('owner sagas')", + user_id=user_id, + client_ip="127.0.0.1", + user_agent="pytest", + lang="python", + lang_version="3.11", + ) + + result = await svc.get_execution_sagas(exec_result.execution_id, user) + + assert isinstance(result, SagaListResult) + assert isinstance(result.sagas, list) + + +class TestGetSagaStatistics: + """Tests for get_saga_statistics method.""" + + @pytest.mark.asyncio + async def test_get_saga_statistics_user(self, scope: AsyncContainer) -> None: + """Get saga statistics for regular user.""" + svc: SagaService = await scope.get(SagaService) + user = make_test_user() + + stats = await svc.get_saga_statistics(user) + + assert isinstance(stats, dict) + + @pytest.mark.asyncio + async def test_get_saga_statistics_admin_all( + self, scope: AsyncContainer + ) -> None: + """Admin can get all saga statistics.""" + svc: SagaService = await scope.get(SagaService) + admin = make_test_user(user_id="stats_admin", role=UserRole.ADMIN) + + stats = await svc.get_saga_statistics(admin, include_all=True) + + assert isinstance(stats, dict) diff --git 
a/backend/tests/e2e/services/saved_script/test_saved_script_service.py b/backend/tests/e2e/services/saved_script/test_saved_script_service.py new file mode 100644 index 00000000..57ce1b91 --- /dev/null +++ b/backend/tests/e2e/services/saved_script/test_saved_script_service.py @@ -0,0 +1,452 @@ +import uuid + +import pytest +from app.domain.saved_script import ( + DomainSavedScript, + DomainSavedScriptCreate, + DomainSavedScriptUpdate, + SavedScriptNotFoundError, +) +from app.services.saved_script_service import SavedScriptService +from dishka import AsyncContainer + +pytestmark = [pytest.mark.e2e, pytest.mark.mongodb] + + +def _unique_user_id() -> str: + return f"script_user_{uuid.uuid4().hex[:8]}" + + +def _create_payload( + name: str = "test_script", + description: str | None = None, + script: str = "print('hello world')", +) -> DomainSavedScriptCreate: + return DomainSavedScriptCreate( + name=name, + description=description, + script=script, + ) + + +class TestCreateSavedScript: + """Tests for create_saved_script method.""" + + @pytest.mark.asyncio + async def test_create_saved_script_basic(self, scope: AsyncContainer) -> None: + """Create a basic saved script.""" + service: SavedScriptService = await scope.get(SavedScriptService) + user_id = _unique_user_id() + + payload = _create_payload(name="Basic Script") + created = await service.create_saved_script(payload, user_id) + + assert isinstance(created, DomainSavedScript) + assert created.script_id is not None + assert created.user_id == user_id + assert created.name == "Basic Script" + assert created.script == "print('hello world')" + assert created.created_at is not None + assert created.updated_at is not None + + @pytest.mark.asyncio + async def test_create_saved_script_with_description( + self, scope: AsyncContainer + ) -> None: + """Create saved script with description.""" + service: SavedScriptService = await scope.get(SavedScriptService) + user_id = _unique_user_id() + + payload = _create_payload( + 
name="Described Script", + description="This script does something useful", + ) + created = await service.create_saved_script(payload, user_id) + + assert created.description == "This script does something useful" + + @pytest.mark.asyncio + async def test_create_saved_script_multiline(self, scope: AsyncContainer) -> None: + """Create saved script with multiline code.""" + service: SavedScriptService = await scope.get(SavedScriptService) + user_id = _unique_user_id() + + multiline_script = """def hello(): + print('Hello World') + +if __name__ == '__main__': + hello() +""" + payload = _create_payload(name="Multiline", script=multiline_script) + created = await service.create_saved_script(payload, user_id) + + assert created.script == multiline_script + assert "def hello():" in created.script + + @pytest.mark.asyncio + async def test_create_multiple_scripts_same_user( + self, scope: AsyncContainer + ) -> None: + """User can create multiple scripts.""" + service: SavedScriptService = await scope.get(SavedScriptService) + user_id = _unique_user_id() + + scripts = [] + for i in range(3): + payload = _create_payload(name=f"Script {i}", script=f"print({i})") + created = await service.create_saved_script(payload, user_id) + scripts.append(created) + + # All should have unique IDs + script_ids = [s.script_id for s in scripts] + assert len(set(script_ids)) == 3 + + @pytest.mark.asyncio + async def test_create_scripts_different_users_isolated( + self, scope: AsyncContainer + ) -> None: + """Scripts from different users are isolated.""" + service: SavedScriptService = await scope.get(SavedScriptService) + user1 = _unique_user_id() + user2 = _unique_user_id() + + # Create scripts for each user + payload1 = _create_payload(name="User1 Script") + script1 = await service.create_saved_script(payload1, user1) + + payload2 = _create_payload(name="User2 Script") + script2 = await service.create_saved_script(payload2, user2) + + # List each user's scripts + user1_scripts = await 
service.list_saved_scripts(user1) + user2_scripts = await service.list_saved_scripts(user2) + + # Should only see their own + assert any(s.script_id == script1.script_id for s in user1_scripts) + assert not any(s.script_id == script2.script_id for s in user1_scripts) + + assert any(s.script_id == script2.script_id for s in user2_scripts) + assert not any(s.script_id == script1.script_id for s in user2_scripts) + + +class TestGetSavedScript: + """Tests for get_saved_script method.""" + + @pytest.mark.asyncio + async def test_get_saved_script_success(self, scope: AsyncContainer) -> None: + """Get saved script by ID.""" + service: SavedScriptService = await scope.get(SavedScriptService) + user_id = _unique_user_id() + + # Create + payload = _create_payload(name="To Retrieve") + created = await service.create_saved_script(payload, user_id) + + # Get + retrieved = await service.get_saved_script(str(created.script_id), user_id) + + assert retrieved is not None + assert retrieved.script_id == created.script_id + assert retrieved.name == "To Retrieve" + assert retrieved.user_id == user_id + + @pytest.mark.asyncio + async def test_get_saved_script_not_found(self, scope: AsyncContainer) -> None: + """Get nonexistent script raises error.""" + service: SavedScriptService = await scope.get(SavedScriptService) + user_id = _unique_user_id() + + with pytest.raises(SavedScriptNotFoundError): + await service.get_saved_script("nonexistent-script-id", user_id) + + @pytest.mark.asyncio + async def test_get_saved_script_wrong_user(self, scope: AsyncContainer) -> None: + """Cannot get another user's script.""" + service: SavedScriptService = await scope.get(SavedScriptService) + owner = _unique_user_id() + other_user = _unique_user_id() + + # Create as owner + payload = _create_payload(name="Private Script") + created = await service.create_saved_script(payload, owner) + + # Try to get as other user + with pytest.raises(SavedScriptNotFoundError): + await 
service.get_saved_script(str(created.script_id), other_user) + + +class TestUpdateSavedScript: + """Tests for update_saved_script method.""" + + @pytest.mark.asyncio + async def test_update_saved_script_name(self, scope: AsyncContainer) -> None: + """Update script name.""" + service: SavedScriptService = await scope.get(SavedScriptService) + user_id = _unique_user_id() + + # Create + payload = _create_payload(name="Original Name") + created = await service.create_saved_script(payload, user_id) + + # Update + update = DomainSavedScriptUpdate(name="Updated Name", script=created.script) + updated = await service.update_saved_script( + str(created.script_id), user_id, update + ) + + assert updated is not None + assert updated.name == "Updated Name" + + @pytest.mark.asyncio + async def test_update_saved_script_content(self, scope: AsyncContainer) -> None: + """Update script content.""" + service: SavedScriptService = await scope.get(SavedScriptService) + user_id = _unique_user_id() + + # Create + payload = _create_payload(script="print('original')") + created = await service.create_saved_script(payload, user_id) + + # Update + update = DomainSavedScriptUpdate(name=created.name, script="print('updated')") + updated = await service.update_saved_script( + str(created.script_id), user_id, update + ) + + assert updated.script == "print('updated')" + + @pytest.mark.asyncio + async def test_update_saved_script_description( + self, scope: AsyncContainer + ) -> None: + """Update script description.""" + service: SavedScriptService = await scope.get(SavedScriptService) + user_id = _unique_user_id() + + # Create without description + payload = _create_payload(name="No Desc") + created = await service.create_saved_script(payload, user_id) + assert created.description is None + + # Update with description + update = DomainSavedScriptUpdate( + name=created.name, + script=created.script, + description="Now has description", + ) + updated = await service.update_saved_script( + 
str(created.script_id), user_id, update + ) + + assert updated.description == "Now has description" + + @pytest.mark.asyncio + async def test_update_saved_script_not_found(self, scope: AsyncContainer) -> None: + """Update nonexistent script raises error.""" + service: SavedScriptService = await scope.get(SavedScriptService) + user_id = _unique_user_id() + + update = DomainSavedScriptUpdate(name="New Name", script="print(1)") + with pytest.raises(SavedScriptNotFoundError): + await service.update_saved_script("nonexistent-id", user_id, update) + + @pytest.mark.asyncio + async def test_update_saved_script_wrong_user(self, scope: AsyncContainer) -> None: + """Cannot update another user's script.""" + service: SavedScriptService = await scope.get(SavedScriptService) + owner = _unique_user_id() + other_user = _unique_user_id() + + # Create as owner + payload = _create_payload(name="Owner Script") + created = await service.create_saved_script(payload, owner) + + # Try to update as other user + update = DomainSavedScriptUpdate(name="Hacked Name", script="print('hacked')") + with pytest.raises(SavedScriptNotFoundError): + await service.update_saved_script(str(created.script_id), other_user, update) + + +class TestDeleteSavedScript: + """Tests for delete_saved_script method.""" + + @pytest.mark.asyncio + async def test_delete_saved_script_success(self, scope: AsyncContainer) -> None: + """Delete saved script successfully.""" + service: SavedScriptService = await scope.get(SavedScriptService) + user_id = _unique_user_id() + + # Create + payload = _create_payload(name="To Delete") + created = await service.create_saved_script(payload, user_id) + + # Delete + await service.delete_saved_script(str(created.script_id), user_id) + + # Verify it's gone + with pytest.raises(SavedScriptNotFoundError): + await service.get_saved_script(str(created.script_id), user_id) + + @pytest.mark.asyncio + async def test_delete_saved_script_not_found(self, scope: AsyncContainer) -> None: + 
"""Delete nonexistent script raises error.""" + service: SavedScriptService = await scope.get(SavedScriptService) + user_id = _unique_user_id() + + with pytest.raises(SavedScriptNotFoundError): + await service.delete_saved_script("nonexistent-id", user_id) + + @pytest.mark.asyncio + async def test_delete_saved_script_wrong_user(self, scope: AsyncContainer) -> None: + """Cannot delete another user's script.""" + service: SavedScriptService = await scope.get(SavedScriptService) + owner = _unique_user_id() + other_user = _unique_user_id() + + # Create as owner + payload = _create_payload(name="Owner Script") + created = await service.create_saved_script(payload, owner) + + # Try to delete as other user + with pytest.raises(SavedScriptNotFoundError): + await service.delete_saved_script(str(created.script_id), other_user) + + # Should still exist for owner + retrieved = await service.get_saved_script(str(created.script_id), owner) + assert retrieved is not None + + +class TestListSavedScripts: + """Tests for list_saved_scripts method.""" + + @pytest.mark.asyncio + async def test_list_saved_scripts_empty(self, scope: AsyncContainer) -> None: + """List scripts for user with none returns empty list.""" + service: SavedScriptService = await scope.get(SavedScriptService) + user_id = _unique_user_id() + + scripts = await service.list_saved_scripts(user_id) + + assert isinstance(scripts, list) + assert len(scripts) == 0 + + @pytest.mark.asyncio + async def test_list_saved_scripts_multiple(self, scope: AsyncContainer) -> None: + """List multiple scripts for user.""" + service: SavedScriptService = await scope.get(SavedScriptService) + user_id = _unique_user_id() + + # Create multiple scripts + script_names = ["Script A", "Script B", "Script C"] + for name in script_names: + payload = _create_payload(name=name) + await service.create_saved_script(payload, user_id) + + # List + scripts = await service.list_saved_scripts(user_id) + + assert len(scripts) >= 3 + names = [s.name for 
s in scripts] + for expected_name in script_names: + assert expected_name in names + + @pytest.mark.asyncio + async def test_list_saved_scripts_user_isolated( + self, scope: AsyncContainer + ) -> None: + """Each user only sees their own scripts.""" + service: SavedScriptService = await scope.get(SavedScriptService) + user1 = _unique_user_id() + user2 = _unique_user_id() + + # Create scripts for user1 + for i in range(2): + payload = _create_payload(name=f"User1 Script {i}") + await service.create_saved_script(payload, user1) + + # Create scripts for user2 + for i in range(3): + payload = _create_payload(name=f"User2 Script {i}") + await service.create_saved_script(payload, user2) + + # List user1's scripts + user1_scripts = await service.list_saved_scripts(user1) + assert len(user1_scripts) >= 2 + for script in user1_scripts: + assert script.user_id == user1 + + # List user2's scripts + user2_scripts = await service.list_saved_scripts(user2) + assert len(user2_scripts) >= 3 + for script in user2_scripts: + assert script.user_id == user2 + + +class TestSavedScriptIntegration: + """Integration tests for saved script workflow.""" + + @pytest.mark.asyncio + async def test_full_crud_lifecycle(self, scope: AsyncContainer) -> None: + """Test complete CRUD lifecycle.""" + service: SavedScriptService = await scope.get(SavedScriptService) + user_id = _unique_user_id() + + # Create + payload = _create_payload( + name="Lifecycle Script", + description="Testing lifecycle", + script="print('v1')", + ) + created = await service.create_saved_script(payload, user_id) + script_id = str(created.script_id) + + # Read + retrieved = await service.get_saved_script(script_id, user_id) + assert retrieved.name == "Lifecycle Script" + + # Update + update = DomainSavedScriptUpdate( + name="Updated Lifecycle Script", + description="Updated description", + script="print('v2')", + ) + updated = await service.update_saved_script(script_id, user_id, update) + assert updated.name == "Updated 
Lifecycle Script" + assert updated.script == "print('v2')" + + # List - should include our script + scripts = await service.list_saved_scripts(user_id) + assert any(s.script_id == created.script_id for s in scripts) + + # Delete + await service.delete_saved_script(script_id, user_id) + + # Verify deleted + with pytest.raises(SavedScriptNotFoundError): + await service.get_saved_script(script_id, user_id) + + @pytest.mark.asyncio + async def test_script_with_special_characters( + self, scope: AsyncContainer + ) -> None: + """Script content with special characters is preserved.""" + service: SavedScriptService = await scope.get(SavedScriptService) + user_id = _unique_user_id() + + special_script = """ +# Unicode: 你好世界 🌍 +def greet(name: str) -> str: + \"\"\"Greet someone with special chars: <>&'\\\"\"\"\" + return f"Hello, {name}! 👋" + +# Math symbols: ∑ ∫ √ ∞ +print(greet("World")) +""" + payload = _create_payload(name="Special Chars", script=special_script) + created = await service.create_saved_script(payload, user_id) + + # Retrieve and verify + retrieved = await service.get_saved_script(str(created.script_id), user_id) + assert "你好世界" in retrieved.script + assert "🌍" in retrieved.script + assert "∑" in retrieved.script diff --git a/backend/tests/integration/services/sse/__init__.py b/backend/tests/e2e/services/sse/__init__.py similarity index 100% rename from backend/tests/integration/services/sse/__init__.py rename to backend/tests/e2e/services/sse/__init__.py diff --git a/backend/tests/integration/services/sse/test_partitioned_event_router.py b/backend/tests/e2e/services/sse/test_partitioned_event_router.py similarity index 95% rename from backend/tests/integration/services/sse/test_partitioned_event_router.py rename to backend/tests/e2e/services/sse/test_partitioned_event_router.py index 7e1c4ac6..6bb6b71f 100644 --- a/backend/tests/integration/services/sse/test_partitioned_event_router.py +++ b/backend/tests/e2e/services/sse/test_partitioned_event_router.py 
@@ -12,9 +12,9 @@ from app.services.sse.redis_bus import SSERedisBus from app.settings import Settings -from tests.helpers import make_execution_requested_event +from tests.conftest import make_execution_requested_event -pytestmark = [pytest.mark.integration, pytest.mark.redis] +pytestmark = [pytest.mark.e2e, pytest.mark.redis] _test_logger = logging.getLogger("test.services.sse.partitioned_event_router_integration") diff --git a/backend/tests/integration/services/sse/test_redis_bus.py b/backend/tests/e2e/services/sse/test_redis_bus.py similarity index 99% rename from backend/tests/integration/services/sse/test_redis_bus.py rename to backend/tests/e2e/services/sse/test_redis_bus.py index b22c5c1e..8d0ac726 100644 --- a/backend/tests/integration/services/sse/test_redis_bus.py +++ b/backend/tests/e2e/services/sse/test_redis_bus.py @@ -11,7 +11,7 @@ from app.schemas_pydantic.sse import RedisNotificationMessage, RedisSSEMessage from app.services.sse.redis_bus import SSERedisBus -pytestmark = pytest.mark.integration +pytestmark = pytest.mark.e2e _test_logger = logging.getLogger("test.services.sse.redis_bus") diff --git a/backend/tests/e2e/services/user_settings/test_user_settings_service.py b/backend/tests/e2e/services/user_settings/test_user_settings_service.py new file mode 100644 index 00000000..687ae104 --- /dev/null +++ b/backend/tests/e2e/services/user_settings/test_user_settings_service.py @@ -0,0 +1,566 @@ +import uuid +from datetime import datetime, timedelta, timezone + +import pytest +from app.domain.enums import Theme +from app.domain.user.settings_models import ( + DomainEditorSettings, + DomainNotificationSettings, + DomainSettingsHistoryEntry, + DomainUserSettings, + DomainUserSettingsUpdate, +) +from app.services.user_settings_service import UserSettingsService +from dishka import AsyncContainer + +pytestmark = [pytest.mark.e2e, pytest.mark.mongodb, pytest.mark.kafka] + + +def _unique_user_id() -> str: + return f"settings_user_{uuid.uuid4().hex[:8]}" + + 
+class TestGetUserSettings: + """Tests for get_user_settings method.""" + + @pytest.mark.asyncio + async def test_get_user_settings_new_user(self, scope: AsyncContainer) -> None: + """Get settings for new user returns defaults.""" + svc: UserSettingsService = await scope.get(UserSettingsService) + user_id = _unique_user_id() + + settings = await svc.get_user_settings(user_id) + + assert isinstance(settings, DomainUserSettings) + assert settings.user_id == user_id + assert settings.theme == Theme.AUTO # Default theme + assert settings.editor is not None + assert settings.notifications is not None + + @pytest.mark.asyncio + async def test_get_user_settings_cache_hit(self, scope: AsyncContainer) -> None: + """Second get_user_settings should hit cache.""" + svc: UserSettingsService = await scope.get(UserSettingsService) + user_id = _unique_user_id() + + # First call - cache miss + settings1 = await svc.get_user_settings(user_id) + + # Second call - cache hit + settings2 = await svc.get_user_settings(user_id) + + assert settings1.user_id == settings2.user_id + assert settings1.theme == settings2.theme + + @pytest.mark.asyncio + async def test_get_user_settings_fresh_bypasses_cache( + self, scope: AsyncContainer + ) -> None: + """get_user_settings_fresh bypasses cache.""" + svc: UserSettingsService = await scope.get(UserSettingsService) + user_id = _unique_user_id() + + # Get and cache + await svc.get_user_settings(user_id) + + # Fresh should still work + fresh = await svc.get_user_settings_fresh(user_id) + + assert isinstance(fresh, DomainUserSettings) + assert fresh.user_id == user_id + + +class TestUpdateUserSettings: + """Tests for update_user_settings method.""" + + @pytest.mark.asyncio + async def test_update_theme(self, scope: AsyncContainer) -> None: + """Update theme setting.""" + svc: UserSettingsService = await scope.get(UserSettingsService) + user_id = _unique_user_id() + + # Update theme + updates = DomainUserSettingsUpdate(theme=Theme.DARK) + updated = 
await svc.update_user_settings(user_id, updates) + + assert updated.theme == Theme.DARK + + # Verify persistence + retrieved = await svc.get_user_settings(user_id) + assert retrieved.theme == Theme.DARK + + @pytest.mark.asyncio + async def test_update_multiple_settings(self, scope: AsyncContainer) -> None: + """Update multiple settings at once.""" + svc: UserSettingsService = await scope.get(UserSettingsService) + user_id = _unique_user_id() + + updates = DomainUserSettingsUpdate( + theme=Theme.LIGHT, + timezone="Europe/London", + date_format="DD/MM/YYYY", + ) + updated = await svc.update_user_settings(user_id, updates) + + assert updated.theme == Theme.LIGHT + assert updated.timezone == "Europe/London" + assert updated.date_format == "DD/MM/YYYY" + + @pytest.mark.asyncio + async def test_update_with_reason(self, scope: AsyncContainer) -> None: + """Update settings with reason tracked.""" + svc: UserSettingsService = await scope.get(UserSettingsService) + user_id = _unique_user_id() + + reason_text = "User preference" + updates = DomainUserSettingsUpdate(theme=Theme.DARK) + await svc.update_user_settings(user_id, updates, reason=reason_text) + + # Verify reason was persisted in history + history = await svc.get_settings_history(user_id) + assert isinstance(history, list) + assert len(history) > 0, "Expected at least one history entry after update" + + # Find entry with our reason + reasons_found = [entry.reason for entry in history if entry.reason == reason_text] + assert len(reasons_found) > 0, ( + f"Expected history to contain entry with reason '{reason_text}', " + f"found reasons: {[e.reason for e in history]}" + ) + + @pytest.mark.asyncio + async def test_update_increments_version(self, scope: AsyncContainer) -> None: + """Each update increments settings version.""" + svc: UserSettingsService = await scope.get(UserSettingsService) + user_id = _unique_user_id() + + # Get initial + initial = await svc.get_user_settings(user_id) + initial_version = initial.version 
or 0 + + # Update + updates = DomainUserSettingsUpdate(theme=Theme.DARK) + updated = await svc.update_user_settings(user_id, updates) + + assert updated.version is not None + assert updated.version > initial_version + + @pytest.mark.asyncio + async def test_update_empty_changes_no_op(self, scope: AsyncContainer) -> None: + """Empty update is a no-op.""" + svc: UserSettingsService = await scope.get(UserSettingsService) + user_id = _unique_user_id() + + # Get initial + initial = await svc.get_user_settings(user_id) + + # Empty update + updates = DomainUserSettingsUpdate() + result = await svc.update_user_settings(user_id, updates) + + # Should return same settings + assert result.theme == initial.theme + + +class TestUpdateTheme: + """Tests for update_theme convenience method.""" + + @pytest.mark.asyncio + async def test_update_theme_to_dark(self, scope: AsyncContainer) -> None: + """Update theme to dark.""" + svc: UserSettingsService = await scope.get(UserSettingsService) + user_id = _unique_user_id() + + result = await svc.update_theme(user_id, Theme.DARK) + + assert result.theme == Theme.DARK + + @pytest.mark.asyncio + async def test_update_theme_to_light(self, scope: AsyncContainer) -> None: + """Update theme to light.""" + svc: UserSettingsService = await scope.get(UserSettingsService) + user_id = _unique_user_id() + + result = await svc.update_theme(user_id, Theme.LIGHT) + + assert result.theme == Theme.LIGHT + + @pytest.mark.asyncio + async def test_update_theme_to_system(self, scope: AsyncContainer) -> None: + """Update theme to system default.""" + svc: UserSettingsService = await scope.get(UserSettingsService) + user_id = _unique_user_id() + + # First set to dark + await svc.update_theme(user_id, Theme.DARK) + + # Then back to auto + result = await svc.update_theme(user_id, Theme.AUTO) + + assert result.theme == Theme.AUTO + + +class TestUpdateNotificationSettings: + """Tests for update_notification_settings method.""" + + @pytest.mark.asyncio + async def 
test_update_notification_settings(self, scope: AsyncContainer) -> None: + """Update notification settings.""" + svc: UserSettingsService = await scope.get(UserSettingsService) + user_id = _unique_user_id() + + notification_settings = DomainNotificationSettings( + execution_completed=True, + execution_failed=True, + system_updates=False, + ) + result = await svc.update_notification_settings(user_id, notification_settings) + + assert result.notifications is not None + assert result.notifications.execution_completed is True + assert result.notifications.execution_failed is True + assert result.notifications.system_updates is False + + @pytest.mark.asyncio + async def test_disable_all_notifications(self, scope: AsyncContainer) -> None: + """Disable all notifications.""" + svc: UserSettingsService = await scope.get(UserSettingsService) + user_id = _unique_user_id() + + notification_settings = DomainNotificationSettings( + execution_completed=False, + execution_failed=False, + system_updates=False, + security_alerts=False, + ) + result = await svc.update_notification_settings(user_id, notification_settings) + + assert result.notifications.execution_completed is False + + +class TestUpdateEditorSettings: + """Tests for update_editor_settings method.""" + + @pytest.mark.asyncio + async def test_update_editor_tab_size(self, scope: AsyncContainer) -> None: + """Update editor tab size.""" + svc: UserSettingsService = await scope.get(UserSettingsService) + user_id = _unique_user_id() + + editor_settings = DomainEditorSettings(tab_size=4) + result = await svc.update_editor_settings(user_id, editor_settings) + + assert result.editor is not None + assert result.editor.tab_size == 4 + + @pytest.mark.asyncio + async def test_update_editor_multiple_options( + self, scope: AsyncContainer + ) -> None: + """Update multiple editor settings.""" + svc: UserSettingsService = await scope.get(UserSettingsService) + user_id = _unique_user_id() + + editor_settings = DomainEditorSettings( + 
tab_size=2, + show_line_numbers=True, + word_wrap=True, + font_size=14, + ) + result = await svc.update_editor_settings(user_id, editor_settings) + + assert result.editor.tab_size == 2 + assert result.editor.show_line_numbers is True + assert result.editor.word_wrap is True + assert result.editor.font_size == 14 + + +class TestUpdateCustomSetting: + """Tests for update_custom_setting method.""" + + @pytest.mark.asyncio + async def test_update_custom_setting_string(self, scope: AsyncContainer) -> None: + """Update custom setting with string value.""" + svc: UserSettingsService = await scope.get(UserSettingsService) + user_id = _unique_user_id() + + result = await svc.update_custom_setting(user_id, "favorite_color", "blue") + + assert result.custom_settings is not None + assert result.custom_settings.get("favorite_color") == "blue" + + @pytest.mark.asyncio + async def test_update_custom_setting_number(self, scope: AsyncContainer) -> None: + """Update custom setting with number value.""" + svc: UserSettingsService = await scope.get(UserSettingsService) + user_id = _unique_user_id() + + result = await svc.update_custom_setting(user_id, "max_results", 100) + + assert result.custom_settings.get("max_results") == 100 + + @pytest.mark.asyncio + async def test_update_custom_setting_boolean(self, scope: AsyncContainer) -> None: + """Update custom setting with boolean value.""" + svc: UserSettingsService = await scope.get(UserSettingsService) + user_id = _unique_user_id() + + result = await svc.update_custom_setting(user_id, "beta_features", True) + + assert result.custom_settings.get("beta_features") is True + + @pytest.mark.asyncio + async def test_update_multiple_custom_settings( + self, scope: AsyncContainer + ) -> None: + """Update multiple custom settings sequentially.""" + svc: UserSettingsService = await scope.get(UserSettingsService) + user_id = _unique_user_id() + + await svc.update_custom_setting(user_id, "key1", "value1") + await svc.update_custom_setting(user_id, 
"key2", "value2") + result = await svc.update_custom_setting(user_id, "key3", "value3") + + assert result.custom_settings.get("key1") == "value1" + assert result.custom_settings.get("key2") == "value2" + assert result.custom_settings.get("key3") == "value3" + + +class TestGetSettingsHistory: + """Tests for get_settings_history method.""" + + @pytest.mark.asyncio + async def test_get_settings_history_empty(self, scope: AsyncContainer) -> None: + """New user has no history.""" + svc: UserSettingsService = await scope.get(UserSettingsService) + user_id = _unique_user_id() + + history = await svc.get_settings_history(user_id) + + assert isinstance(history, list) + + @pytest.mark.asyncio + async def test_get_settings_history_after_updates( + self, scope: AsyncContainer + ) -> None: + """History contains entries after updates.""" + svc: UserSettingsService = await scope.get(UserSettingsService) + user_id = _unique_user_id() + + # Make some updates + await svc.update_theme(user_id, Theme.DARK) + await svc.update_theme(user_id, Theme.LIGHT) + + history = await svc.get_settings_history(user_id) + + assert isinstance(history, list) + # Should have at least some history entries + for entry in history: + assert isinstance(entry, DomainSettingsHistoryEntry) + assert entry.timestamp is not None + + @pytest.mark.asyncio + async def test_get_settings_history_with_limit( + self, scope: AsyncContainer + ) -> None: + """History respects limit parameter and returns most recent entries.""" + svc: UserSettingsService = await scope.get(UserSettingsService) + user_id = _unique_user_id() + + # Make 5 updates sequentially + for i in range(5): + await svc.update_custom_setting(user_id, f"key_{i}", f"value_{i}") + + # Request only 3 entries + history = await svc.get_settings_history(user_id, limit=3) + + assert isinstance(history, list) + assert len(history) == 3, f"Expected 3 history entries, got {len(history)}" + + # History should return most recent entries first (key_4, key_3, key_2) + # 
Each entry's field contains the custom setting key path + expected_keys = ["key_4", "key_3", "key_2"] + for i, entry in enumerate(history): + assert isinstance(entry, DomainSettingsHistoryEntry) + # The field for custom settings includes the key name + assert expected_keys[i] in entry.field, ( + f"Entry {i} field '{entry.field}' should contain '{expected_keys[i]}'" + ) + assert entry.new_value == f"value_{4 - i}", ( + f"Entry {i} new_value should be 'value_{4 - i}', got '{entry.new_value}'" + ) + + +class TestRestoreSettingsToPoint: + """Tests for restore_settings_to_point method.""" + + @pytest.mark.asyncio + async def test_restore_settings_to_current_time( + self, scope: AsyncContainer + ) -> None: + """Restore to current time is effectively a no-op.""" + svc: UserSettingsService = await scope.get(UserSettingsService) + user_id = _unique_user_id() + + # Set some settings + await svc.update_theme(user_id, Theme.DARK) + + # Restore to current time + now = datetime.now(timezone.utc) + restored = await svc.restore_settings_to_point(user_id, now) + + assert isinstance(restored, DomainUserSettings) + assert restored.user_id == user_id + + @pytest.mark.asyncio + async def test_restore_settings_to_past(self, scope: AsyncContainer) -> None: + """Restore settings to a past point reverts changes.""" + svc: UserSettingsService = await scope.get(UserSettingsService) + user_id = _unique_user_id() + + # Capture initial default settings + initial = await svc.get_user_settings(user_id) + assert initial.theme == Theme.AUTO, "Initial theme should be AUTO (default)" + + # Make changes that alter the settings + await svc.update_theme(user_id, Theme.DARK) + await svc.update_theme(user_id, Theme.LIGHT) + + # Verify settings changed + current = await svc.get_user_settings(user_id) + assert current.theme == Theme.LIGHT, "Theme should be LIGHT after updates" + + # Restore to before all changes (epoch) + past = datetime.now(timezone.utc) - timedelta(days=365) + restored = await 
svc.restore_settings_to_point(user_id, past) + + # Verify restore actually reverted to initial defaults + assert isinstance(restored, DomainUserSettings) + assert restored.theme == initial.theme, ( + f"Restored theme should match initial ({initial.theme}), got {restored.theme}" + ) + assert restored.timezone == initial.timezone, ( + f"Restored timezone should match initial ({initial.timezone}), got {restored.timezone}" + ) + + +class TestCacheManagement: + """Tests for cache management methods.""" + + @pytest.mark.asyncio + async def test_invalidate_cache(self, scope: AsyncContainer) -> None: + """Invalidate cache for user.""" + svc: UserSettingsService = await scope.get(UserSettingsService) + user_id = _unique_user_id() + + # Cache settings + await svc.get_user_settings(user_id) + + # Invalidate + await svc.invalidate_cache(user_id) + + # Should still work (cache miss) + settings = await svc.get_user_settings(user_id) + assert settings.user_id == user_id + + @pytest.mark.asyncio + async def test_get_cache_stats(self, scope: AsyncContainer) -> None: + """Get cache statistics.""" + svc: UserSettingsService = await scope.get(UserSettingsService) + + stats = svc.get_cache_stats() + + assert isinstance(stats, dict) + assert "cache_size" in stats + assert "max_cache_size" in stats + assert "cache_ttl_seconds" in stats + assert stats["cache_size"] >= 0 + assert stats["max_cache_size"] > 0 + + +class TestResetUserSettings: + """Tests for reset_user_settings method.""" + + @pytest.mark.asyncio + async def test_reset_user_settings(self, scope: AsyncContainer) -> None: + """Reset user settings clears all data.""" + svc: UserSettingsService = await scope.get(UserSettingsService) + user_id = _unique_user_id() + + # Set some custom settings + await svc.update_theme(user_id, Theme.DARK) + await svc.update_custom_setting(user_id, "custom_key", "custom_value") + + # Reset + await svc.reset_user_settings(user_id) + + # Get fresh - should be defaults + settings = await 
svc.get_user_settings_fresh(user_id) + assert settings.theme == Theme.AUTO # Default + + +class TestSettingsIntegration: + """Integration tests for settings workflow.""" + + @pytest.mark.asyncio + async def test_full_settings_lifecycle(self, scope: AsyncContainer) -> None: + """Test complete settings lifecycle.""" + svc: UserSettingsService = await scope.get(UserSettingsService) + user_id = _unique_user_id() + + # 1. Get default settings + initial = await svc.get_user_settings(user_id) + assert initial.theme == Theme.AUTO + + # 2. Update theme + await svc.update_theme(user_id, Theme.DARK) + + # 3. Update editor settings + await svc.update_editor_settings( + user_id, DomainEditorSettings(tab_size=4, show_line_numbers=True) + ) + + # 4. Update notification settings + await svc.update_notification_settings( + user_id, DomainNotificationSettings(execution_completed=True) + ) + + # 5. Add custom setting + await svc.update_custom_setting(user_id, "language", "en") + + # 6. Verify all settings persisted + final = await svc.get_user_settings(user_id) + assert final.theme == Theme.DARK + assert final.editor.tab_size == 4 + assert final.notifications.execution_completed is True + assert final.custom_settings.get("language") == "en" + + # 7. Get history + history = await svc.get_settings_history(user_id) + assert isinstance(history, list) + + # 8. 
Cache stats + stats = svc.get_cache_stats() + assert stats["cache_size"] >= 0 + + @pytest.mark.asyncio + async def test_settings_isolation_between_users( + self, scope: AsyncContainer + ) -> None: + """Settings are isolated between users.""" + svc: UserSettingsService = await scope.get(UserSettingsService) + user1 = _unique_user_id() + user2 = _unique_user_id() + + # User1 prefers dark theme + await svc.update_theme(user1, Theme.DARK) + + # User2 prefers light theme + await svc.update_theme(user2, Theme.LIGHT) + + # Verify isolation + user1_settings = await svc.get_user_settings(user1) + user2_settings = await svc.get_user_settings(user2) + + assert user1_settings.theme == Theme.DARK + assert user2_settings.theme == Theme.LIGHT diff --git a/backend/tests/e2e/test_admin_events_routes.py b/backend/tests/e2e/test_admin_events_routes.py new file mode 100644 index 00000000..3ef5ba32 --- /dev/null +++ b/backend/tests/e2e/test_admin_events_routes.py @@ -0,0 +1,540 @@ +import asyncio +from typing import Any + +import pytest +from app.domain.enums.events import EventType +from app.schemas_pydantic.admin_events import ( + EventBrowseRequest, + EventBrowseResponse, + EventDeleteResponse, + EventDetailResponse, + EventFilter, + EventReplayRequest, + EventReplayResponse, + EventReplayStatusResponse, + EventStatsResponse, +) +from app.schemas_pydantic.execution import ExecutionRequest, ExecutionResponse +from httpx import AsyncClient + +pytestmark = [pytest.mark.e2e, pytest.mark.admin, pytest.mark.kafka] + + +async def wait_for_events( + client: AsyncClient, + aggregate_id: str, + timeout: float = 30.0, + poll_interval: float = 0.5, +) -> list[dict[str, Any]]: + """Poll until at least one event exists for the aggregate. 
+ + Args: + client: Admin HTTP client + aggregate_id: Execution ID to get events for + timeout: Maximum time to wait in seconds + poll_interval: Time between polls in seconds + + Returns: + List of events for the aggregate + + Raises: + TimeoutError: If no events appear within timeout + AssertionError: If API returns unexpected status code + """ + deadline = asyncio.get_event_loop().time() + timeout + + while asyncio.get_event_loop().time() < deadline: + request = EventBrowseRequest( + filters=EventFilter(aggregate_id=aggregate_id), + limit=10, + ) + response = await client.post( + "/api/v1/admin/events/browse", json=request.model_dump() + ) + assert response.status_code == 200, f"Unexpected: {response.status_code} - {response.text}" + + result = EventBrowseResponse.model_validate(response.json()) + if result.events: + return result.events + + await asyncio.sleep(poll_interval) + + raise TimeoutError(f"No events appeared for aggregate {aggregate_id} within {timeout}s") + + +class TestBrowseEvents: + """Tests for POST /api/v1/admin/events/browse.""" + + @pytest.mark.asyncio + async def test_browse_events(self, test_admin: AsyncClient) -> None: + """Admin can browse events.""" + request = EventBrowseRequest( + filters=EventFilter(), + skip=0, + limit=50, + sort_by="timestamp", + sort_order=-1, + ) + response = await test_admin.post( + "/api/v1/admin/events/browse", json=request.model_dump() + ) + + assert response.status_code == 200 + result = EventBrowseResponse.model_validate(response.json()) + + assert result.total >= 0 + assert result.skip == 0 + assert result.limit == 50 + assert isinstance(result.events, list) + + @pytest.mark.asyncio + async def test_browse_events_with_event_type_filter( + self, test_admin: AsyncClient, created_execution_admin: ExecutionResponse + ) -> None: + """Browse events filtered by event type.""" + await wait_for_events(test_admin, created_execution_admin.execution_id) + + request = EventBrowseRequest( + 
filters=EventFilter(event_types=[EventType.EXECUTION_REQUESTED]), + skip=0, + limit=20, + ) + response = await test_admin.post( + "/api/v1/admin/events/browse", json=request.model_dump() + ) + + assert response.status_code == 200 + result = EventBrowseResponse.model_validate(response.json()) + assert isinstance(result.events, list) + assert result.total >= 1 + + @pytest.mark.asyncio + async def test_browse_events_with_pagination( + self, test_admin: AsyncClient + ) -> None: + """Pagination works for event browsing.""" + request = EventBrowseRequest( + filters=EventFilter(), + skip=10, + limit=25, + ) + response = await test_admin.post( + "/api/v1/admin/events/browse", json=request.model_dump() + ) + + assert response.status_code == 200 + result = EventBrowseResponse.model_validate(response.json()) + assert result.skip == 10 + assert result.limit == 25 + + @pytest.mark.asyncio + async def test_browse_events_with_aggregate_filter( + self, test_admin: AsyncClient, created_execution_admin: ExecutionResponse + ) -> None: + """Browse events filtered by aggregate ID.""" + await wait_for_events(test_admin, created_execution_admin.execution_id) + + request = EventBrowseRequest( + filters=EventFilter(aggregate_id=created_execution_admin.execution_id), + limit=50, + ) + response = await test_admin.post( + "/api/v1/admin/events/browse", json=request.model_dump() + ) + + assert response.status_code == 200 + result = EventBrowseResponse.model_validate(response.json()) + assert result.total >= 1 + assert len(result.events) >= 1 + + @pytest.mark.asyncio + async def test_browse_events_with_search_text( + self, test_admin: AsyncClient + ) -> None: + """Browse events with text search.""" + request = EventBrowseRequest( + filters=EventFilter(search_text="execution"), + limit=20, + ) + response = await test_admin.post( + "/api/v1/admin/events/browse", json=request.model_dump() + ) + + assert response.status_code == 200 + result = EventBrowseResponse.model_validate(response.json()) + 
assert isinstance(result.events, list) + + @pytest.mark.asyncio + async def test_browse_events_forbidden_for_regular_user( + self, test_user: AsyncClient + ) -> None: + """Regular user cannot browse admin events.""" + response = await test_user.post( + "/api/v1/admin/events/browse", + json={"filters": {}, "limit": 10}, + ) + + assert response.status_code == 403 + + @pytest.mark.asyncio + async def test_browse_events_unauthenticated( + self, client: AsyncClient + ) -> None: + """Unauthenticated request returns 401.""" + response = await client.post( + "/api/v1/admin/events/browse", + json={"filters": {}, "limit": 10}, + ) + + assert response.status_code == 401 + + +class TestEventStats: + """Tests for GET /api/v1/admin/events/stats.""" + + @pytest.mark.asyncio + async def test_get_event_stats(self, test_admin: AsyncClient) -> None: + """Admin can get event statistics.""" + response = await test_admin.get("/api/v1/admin/events/stats") + + assert response.status_code == 200 + stats = EventStatsResponse.model_validate(response.json()) + + assert stats.total_events >= 0 + assert isinstance(stats.events_by_type, dict) + assert isinstance(stats.events_by_hour, list) + assert isinstance(stats.top_users, list) + assert stats.error_rate >= 0.0 + assert stats.avg_processing_time >= 0.0 + + @pytest.mark.asyncio + async def test_get_event_stats_with_hours( + self, test_admin: AsyncClient + ) -> None: + """Get event statistics for specific time period.""" + response = await test_admin.get( + "/api/v1/admin/events/stats", + params={"hours": 48}, + ) + + assert response.status_code == 200 + stats = EventStatsResponse.model_validate(response.json()) + assert stats.total_events >= 0 + + @pytest.mark.asyncio + async def test_get_event_stats_max_hours( + self, test_admin: AsyncClient + ) -> None: + """Get event statistics for maximum time period (168 hours).""" + response = await test_admin.get( + "/api/v1/admin/events/stats", + params={"hours": 168}, + ) + + assert 
response.status_code == 200 + stats = EventStatsResponse.model_validate(response.json()) + assert isinstance(stats.events_by_hour, list) + + @pytest.mark.asyncio + async def test_get_event_stats_forbidden_for_regular_user( + self, test_user: AsyncClient + ) -> None: + """Regular user cannot get event stats.""" + response = await test_user.get("/api/v1/admin/events/stats") + + assert response.status_code == 403 + + +class TestExportEventsCSV: + """Tests for GET /api/v1/admin/events/export/csv.""" + + @pytest.mark.asyncio + async def test_export_events_csv(self, test_admin: AsyncClient) -> None: + """Admin can export events as CSV.""" + response = await test_admin.get("/api/v1/admin/events/export/csv") + + assert response.status_code == 200 + content_type = response.headers.get("content-type", "") + assert "text/csv" in content_type or "application/octet-stream" in content_type + content_disposition = response.headers.get("content-disposition", "") + assert "attachment" in content_disposition + assert ".csv" in content_disposition + + body_csv = response.text + assert "Event ID" in body_csv + assert "Timestamp" in body_csv + + @pytest.mark.asyncio + async def test_export_events_csv_with_filters( + self, test_admin: AsyncClient + ) -> None: + """Export CSV with event type filters.""" + response = await test_admin.get( + "/api/v1/admin/events/export/csv", + params={ + "event_types": [EventType.EXECUTION_REQUESTED], + "limit": 100, + }, + ) + + assert response.status_code == 200 + + @pytest.mark.asyncio + async def test_export_events_csv_forbidden_for_regular_user( + self, test_user: AsyncClient + ) -> None: + """Regular user cannot export events.""" + response = await test_user.get("/api/v1/admin/events/export/csv") + + assert response.status_code == 403 + + +class TestExportEventsJSON: + """Tests for GET /api/v1/admin/events/export/json.""" + + @pytest.mark.asyncio + async def test_export_events_json(self, test_admin: AsyncClient) -> None: + """Admin can export events 
as JSON.""" + response = await test_admin.get("/api/v1/admin/events/export/json") + + assert response.status_code == 200 + content_type = response.headers.get("content-type", "") + assert "application/json" in content_type or "application/octet-stream" in content_type + content_disposition = response.headers.get("content-disposition", "") + assert "attachment" in content_disposition + assert ".json" in content_disposition + + data = response.json() + assert "export_metadata" in data + assert "events" in data + assert isinstance(data["events"], list) + assert "exported_at" in data["export_metadata"] + + @pytest.mark.asyncio + async def test_export_events_json_with_filters( + self, test_admin: AsyncClient + ) -> None: + """Export JSON with comprehensive filters.""" + response = await test_admin.get( + "/api/v1/admin/events/export/json", + params={ + "event_types": [EventType.EXECUTION_REQUESTED, EventType.EXECUTION_STARTED], + "limit": 500, + }, + ) + + assert response.status_code == 200 + + @pytest.mark.asyncio + async def test_export_events_json_forbidden_for_regular_user( + self, test_user: AsyncClient + ) -> None: + """Regular user cannot export events.""" + response = await test_user.get("/api/v1/admin/events/export/json") + + assert response.status_code == 403 + + +class TestGetEventDetail: + """Tests for GET /api/v1/admin/events/{event_id}.""" + + @pytest.mark.asyncio + async def test_get_event_detail( + self, test_admin: AsyncClient, created_execution_admin: ExecutionResponse + ) -> None: + """Admin can get event details.""" + events = await wait_for_events(test_admin, created_execution_admin.execution_id) + event_id = events[0].get("event_id") + + response = await test_admin.get(f"/api/v1/admin/events/{event_id}") + + assert response.status_code == 200 + detail = EventDetailResponse.model_validate(response.json()) + + assert detail.event is not None + assert isinstance(detail.related_events, list) + assert isinstance(detail.timeline, list) + + 
@pytest.mark.asyncio + async def test_get_event_detail_not_found( + self, test_admin: AsyncClient + ) -> None: + """Get nonexistent event returns 404.""" + response = await test_admin.get( + "/api/v1/admin/events/nonexistent-event-id" + ) + + assert response.status_code == 404 + + @pytest.mark.asyncio + async def test_get_event_detail_forbidden_for_regular_user( + self, test_user: AsyncClient + ) -> None: + """Regular user cannot get event details.""" + response = await test_user.get("/api/v1/admin/events/some-event-id") + + assert response.status_code == 403 + + +class TestReplayEvents: + """Tests for POST /api/v1/admin/events/replay.""" + + @pytest.mark.asyncio + async def test_replay_events_dry_run( + self, test_admin: AsyncClient, created_execution_admin: ExecutionResponse + ) -> None: + """Admin can replay events in dry run mode.""" + await wait_for_events(test_admin, created_execution_admin.execution_id) + + request = EventReplayRequest( + aggregate_id=created_execution_admin.execution_id, + dry_run=True, + ) + response = await test_admin.post( + "/api/v1/admin/events/replay", json=request.model_dump() + ) + + assert response.status_code == 200 + result = EventReplayResponse.model_validate(response.json()) + assert result.dry_run is True + assert result.total_events >= 1 + assert result.replay_correlation_id is not None + assert result.status in ["preview", "completed", "scheduled"] + + @pytest.mark.asyncio + async def test_replay_events_no_events_found( + self, test_admin: AsyncClient + ) -> None: + """Replay with non-matching filter returns 404.""" + request = EventReplayRequest( + correlation_id="nonexistent-correlation-id-12345", + dry_run=True, + ) + response = await test_admin.post( + "/api/v1/admin/events/replay", json=request.model_dump() + ) + + assert response.status_code == 404 + + @pytest.mark.asyncio + async def test_replay_events_forbidden_for_regular_user( + self, test_user: AsyncClient + ) -> None: + """Regular user cannot replay events.""" + 
response = await test_user.post( + "/api/v1/admin/events/replay", + json={"aggregate_id": "test", "dry_run": True}, + ) + + assert response.status_code == 403 + + +class TestGetReplayStatus: + """Tests for GET /api/v1/admin/events/replay/{session_id}/status.""" + + @pytest.mark.asyncio + async def test_get_replay_status_not_found( + self, test_admin: AsyncClient + ) -> None: + """Get nonexistent replay session returns 404.""" + response = await test_admin.get( + "/api/v1/admin/events/replay/nonexistent-session/status" + ) + + assert response.status_code == 404 + + @pytest.mark.asyncio + async def test_get_replay_status_after_replay( + self, test_admin: AsyncClient, simple_execution_request: ExecutionRequest + ) -> None: + """Get replay status after starting a replay.""" + exec_response = await test_admin.post( + "/api/v1/execute", json=simple_execution_request.model_dump() + ) + assert exec_response.status_code == 200 + + execution = ExecutionResponse.model_validate(exec_response.json()) + await wait_for_events(test_admin, execution.execution_id) + + request = EventReplayRequest( + aggregate_id=execution.execution_id, + dry_run=False, + ) + replay_response = await test_admin.post( + "/api/v1/admin/events/replay", json=request.model_dump() + ) + assert replay_response.status_code == 200 + + replay_result = EventReplayResponse.model_validate(replay_response.json()) + assert replay_result.session_id is not None + + status_response = await test_admin.get( + f"/api/v1/admin/events/replay/{replay_result.session_id}/status" + ) + + assert status_response.status_code == 200 + status = EventReplayStatusResponse.model_validate(status_response.json()) + assert status.session_id == replay_result.session_id + assert status.status in ["pending", "in_progress", "completed", "failed"] + assert status.total_events >= 1 + assert status.replayed_events >= 0 + assert status.progress_percentage >= 0.0 + + @pytest.mark.asyncio + async def 
test_get_replay_status_forbidden_for_regular_user( + self, test_user: AsyncClient + ) -> None: + """Regular user cannot get replay status.""" + response = await test_user.get( + "/api/v1/admin/events/replay/some-session/status" + ) + + assert response.status_code == 403 + + +class TestDeleteEvent: + """Tests for DELETE /api/v1/admin/events/{event_id}.""" + + @pytest.mark.asyncio + async def test_delete_event( + self, test_admin: AsyncClient, simple_execution_request: ExecutionRequest + ) -> None: + """Admin can delete an event.""" + exec_response = await test_admin.post( + "/api/v1/execute", json=simple_execution_request.model_dump() + ) + assert exec_response.status_code == 200 + + execution = ExecutionResponse.model_validate(exec_response.json()) + events = await wait_for_events(test_admin, execution.execution_id) + event_id = events[0].get("event_id") + + response = await test_admin.delete(f"/api/v1/admin/events/{event_id}") + + assert response.status_code == 200 + result = EventDeleteResponse.model_validate(response.json()) + assert result.event_id == event_id + assert "deleted" in result.message.lower() + + verify_response = await test_admin.get(f"/api/v1/admin/events/{event_id}") + assert verify_response.status_code == 404 + + @pytest.mark.asyncio + async def test_delete_event_forbidden_for_regular_user( + self, test_user: AsyncClient + ) -> None: + """Regular user cannot delete events.""" + response = await test_user.delete( + "/api/v1/admin/events/some-event-id" + ) + + assert response.status_code == 403 + + @pytest.mark.asyncio + async def test_delete_event_unauthenticated( + self, client: AsyncClient + ) -> None: + """Unauthenticated request returns 401.""" + response = await client.delete("/api/v1/admin/events/some-event-id") + + assert response.status_code == 401 diff --git a/backend/tests/integration/test_admin_routes.py b/backend/tests/e2e/test_admin_routes.py similarity index 98% rename from backend/tests/integration/test_admin_routes.py rename to 
backend/tests/e2e/test_admin_routes.py index 5141986e..031eb471 100644 --- a/backend/tests/integration/test_admin_routes.py +++ b/backend/tests/e2e/test_admin_routes.py @@ -1,6 +1,7 @@ from uuid import uuid4 import pytest +from app.domain.enums.events import EventType from app.schemas_pydantic.admin_settings import ( ExecutionLimitsSchema, MonitoringSettingsSchema, @@ -11,7 +12,7 @@ from httpx import AsyncClient -@pytest.mark.integration +@pytest.mark.e2e class TestAdminSettings: """Test admin settings endpoints against real backend.""" @@ -120,7 +121,7 @@ async def test_regular_user_cannot_access_settings(self, test_user: AsyncClient) assert "admin" in error["detail"].lower() or "forbidden" in error["detail"].lower() -@pytest.mark.integration +@pytest.mark.e2e class TestAdminUsers: """Test admin user management endpoints against real backend.""" @@ -214,7 +215,7 @@ async def test_create_and_manage_user(self, test_admin: AsyncClient) -> None: assert get_deleted_response.status_code == 404 -@pytest.mark.integration +@pytest.mark.e2e class TestAdminEvents: """Test admin event management endpoints against real backend.""" @@ -224,7 +225,7 @@ async def test_browse_events(self, test_admin: AsyncClient) -> None: # Browse events browse_payload = { "filters": { - "event_types": ["user_registered", "user_logged_in"] + "event_types": [EventType.USER_REGISTERED, EventType.USER_LOGGED_IN] }, "skip": 0, "limit": 20, diff --git a/backend/tests/e2e/test_admin_settings_routes.py b/backend/tests/e2e/test_admin_settings_routes.py new file mode 100644 index 00000000..7a3dffd3 --- /dev/null +++ b/backend/tests/e2e/test_admin_settings_routes.py @@ -0,0 +1,361 @@ +"""E2E tests for admin settings routes.""" + +import pytest +from app.schemas_pydantic.admin_settings import ( + ExecutionLimitsSchema, + MonitoringSettingsSchema, + SecuritySettingsSchema, + SystemSettings, +) +from httpx import AsyncClient + +pytestmark = [pytest.mark.e2e, pytest.mark.admin] + + +class TestGetSystemSettings: 
+ """Tests for GET /api/v1/admin/settings/.""" + + @pytest.mark.asyncio + async def test_get_system_settings(self, test_admin: AsyncClient) -> None: + """Admin can get system settings.""" + response = await test_admin.get("/api/v1/admin/settings/") + + assert response.status_code == 200 + settings = SystemSettings.model_validate(response.json()) + + # Validate execution limits + assert settings.execution_limits is not None + assert isinstance(settings.execution_limits, ExecutionLimitsSchema) + assert settings.execution_limits.max_timeout_seconds >= 10 + assert settings.execution_limits.max_memory_mb >= 128 + assert settings.execution_limits.max_cpu_cores >= 1 + assert settings.execution_limits.max_concurrent_executions >= 1 + + # Validate security settings + assert settings.security_settings is not None + assert isinstance(settings.security_settings, SecuritySettingsSchema) + assert settings.security_settings.password_min_length >= 6 + assert settings.security_settings.session_timeout_minutes >= 5 + assert settings.security_settings.max_login_attempts >= 3 + assert settings.security_settings.lockout_duration_minutes >= 5 + + # Validate monitoring settings + assert settings.monitoring_settings is not None + assert isinstance( + settings.monitoring_settings, MonitoringSettingsSchema + ) + assert settings.monitoring_settings.metrics_retention_days >= 7 + assert settings.monitoring_settings.log_level in [ + "DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL" + ] + assert isinstance(settings.monitoring_settings.enable_tracing, bool) + assert 0.0 <= settings.monitoring_settings.sampling_rate <= 1.0 + + @pytest.mark.asyncio + async def test_get_system_settings_forbidden_for_regular_user( + self, test_user: AsyncClient + ) -> None: + """Regular user cannot get system settings.""" + response = await test_user.get("/api/v1/admin/settings/") + assert response.status_code == 403 + + @pytest.mark.asyncio + async def test_get_system_settings_unauthenticated( + self, client: 
AsyncClient + ) -> None: + """Unauthenticated request returns 401.""" + response = await client.get("/api/v1/admin/settings/") + assert response.status_code == 401 + + +class TestUpdateSystemSettings: + """Tests for PUT /api/v1/admin/settings/.""" + + @pytest.mark.asyncio + async def test_update_system_settings_full( + self, test_admin: AsyncClient + ) -> None: + """Admin can update all system settings.""" + request = SystemSettings( + execution_limits=ExecutionLimitsSchema( + max_timeout_seconds=600, + max_memory_mb=1024, + max_cpu_cores=4, + max_concurrent_executions=20, + ), + security_settings=SecuritySettingsSchema( + password_min_length=10, + session_timeout_minutes=120, + max_login_attempts=5, + lockout_duration_minutes=30, + ), + monitoring_settings=MonitoringSettingsSchema( + metrics_retention_days=60, + log_level="WARNING", + enable_tracing=True, + sampling_rate=0.2, + ), + ) + response = await test_admin.put( + "/api/v1/admin/settings/", json=request.model_dump() + ) + + assert response.status_code == 200 + settings = SystemSettings.model_validate(response.json()) + + assert settings.execution_limits.max_timeout_seconds == 600 + assert settings.execution_limits.max_memory_mb == 1024 + assert settings.execution_limits.max_cpu_cores == 4 + assert settings.execution_limits.max_concurrent_executions == 20 + + assert settings.security_settings.password_min_length == 10 + assert settings.security_settings.session_timeout_minutes == 120 + + assert settings.monitoring_settings.metrics_retention_days == 60 + assert settings.monitoring_settings.log_level == "WARNING" + assert settings.monitoring_settings.sampling_rate == 0.2 + + @pytest.mark.asyncio + async def test_update_execution_limits_only( + self, test_admin: AsyncClient + ) -> None: + """Admin can update only execution limits.""" + # Get current settings first + get_response = await test_admin.get("/api/v1/admin/settings/") + current = SystemSettings.model_validate(get_response.json()) + + # Update only 
execution limits + new_execution_limits = ExecutionLimitsSchema( + max_timeout_seconds=300, + max_memory_mb=512, + max_cpu_cores=2, + max_concurrent_executions=15, + ) + request = SystemSettings( + execution_limits=new_execution_limits, + security_settings=current.security_settings, + monitoring_settings=current.monitoring_settings, + ) + response = await test_admin.put( + "/api/v1/admin/settings/", json=request.model_dump() + ) + + assert response.status_code == 200 + settings = SystemSettings.model_validate(response.json()) + assert settings.execution_limits.max_timeout_seconds == 300 + assert settings.execution_limits.max_concurrent_executions == 15 + + @pytest.mark.asyncio + async def test_update_security_settings_only( + self, test_admin: AsyncClient + ) -> None: + """Admin can update only security settings.""" + # Get current settings + get_response = await test_admin.get("/api/v1/admin/settings/") + current = SystemSettings.model_validate(get_response.json()) + + # Update only security settings + new_security = SecuritySettingsSchema( + password_min_length=12, + session_timeout_minutes=90, + max_login_attempts=3, + lockout_duration_minutes=20, + ) + request = SystemSettings( + execution_limits=current.execution_limits, + security_settings=new_security, + monitoring_settings=current.monitoring_settings, + ) + response = await test_admin.put( + "/api/v1/admin/settings/", json=request.model_dump() + ) + + assert response.status_code == 200 + settings = SystemSettings.model_validate(response.json()) + assert settings.security_settings.password_min_length == 12 + assert settings.security_settings.session_timeout_minutes == 90 + + @pytest.mark.asyncio + async def test_update_monitoring_settings_only( + self, test_admin: AsyncClient + ) -> None: + """Admin can update only monitoring settings.""" + # Get current settings + get_response = await test_admin.get("/api/v1/admin/settings/") + current = SystemSettings.model_validate(get_response.json()) + + # Update only 
monitoring settings + new_monitoring = MonitoringSettingsSchema( + metrics_retention_days=45, + log_level="DEBUG", + enable_tracing=False, + sampling_rate=0.5, + ) + request = SystemSettings( + execution_limits=current.execution_limits, + security_settings=current.security_settings, + monitoring_settings=new_monitoring, + ) + response = await test_admin.put( + "/api/v1/admin/settings/", json=request.model_dump() + ) + + assert response.status_code == 200 + settings = SystemSettings.model_validate(response.json()) + assert settings.monitoring_settings.metrics_retention_days == 45 + assert settings.monitoring_settings.log_level == "DEBUG" + assert settings.monitoring_settings.enable_tracing is False + assert settings.monitoring_settings.sampling_rate == 0.5 + + @pytest.mark.asyncio + async def test_update_system_settings_invalid_values( + self, test_admin: AsyncClient + ) -> None: + """Invalid setting values are rejected.""" + # Get current settings for partial update + get_response = await test_admin.get("/api/v1/admin/settings/") + current = SystemSettings.model_validate(get_response.json()) + + # Try with invalid timeout (too low) + response = await test_admin.put( + "/api/v1/admin/settings/", + json={ + "execution_limits": { + "max_timeout_seconds": 1, # minimum is 10 + "max_memory_mb": 512, + "max_cpu_cores": 2, + "max_concurrent_executions": 10, + }, + "security_settings": current.security_settings.model_dump(), + "monitoring_settings": current.monitoring_settings.model_dump(), + }, + ) + + assert response.status_code == 422 + + @pytest.mark.asyncio + async def test_update_system_settings_invalid_log_level( + self, test_admin: AsyncClient + ) -> None: + """Invalid log level is rejected.""" + # Get current settings + get_response = await test_admin.get("/api/v1/admin/settings/") + current = SystemSettings.model_validate(get_response.json()) + + response = await test_admin.put( + "/api/v1/admin/settings/", + json={ + "execution_limits": 
current.execution_limits.model_dump(), + "security_settings": current.security_settings.model_dump(), + "monitoring_settings": { + "metrics_retention_days": 30, + "log_level": "INVALID_LEVEL", # invalid + "enable_tracing": True, + "sampling_rate": 0.1, + }, + }, + ) + + assert response.status_code == 422 + + @pytest.mark.asyncio + async def test_update_system_settings_forbidden_for_regular_user( + self, test_user: AsyncClient + ) -> None: + """Regular user cannot update system settings.""" + response = await test_user.put( + "/api/v1/admin/settings/", + json={ + "execution_limits": { + "max_timeout_seconds": 300, + "max_memory_mb": 512, + "max_cpu_cores": 2, + "max_concurrent_executions": 10, + }, + "security_settings": { + "password_min_length": 8, + "session_timeout_minutes": 60, + "max_login_attempts": 5, + "lockout_duration_minutes": 15, + }, + "monitoring_settings": { + "metrics_retention_days": 30, + "log_level": "INFO", + "enable_tracing": True, + "sampling_rate": 0.1, + }, + }, + ) + assert response.status_code == 403 + + +class TestResetSystemSettings: + """Tests for POST /api/v1/admin/settings/reset.""" + + @pytest.mark.asyncio + async def test_reset_system_settings( + self, test_admin: AsyncClient + ) -> None: + """Admin can reset system settings to defaults.""" + # First modify settings + await test_admin.put( + "/api/v1/admin/settings/", + json={ + "execution_limits": { + "max_timeout_seconds": 600, + "max_memory_mb": 2048, + "max_cpu_cores": 8, + "max_concurrent_executions": 50, + }, + "security_settings": { + "password_min_length": 16, + "session_timeout_minutes": 240, + "max_login_attempts": 10, + "lockout_duration_minutes": 60, + }, + "monitoring_settings": { + "metrics_retention_days": 90, + "log_level": "DEBUG", + "enable_tracing": False, + "sampling_rate": 0.9, + }, + }, + ) + + # Reset to defaults + response = await test_admin.post("/api/v1/admin/settings/reset") + + assert response.status_code == 200 + settings = 
SystemSettings.model_validate(response.json()) + + # Check that settings are reset to defaults + assert settings.execution_limits.max_timeout_seconds == 300 + assert settings.execution_limits.max_memory_mb == 512 + assert settings.execution_limits.max_cpu_cores == 2 + assert settings.execution_limits.max_concurrent_executions == 10 + + assert settings.security_settings.password_min_length == 8 + assert settings.security_settings.session_timeout_minutes == 60 + assert settings.security_settings.max_login_attempts == 5 + assert settings.security_settings.lockout_duration_minutes == 15 + + assert settings.monitoring_settings.metrics_retention_days == 30 + assert settings.monitoring_settings.log_level == "INFO" + assert settings.monitoring_settings.enable_tracing is True + assert settings.monitoring_settings.sampling_rate == 0.1 + + @pytest.mark.asyncio + async def test_reset_system_settings_forbidden_for_regular_user( + self, test_user: AsyncClient + ) -> None: + """Regular user cannot reset system settings.""" + response = await test_user.post("/api/v1/admin/settings/reset") + assert response.status_code == 403 + + @pytest.mark.asyncio + async def test_reset_system_settings_unauthenticated( + self, client: AsyncClient + ) -> None: + """Unauthenticated request returns 401.""" + response = await client.post("/api/v1/admin/settings/reset") + assert response.status_code == 401 diff --git a/backend/tests/e2e/test_admin_users_routes.py b/backend/tests/e2e/test_admin_users_routes.py new file mode 100644 index 00000000..d82f50a6 --- /dev/null +++ b/backend/tests/e2e/test_admin_users_routes.py @@ -0,0 +1,767 @@ +import uuid + +import pytest +from app.domain.enums.user import UserRole +from app.schemas_pydantic.admin_user_overview import ( + AdminUserOverview, + DerivedCounts, + RateLimitSummary, +) +from app.schemas_pydantic.user import ( + DeleteUserResponse, + MessageResponse, + PasswordResetRequest, + RateLimitUpdateRequest, + RateLimitUpdateResponse, + UserCreate, + 
UserListResponse, + UserRateLimitsResponse, + UserResponse, + UserUpdate, +) +from httpx import AsyncClient + +pytestmark = [pytest.mark.e2e, pytest.mark.admin] + + +def make_user_create(prefix: str, role: UserRole = UserRole.USER) -> UserCreate: + """Helper to create UserCreate with unique username/email.""" + uid = uuid.uuid4().hex[:8] + return UserCreate( + username=f"{prefix}_{uid}", + email=f"{prefix}_{uid}@example.com", + password="password123", + role=role, + ) + + +class TestListUsers: + """Tests for GET /api/v1/admin/users/.""" + + @pytest.mark.asyncio + async def test_list_users(self, test_admin: AsyncClient) -> None: + """Admin can list all users.""" + response = await test_admin.get("/api/v1/admin/users/") + + assert response.status_code == 200 + result = UserListResponse.model_validate(response.json()) + + assert result.total >= 0 + assert result.offset == 0 + assert result.limit == 100 # default + assert isinstance(result.users, list) + + @pytest.mark.asyncio + async def test_list_users_with_pagination( + self, test_admin: AsyncClient + ) -> None: + """Pagination parameters work correctly.""" + response = await test_admin.get( + "/api/v1/admin/users/", + params={"limit": 10, "offset": 0}, + ) + + assert response.status_code == 200 + result = UserListResponse.model_validate(response.json()) + assert result.limit == 10 + assert result.offset == 0 + + @pytest.mark.asyncio + async def test_list_users_with_search( + self, test_admin: AsyncClient + ) -> None: + """Search filter works correctly.""" + response = await test_admin.get( + "/api/v1/admin/users/", + params={"search": "test"}, + ) + + assert response.status_code == 200 + result = UserListResponse.model_validate(response.json()) + assert isinstance(result.users, list) + + @pytest.mark.asyncio + async def test_list_users_with_role_filter( + self, test_admin: AsyncClient + ) -> None: + """Role filter works correctly.""" + response = await test_admin.get( + "/api/v1/admin/users/", + params={"role": 
UserRole.USER}, + ) + + assert response.status_code == 200 + result = UserListResponse.model_validate(response.json()) + + for user in result.users: + assert user.role == UserRole.USER + + @pytest.mark.asyncio + async def test_list_users_forbidden_for_regular_user( + self, test_user: AsyncClient + ) -> None: + """Regular user cannot list users.""" + response = await test_user.get("/api/v1/admin/users/") + assert response.status_code == 403 + + @pytest.mark.asyncio + async def test_list_users_unauthenticated( + self, client: AsyncClient + ) -> None: + """Unauthenticated request returns 401.""" + response = await client.get("/api/v1/admin/users/") + assert response.status_code == 401 + + +class TestCreateUser: + """Tests for POST /api/v1/admin/users/.""" + + @pytest.mark.asyncio + async def test_create_user(self, test_admin: AsyncClient) -> None: + """Admin can create a new user.""" + request = make_user_create("newuser") + response = await test_admin.post( + "/api/v1/admin/users/", json=request.model_dump() + ) + + assert response.status_code == 200 + raw_data = response.json() + user = UserResponse.model_validate(raw_data) + + assert user.user_id is not None + assert user.username == request.username + assert user.email == request.email + assert user.role == UserRole.USER + assert user.is_active is True + assert user.created_at is not None + assert user.updated_at is not None + + # Security: password must not be exposed in response + assert "password" not in raw_data + assert "hashed_password" not in raw_data + + @pytest.mark.asyncio + async def test_create_admin_user(self, test_admin: AsyncClient) -> None: + """Admin can create another admin user.""" + request = make_user_create("newadmin", role=UserRole.ADMIN) + response = await test_admin.post( + "/api/v1/admin/users/", json=request.model_dump() + ) + + assert response.status_code == 200 + user = UserResponse.model_validate(response.json()) + assert user.role == UserRole.ADMIN + + @pytest.mark.asyncio + async 
def test_create_user_duplicate_username( + self, test_admin: AsyncClient + ) -> None: + """Cannot create user with duplicate username.""" + uid = uuid.uuid4().hex[:8] + first_user = UserCreate( + username=f"duplicate_{uid}", + email=f"first_{uid}@example.com", + password="password123", + role=UserRole.USER, + ) + + # Create first user and verify success + first_response = await test_admin.post( + "/api/v1/admin/users/", + json=first_user.model_dump(), + ) + assert first_response.status_code == 200 + created_user = UserResponse.model_validate(first_response.json()) + assert created_user.username == first_user.username + + # Try to create second user with same username + duplicate_user = UserCreate( + username=f"duplicate_{uid}", + email=f"second_{uid}@example.com", + password="password123", + role=UserRole.USER, + ) + response = await test_admin.post( + "/api/v1/admin/users/", + json=duplicate_user.model_dump(), + ) + + assert response.status_code == 400 + + @pytest.mark.asyncio + async def test_create_user_invalid_password( + self, test_admin: AsyncClient + ) -> None: + """Cannot create user with too short password.""" + response = await test_admin.post( + "/api/v1/admin/users/", + json={ + "username": "shortpw", + "email": "shortpw@example.com", + "password": "short", # less than 8 chars + "role": UserRole.USER, + }, + ) + + assert response.status_code == 422 + + @pytest.mark.asyncio + async def test_create_user_forbidden_for_regular_user( + self, test_user: AsyncClient + ) -> None: + """Regular user cannot create users.""" + response = await test_user.post( + "/api/v1/admin/users/", + json={ + "username": "forbidden", + "email": "forbidden@example.com", + "password": "password123", + "role": UserRole.USER, + }, + ) + assert response.status_code == 403 + + +class TestGetUser: + """Tests for GET /api/v1/admin/users/{user_id}.""" + + @pytest.mark.asyncio + async def test_get_user(self, test_admin: AsyncClient) -> None: + """Admin can get a specific user.""" + # 
Create user first + request = make_user_create("getuser") + create_response = await test_admin.post( + "/api/v1/admin/users/", json=request.model_dump() + ) + created_user = UserResponse.model_validate(create_response.json()) + + # Get user + response = await test_admin.get( + f"/api/v1/admin/users/{created_user.user_id}" + ) + + assert response.status_code == 200 + user = UserResponse.model_validate(response.json()) + + assert user.user_id == created_user.user_id + assert user.username == request.username + assert user.email == request.email + + @pytest.mark.asyncio + async def test_get_user_not_found(self, test_admin: AsyncClient) -> None: + """Get nonexistent user returns 404.""" + response = await test_admin.get( + "/api/v1/admin/users/nonexistent-user-id" + ) + assert response.status_code == 404 + + @pytest.mark.asyncio + async def test_get_user_forbidden_for_regular_user( + self, test_user: AsyncClient, test_admin: AsyncClient + ) -> None: + """Regular user cannot get user details.""" + # Create user as admin first + request = make_user_create("target") + create_response = await test_admin.post( + "/api/v1/admin/users/", json=request.model_dump() + ) + user_id = create_response.json()["user_id"] + + # Try to get as regular user + response = await test_user.get(f"/api/v1/admin/users/{user_id}") + assert response.status_code == 403 + + +class TestGetUserOverview: + """Tests for GET /api/v1/admin/users/{user_id}/overview.""" + + @pytest.mark.asyncio + async def test_get_user_overview(self, test_admin: AsyncClient) -> None: + """Admin can get user overview.""" + # Create user first + request = make_user_create("overview") + create_response = await test_admin.post( + "/api/v1/admin/users/", json=request.model_dump() + ) + user_id = create_response.json()["user_id"] + + # Get overview + response = await test_admin.get( + f"/api/v1/admin/users/{user_id}/overview" + ) + + assert response.status_code == 200 + overview = 
AdminUserOverview.model_validate(response.json()) + + assert overview.user.user_id == user_id + assert overview.stats is not None + assert overview.stats.total_events >= 0 + + # Validate derived counts + assert isinstance(overview.derived_counts, DerivedCounts) + assert overview.derived_counts.succeeded >= 0 + assert overview.derived_counts.failed >= 0 + assert overview.derived_counts.timeout >= 0 + assert overview.derived_counts.cancelled >= 0 + + # Validate rate limit summary + assert isinstance(overview.rate_limit_summary, RateLimitSummary) + assert isinstance(overview.recent_events, list) + + @pytest.mark.asyncio + async def test_get_user_overview_not_found( + self, test_admin: AsyncClient + ) -> None: + """Get overview for nonexistent user returns 404.""" + response = await test_admin.get( + "/api/v1/admin/users/nonexistent-user-id/overview" + ) + assert response.status_code == 404 + + @pytest.mark.asyncio + async def test_get_user_overview_forbidden_for_regular_user( + self, test_user: AsyncClient, test_admin: AsyncClient + ) -> None: + """Regular user cannot get user overview.""" + # Create user as admin + request = make_user_create("target") + create_response = await test_admin.post( + "/api/v1/admin/users/", json=request.model_dump() + ) + user_id = create_response.json()["user_id"] + + # Try as regular user + response = await test_user.get( + f"/api/v1/admin/users/{user_id}/overview" + ) + assert response.status_code == 403 + + +class TestUpdateUser: + """Tests for PUT /api/v1/admin/users/{user_id}.""" + + @pytest.mark.asyncio + async def test_update_user_username( + self, test_admin: AsyncClient + ) -> None: + """Admin can update user's username.""" + # Create user + request = make_user_create("original") + create_response = await test_admin.post( + "/api/v1/admin/users/", json=request.model_dump() + ) + user = UserResponse.model_validate(create_response.json()) + + # Update username + update = UserUpdate(username="updated_username") + response = await 
test_admin.put( + f"/api/v1/admin/users/{user.user_id}", + json=update.model_dump(exclude_none=True), + ) + + assert response.status_code == 200 + updated = UserResponse.model_validate(response.json()) + assert updated.username == "updated_username" + assert updated.updated_at > user.updated_at + + @pytest.mark.asyncio + async def test_update_user_role(self, test_admin: AsyncClient) -> None: + """Admin can update user's role.""" + # Create user + request = make_user_create("roletest") + create_response = await test_admin.post( + "/api/v1/admin/users/", json=request.model_dump() + ) + user_id = create_response.json()["user_id"] + + # Update role + update = UserUpdate(role=UserRole.ADMIN) + response = await test_admin.put( + f"/api/v1/admin/users/{user_id}", + json=update.model_dump(exclude_none=True), + ) + + assert response.status_code == 200 + updated = UserResponse.model_validate(response.json()) + assert updated.role == UserRole.ADMIN + + @pytest.mark.asyncio + async def test_update_user_deactivate( + self, test_admin: AsyncClient + ) -> None: + """Admin can deactivate a user.""" + # Create user + request = make_user_create("deactivate") + create_response = await test_admin.post( + "/api/v1/admin/users/", json=request.model_dump() + ) + user_id = create_response.json()["user_id"] + + # Deactivate + update = UserUpdate(is_active=False) + response = await test_admin.put( + f"/api/v1/admin/users/{user_id}", + json=update.model_dump(exclude_none=True), + ) + + assert response.status_code == 200 + updated = UserResponse.model_validate(response.json()) + assert updated.is_active is False + + @pytest.mark.asyncio + async def test_update_user_not_found( + self, test_admin: AsyncClient + ) -> None: + """Update nonexistent user returns 404.""" + response = await test_admin.put( + "/api/v1/admin/users/nonexistent-user-id", + json={"username": "test"}, + ) + assert response.status_code == 404 + + @pytest.mark.asyncio + async def test_update_user_forbidden_for_regular_user( 
+ self, test_user: AsyncClient, test_admin: AsyncClient + ) -> None: + """Regular user cannot update other users.""" + # Create user as admin + request = make_user_create("target") + create_response = await test_admin.post( + "/api/v1/admin/users/", json=request.model_dump() + ) + user_id = create_response.json()["user_id"] + + # Try to update as regular user (raw dict - testing invalid access) + response = await test_user.put( + f"/api/v1/admin/users/{user_id}", + json={"username": "hacked"}, + ) + assert response.status_code == 403 + + +class TestDeleteUser: + """Tests for DELETE /api/v1/admin/users/{user_id}.""" + + @pytest.mark.asyncio + async def test_delete_user(self, test_admin: AsyncClient) -> None: + """Admin can delete a user.""" + # Create user + request = make_user_create("delete") + create_response = await test_admin.post( + "/api/v1/admin/users/", json=request.model_dump() + ) + user_id = create_response.json()["user_id"] + + # Delete + response = await test_admin.delete( + f"/api/v1/admin/users/{user_id}" + ) + + assert response.status_code == 200 + result = DeleteUserResponse.model_validate(response.json()) + assert user_id in result.message + assert result.user_deleted is True + + # Verify deleted + get_response = await test_admin.get(f"/api/v1/admin/users/{user_id}") + assert get_response.status_code == 404 + + @pytest.mark.asyncio + async def test_delete_user_cascade(self, test_admin: AsyncClient) -> None: + """Delete user with cascade option.""" + # Create user + request = make_user_create("cascade") + create_response = await test_admin.post( + "/api/v1/admin/users/", json=request.model_dump() + ) + user_id = create_response.json()["user_id"] + + # Delete with cascade + response = await test_admin.delete( + f"/api/v1/admin/users/{user_id}", + params={"cascade": True}, + ) + + assert response.status_code == 200 + result = DeleteUserResponse.model_validate(response.json()) + assert result.user_deleted is True + + @pytest.mark.asyncio + async def 
test_delete_user_not_found( + self, test_admin: AsyncClient + ) -> None: + """Delete nonexistent user returns error.""" + response = await test_admin.delete( + "/api/v1/admin/users/nonexistent-user-id" + ) + assert response.status_code == 404 + + @pytest.mark.asyncio + async def test_delete_self_forbidden( + self, test_admin: AsyncClient + ) -> None: + """Admin cannot delete their own account.""" + # Get admin's own user_id + me_response = await test_admin.get("/api/v1/auth/me") + admin_user_id = me_response.json()["user_id"] + + # Try to delete self + response = await test_admin.delete( + f"/api/v1/admin/users/{admin_user_id}" + ) + assert response.status_code == 400 + + @pytest.mark.asyncio + async def test_delete_user_forbidden_for_regular_user( + self, test_user: AsyncClient, test_admin: AsyncClient + ) -> None: + """Regular user cannot delete users.""" + # Create user as admin + request = make_user_create("target") + create_response = await test_admin.post( + "/api/v1/admin/users/", json=request.model_dump() + ) + user_id = create_response.json()["user_id"] + + # Try to delete as regular user + response = await test_user.delete(f"/api/v1/admin/users/{user_id}") + assert response.status_code == 403 + + +class TestResetPassword: + """Tests for POST /api/v1/admin/users/{user_id}/reset-password.""" + + @pytest.mark.asyncio + async def test_reset_password(self, test_admin: AsyncClient) -> None: + """Admin can reset user's password.""" + # Create user + request = make_user_create("pwreset") + create_response = await test_admin.post( + "/api/v1/admin/users/", json=request.model_dump() + ) + user_id = create_response.json()["user_id"] + + # Reset password + reset_request = PasswordResetRequest(new_password="newpassword456") + response = await test_admin.post( + f"/api/v1/admin/users/{user_id}/reset-password", + json=reset_request.model_dump(), + ) + + assert response.status_code == 200 + result = MessageResponse.model_validate(response.json()) + assert "reset" in 
result.message.lower() + assert user_id in result.message + + @pytest.mark.asyncio + async def test_reset_password_short_password( + self, test_admin: AsyncClient + ) -> None: + """Cannot reset to password shorter than 8 chars.""" + # Create user + request = make_user_create("shortpw") + create_response = await test_admin.post( + "/api/v1/admin/users/", json=request.model_dump() + ) + user_id = create_response.json()["user_id"] + + # Try to reset with short password (raw dict - testing validation error) + response = await test_admin.post( + f"/api/v1/admin/users/{user_id}/reset-password", + json={"new_password": "short"}, + ) + + assert response.status_code == 422 + + @pytest.mark.asyncio + async def test_reset_password_forbidden_for_regular_user( + self, test_user: AsyncClient, test_admin: AsyncClient + ) -> None: + """Regular user cannot reset passwords.""" + # Create user as admin + request = make_user_create("target") + create_response = await test_admin.post( + "/api/v1/admin/users/", json=request.model_dump() + ) + user_id = create_response.json()["user_id"] + + # Try as regular user (raw dict - testing forbidden access) + response = await test_user.post( + f"/api/v1/admin/users/{user_id}/reset-password", + json={"new_password": "newpassword123"}, + ) + assert response.status_code == 403 + + +class TestGetUserRateLimits: + """Tests for GET /api/v1/admin/users/{user_id}/rate-limits.""" + + @pytest.mark.asyncio + async def test_get_user_rate_limits( + self, test_admin: AsyncClient + ) -> None: + """Admin can get user's rate limits.""" + # Create user + request = make_user_create("ratelimit") + create_response = await test_admin.post( + "/api/v1/admin/users/", json=request.model_dump() + ) + user_id = create_response.json()["user_id"] + + # Get rate limits + response = await test_admin.get( + f"/api/v1/admin/users/{user_id}/rate-limits" + ) + + assert response.status_code == 200 + result = UserRateLimitsResponse.model_validate(response.json()) + + assert 
result.user_id == user_id + assert isinstance(result.current_usage, dict) + + @pytest.mark.asyncio + async def test_get_user_rate_limits_forbidden_for_regular_user( + self, test_user: AsyncClient, test_admin: AsyncClient + ) -> None: + """Regular user cannot get rate limits.""" + # Create user as admin + request = make_user_create("target") + create_response = await test_admin.post( + "/api/v1/admin/users/", json=request.model_dump() + ) + user_id = create_response.json()["user_id"] + + # Try as regular user + response = await test_user.get( + f"/api/v1/admin/users/{user_id}/rate-limits" + ) + assert response.status_code == 403 + + +class TestUpdateUserRateLimits: + """Tests for PUT /api/v1/admin/users/{user_id}/rate-limits.""" + + @pytest.mark.asyncio + async def test_update_user_rate_limits( + self, test_admin: AsyncClient + ) -> None: + """Admin can update user's rate limits.""" + # Create user + request = make_user_create("updatelimit") + create_response = await test_admin.post( + "/api/v1/admin/users/", json=request.model_dump() + ) + user_id = create_response.json()["user_id"] + + # Update rate limits + update_request = RateLimitUpdateRequest( + bypass_rate_limit=False, + global_multiplier=1.5, + rules=[], + ) + response = await test_admin.put( + f"/api/v1/admin/users/{user_id}/rate-limits", + json=update_request.model_dump(), + ) + + assert response.status_code == 200 + result = RateLimitUpdateResponse.model_validate(response.json()) + + assert result.user_id == user_id + assert result.updated is True + assert result.config is not None + assert result.config.global_multiplier == 1.5 + + @pytest.mark.asyncio + async def test_update_user_rate_limits_bypass( + self, test_admin: AsyncClient + ) -> None: + """Admin can enable rate limit bypass for user.""" + # Create user + request = make_user_create("bypass") + create_response = await test_admin.post( + "/api/v1/admin/users/", json=request.model_dump() + ) + user_id = create_response.json()["user_id"] + + # 
Enable bypass + update_request = RateLimitUpdateRequest( + bypass_rate_limit=True, + global_multiplier=1.0, + rules=[], + ) + response = await test_admin.put( + f"/api/v1/admin/users/{user_id}/rate-limits", + json=update_request.model_dump(), + ) + + assert response.status_code == 200 + result = RateLimitUpdateResponse.model_validate(response.json()) + assert result.config.bypass_rate_limit is True + + @pytest.mark.asyncio + async def test_update_user_rate_limits_forbidden_for_regular_user( + self, test_user: AsyncClient, test_admin: AsyncClient + ) -> None: + """Regular user cannot update rate limits.""" + # Create user as admin + request = make_user_create("target") + create_response = await test_admin.post( + "/api/v1/admin/users/", json=request.model_dump() + ) + user_id = create_response.json()["user_id"] + + # Try as regular user (raw dict - testing forbidden access) + response = await test_user.put( + f"/api/v1/admin/users/{user_id}/rate-limits", + json={ + "bypass_rate_limit": True, + "global_multiplier": 2.0, + "rules": [], + }, + ) + assert response.status_code == 403 + + +class TestResetUserRateLimits: + """Tests for POST /api/v1/admin/users/{user_id}/rate-limits/reset.""" + + @pytest.mark.asyncio + async def test_reset_user_rate_limits( + self, test_admin: AsyncClient + ) -> None: + """Admin can reset user's rate limits.""" + # Create user + request = make_user_create("resetlimit") + create_response = await test_admin.post( + "/api/v1/admin/users/", json=request.model_dump() + ) + user_id = create_response.json()["user_id"] + + # Reset rate limits + response = await test_admin.post( + f"/api/v1/admin/users/{user_id}/rate-limits/reset" + ) + + assert response.status_code == 200 + result = MessageResponse.model_validate(response.json()) + assert "reset" in result.message.lower() + assert user_id in result.message + + @pytest.mark.asyncio + async def test_reset_user_rate_limits_forbidden_for_regular_user( + self, test_user: AsyncClient, test_admin: 
AsyncClient + ) -> None: + """Regular user cannot reset rate limits.""" + # Create user as admin + request = make_user_create("target") + create_response = await test_admin.post( + "/api/v1/admin/users/", json=request.model_dump() + ) + user_id = create_response.json()["user_id"] + + # Try as regular user + response = await test_user.post( + f"/api/v1/admin/users/{user_id}/rate-limits/reset" + ) + assert response.status_code == 403 diff --git a/backend/tests/integration/test_alertmanager.py b/backend/tests/e2e/test_alertmanager.py similarity index 97% rename from backend/tests/integration/test_alertmanager.py rename to backend/tests/e2e/test_alertmanager.py index d2e3f21d..62068d43 100644 --- a/backend/tests/integration/test_alertmanager.py +++ b/backend/tests/e2e/test_alertmanager.py @@ -3,7 +3,7 @@ import httpx import pytest -pytestmark = pytest.mark.integration +pytestmark = pytest.mark.e2e @pytest.mark.asyncio diff --git a/backend/tests/e2e/test_auth_routes.py b/backend/tests/e2e/test_auth_routes.py new file mode 100644 index 00000000..478329fd --- /dev/null +++ b/backend/tests/e2e/test_auth_routes.py @@ -0,0 +1,228 @@ +import uuid + +import pytest +from app.domain.enums.user import UserRole +from app.schemas_pydantic.user import ( + LoginResponse, + MessageResponse, + TokenValidationResponse, + UserCreate, + UserResponse, +) +from httpx import AsyncClient + +pytestmark = [pytest.mark.e2e] + + +class TestAuthLogin: + """Tests for POST /api/v1/auth/login.""" + + @pytest.mark.asyncio + async def test_login_success( + self, client: AsyncClient, new_user_request: UserCreate + ) -> None: + """Login with valid credentials returns LoginResponse.""" + await client.post("/api/v1/auth/register", json=new_user_request.model_dump()) + + response = await client.post( + "/api/v1/auth/login", + data={"username": new_user_request.username, "password": new_user_request.password}, + ) + + assert response.status_code == 200 + result = LoginResponse.model_validate(response.json()) 
+ + assert result.username == new_user_request.username + assert result.role in [UserRole.USER, UserRole.ADMIN] + assert result.csrf_token and len(result.csrf_token) > 0 + assert result.message == "Login successful" + assert "access_token" in response.cookies + assert "csrf_token" in response.cookies + + @pytest.mark.asyncio + async def test_login_invalid_password( + self, client: AsyncClient, new_user_request: UserCreate + ) -> None: + """Invalid password returns 401 with error detail.""" + await client.post("/api/v1/auth/register", json=new_user_request.model_dump()) + + # Login with wrong password + response = await client.post( + "/api/v1/auth/login", + data={"username": new_user_request.username, "password": "WrongPass123!"}, + ) + + assert response.status_code == 401 + assert response.json()["detail"] == "Invalid credentials" + + @pytest.mark.asyncio + async def test_login_nonexistent_user(self, client: AsyncClient) -> None: + """Nonexistent user returns 401.""" + response = await client.post( + "/api/v1/auth/login", + data={"username": "nonexistent_user_xyz", "password": "whatever"}, + ) + + assert response.status_code == 401 + assert response.json()["detail"] == "Invalid credentials" + + @pytest.mark.asyncio + async def test_login_empty_credentials(self, client: AsyncClient) -> None: + """Empty username/password returns 422 validation error.""" + response = await client.post("/api/v1/auth/login", data={}) + + assert response.status_code == 422 + + +class TestAuthRegister: + """Tests for POST /api/v1/auth/register.""" + + @pytest.mark.asyncio + async def test_register_success( + self, client: AsyncClient, new_user_request: UserCreate + ) -> None: + """Register new user returns UserResponse with all fields.""" + response = await client.post( + "/api/v1/auth/register", json=new_user_request.model_dump() + ) + + assert response.status_code == 200 + result = UserResponse.model_validate(response.json()) + + # Validate UUID format + uuid.UUID(result.user_id) + 
assert result.username == new_user_request.username + assert result.email == new_user_request.email + assert result.role == UserRole.USER + assert result.is_superuser is False + assert result.created_at is not None + assert result.updated_at is not None + + @pytest.mark.asyncio + async def test_register_duplicate_username( + self, client: AsyncClient, new_user_request: UserCreate + ) -> None: + """Duplicate username returns 400.""" + await client.post("/api/v1/auth/register", json=new_user_request.model_dump()) + + # Try same username with different email + second_request = UserCreate( + username=new_user_request.username, + email=f"other_{uuid.uuid4().hex[:8]}@test.com", + password="Pass123!", + role=UserRole.USER, + ) + response = await client.post( + "/api/v1/auth/register", json=second_request.model_dump() + ) + + assert response.status_code == 400 + assert response.json()["detail"] == "Username already registered" + + @pytest.mark.asyncio + async def test_register_duplicate_email( + self, client: AsyncClient, new_user_request: UserCreate + ) -> None: + """Duplicate email returns 409.""" + await client.post("/api/v1/auth/register", json=new_user_request.model_dump()) + + # Try same email with different username + second_request = UserCreate( + username=f"other_{uuid.uuid4().hex[:8]}", + email=new_user_request.email, + password="Pass123!", + role=UserRole.USER, + ) + response = await client.post( + "/api/v1/auth/register", json=second_request.model_dump() + ) + + assert response.status_code == 409 + assert response.json()["detail"] == "Email already registered" + + @pytest.mark.asyncio + async def test_register_invalid_email_format(self, client: AsyncClient) -> None: + """Invalid email format returns 422.""" + uid = uuid.uuid4().hex[:8] + + response = await client.post( + "/api/v1/auth/register", + json={ + "username": f"user_{uid}", + "email": "not-an-email", + "password": "Pass123!", + "role": "user", + }, + ) + + assert response.status_code == 422 + + +class 
TestAuthMe: + """Tests for GET /api/v1/auth/me.""" + + @pytest.mark.asyncio + async def test_get_profile_authenticated(self, test_user: AsyncClient) -> None: + """Authenticated user gets their profile.""" + response = await test_user.get("/api/v1/auth/me") + + assert response.status_code == 200 + result = UserResponse.model_validate(response.json()) + + assert result.user_id is not None + assert result.username is not None + assert result.email is not None + assert result.role in [UserRole.USER, UserRole.ADMIN] + assert response.headers.get("Cache-Control") == "no-store" + + @pytest.mark.asyncio + async def test_get_profile_unauthenticated(self, client: AsyncClient) -> None: + """Unauthenticated request returns 401.""" + response = await client.get("/api/v1/auth/me") + + assert response.status_code == 401 + + +class TestAuthVerifyToken: + """Tests for GET /api/v1/auth/verify-token.""" + + @pytest.mark.asyncio + async def test_verify_valid_token(self, test_user: AsyncClient) -> None: + """Valid token returns TokenValidationResponse with valid=True.""" + response = await test_user.get("/api/v1/auth/verify-token") + + assert response.status_code == 200 + result = TokenValidationResponse.model_validate(response.json()) + + assert result.valid is True + assert result.username is not None + assert result.role in [UserRole.USER, UserRole.ADMIN] + assert result.csrf_token is not None + + @pytest.mark.asyncio + async def test_verify_invalid_token(self, client: AsyncClient) -> None: + """Invalid/missing token returns 401.""" + response = await client.get("/api/v1/auth/verify-token") + + assert response.status_code == 401 + + +class TestAuthLogout: + """Tests for POST /api/v1/auth/logout.""" + + @pytest.mark.asyncio + async def test_logout_success(self, test_user: AsyncClient) -> None: + """Logout returns success message and clears cookies.""" + response = await test_user.post("/api/v1/auth/logout") + + assert response.status_code == 200 + result = 
MessageResponse.model_validate(response.json()) + assert result.message == "Logout successful" + + @pytest.mark.asyncio + async def test_logout_unauthenticated(self, client: AsyncClient) -> None: + """Logout without auth still succeeds (idempotent).""" + response = await client.post("/api/v1/auth/logout") + + # Logout is typically idempotent - should succeed even without auth + assert response.status_code == 200 diff --git a/backend/tests/e2e/test_dlq_routes.py b/backend/tests/e2e/test_dlq_routes.py new file mode 100644 index 00000000..3c1c24bc --- /dev/null +++ b/backend/tests/e2e/test_dlq_routes.py @@ -0,0 +1,370 @@ +import pytest +from app.dlq.models import DLQMessageStatus, RetryStrategy +from app.domain.enums.events import EventType +from app.schemas_pydantic.dlq import ( + DLQBatchRetryResponse, + DLQMessageDetail, + DLQMessagesResponse, + DLQStats, + DLQTopicSummaryResponse, +) +from app.schemas_pydantic.user import MessageResponse +from httpx import AsyncClient + +pytestmark = [pytest.mark.e2e, pytest.mark.kafka] + + +class TestGetDLQStats: + """Tests for GET /api/v1/dlq/stats.""" + + @pytest.mark.asyncio + async def test_get_dlq_stats(self, test_user: AsyncClient) -> None: + """Get DLQ statistics.""" + response = await test_user.get("/api/v1/dlq/stats") + + assert response.status_code == 200 + stats = DLQStats.model_validate(response.json()) + + assert isinstance(stats.by_status, dict) + assert isinstance(stats.by_topic, list) + assert isinstance(stats.by_event_type, list) + assert stats.age_stats is not None + assert stats.timestamp is not None + + @pytest.mark.asyncio + async def test_get_dlq_stats_unauthenticated( + self, client: AsyncClient + ) -> None: + """Unauthenticated request returns 401.""" + response = await client.get("/api/v1/dlq/stats") + assert response.status_code == 401 + + +class TestGetDLQMessages: + """Tests for GET /api/v1/dlq/messages.""" + + @pytest.mark.asyncio + async def test_get_dlq_messages(self, test_user: AsyncClient) -> 
None: + """Get DLQ messages list.""" + response = await test_user.get("/api/v1/dlq/messages") + + assert response.status_code == 200 + result = DLQMessagesResponse.model_validate(response.json()) + + assert result.total >= 0 + assert result.offset == 0 + assert result.limit == 50 # default + assert isinstance(result.messages, list) + + @pytest.mark.asyncio + async def test_get_dlq_messages_with_pagination( + self, test_user: AsyncClient + ) -> None: + """Pagination parameters work correctly.""" + response = await test_user.get( + "/api/v1/dlq/messages", + params={"limit": 10, "offset": 0}, + ) + + assert response.status_code == 200 + result = DLQMessagesResponse.model_validate(response.json()) + assert result.limit == 10 + assert result.offset == 0 + + @pytest.mark.asyncio + async def test_get_dlq_messages_by_status( + self, test_user: AsyncClient + ) -> None: + """Filter DLQ messages by status.""" + response = await test_user.get( + "/api/v1/dlq/messages", + params={"status": DLQMessageStatus.PENDING}, + ) + + assert response.status_code == 200 + result = DLQMessagesResponse.model_validate(response.json()) + + for msg in result.messages: + assert msg.status == DLQMessageStatus.PENDING + + @pytest.mark.asyncio + async def test_get_dlq_messages_by_topic( + self, test_user: AsyncClient + ) -> None: + """Filter DLQ messages by topic.""" + response = await test_user.get( + "/api/v1/dlq/messages", + params={"topic": "execution-events"}, + ) + + assert response.status_code == 200 + result = DLQMessagesResponse.model_validate(response.json()) + assert isinstance(result.messages, list) + + @pytest.mark.asyncio + async def test_get_dlq_messages_by_event_type( + self, test_user: AsyncClient + ) -> None: + """Filter DLQ messages by event type.""" + response = await test_user.get( + "/api/v1/dlq/messages", + params={"event_type": EventType.EXECUTION_REQUESTED}, + ) + + assert response.status_code == 200 + result = DLQMessagesResponse.model_validate(response.json()) + assert 
isinstance(result.messages, list) + + +class TestGetDLQMessage: + """Tests for GET /api/v1/dlq/messages/{event_id}.""" + + @pytest.mark.asyncio + async def test_get_dlq_message_not_found( + self, test_user: AsyncClient + ) -> None: + """Get nonexistent DLQ message returns 404.""" + response = await test_user.get( + "/api/v1/dlq/messages/nonexistent-event-id" + ) + assert response.status_code == 404 + + @pytest.mark.asyncio + async def test_get_dlq_message_detail( + self, test_user: AsyncClient + ) -> None: + """Get DLQ message detail if messages exist.""" + # First list messages to find one + list_response = await test_user.get( + "/api/v1/dlq/messages", + params={"limit": 1}, + ) + + if list_response.status_code == 200: + result = DLQMessagesResponse.model_validate(list_response.json()) + if result.messages: + event_id = result.messages[0].event.event_id + + # Get detail + response = await test_user.get( + f"/api/v1/dlq/messages/{event_id}" + ) + + if response.status_code == 200: + detail = DLQMessageDetail.model_validate(response.json()) + assert detail.event is not None + assert detail.original_topic is not None + assert detail.error is not None + assert detail.retry_count >= 0 + assert detail.failed_at is not None + assert detail.status is not None + + +class TestRetryDLQMessages: + """Tests for POST /api/v1/dlq/retry.""" + + @pytest.mark.asyncio + async def test_retry_dlq_messages(self, test_user: AsyncClient) -> None: + """Retry DLQ messages.""" + # First list messages to find some + list_response = await test_user.get( + "/api/v1/dlq/messages", + params={"status": DLQMessageStatus.PENDING, "limit": 5}, + ) + + if list_response.status_code == 200: + result = DLQMessagesResponse.model_validate(list_response.json()) + if result.messages: + event_ids = [msg.event.event_id for msg in result.messages[:2]] + + # Retry + response = await test_user.post( + "/api/v1/dlq/retry", + json={"event_ids": event_ids}, + ) + + assert response.status_code == 200 + retry_result 
= DLQBatchRetryResponse.model_validate( + response.json() + ) + + assert retry_result.total >= 0 + assert retry_result.successful >= 0 + assert retry_result.failed >= 0 + assert isinstance(retry_result.details, list) + + @pytest.mark.asyncio + async def test_retry_dlq_messages_empty_list( + self, test_user: AsyncClient + ) -> None: + """Retry with empty event IDs list.""" + response = await test_user.post( + "/api/v1/dlq/retry", + json={"event_ids": []}, + ) + + assert response.status_code == 200 + result = DLQBatchRetryResponse.model_validate(response.json()) + assert result.total == 0 + + @pytest.mark.asyncio + async def test_retry_dlq_messages_nonexistent( + self, test_user: AsyncClient + ) -> None: + """Retry nonexistent messages.""" + response = await test_user.post( + "/api/v1/dlq/retry", + json={"event_ids": ["nonexistent-1", "nonexistent-2"]}, + ) + + # May succeed with failures reported in details + assert response.status_code == 200 + result = DLQBatchRetryResponse.model_validate(response.json()) + assert isinstance(result.details, list) + + +class TestSetRetryPolicy: + """Tests for POST /api/v1/dlq/retry-policy.""" + + @pytest.mark.asyncio + async def test_set_retry_policy(self, test_user: AsyncClient) -> None: + """Set retry policy for a topic.""" + response = await test_user.post( + "/api/v1/dlq/retry-policy", + json={ + "topic": "execution-events", + "strategy": RetryStrategy.EXPONENTIAL_BACKOFF, + "max_retries": 5, + "base_delay_seconds": 60.0, + "max_delay_seconds": 3600.0, + "retry_multiplier": 2.0, + }, + ) + + assert response.status_code == 200 + result = MessageResponse.model_validate(response.json()) + assert "execution-events" in result.message + + @pytest.mark.asyncio + async def test_set_retry_policy_fixed_strategy( + self, test_user: AsyncClient + ) -> None: + """Set retry policy with fixed strategy.""" + response = await test_user.post( + "/api/v1/dlq/retry-policy", + json={ + "topic": "test-topic", + "strategy": 
RetryStrategy.FIXED_INTERVAL, + "max_retries": 3, + "base_delay_seconds": 30.0, + "max_delay_seconds": 300.0, + "retry_multiplier": 1.0, + }, + ) + + assert response.status_code == 200 + result = MessageResponse.model_validate(response.json()) + assert "test-topic" in result.message + + @pytest.mark.asyncio + async def test_set_retry_policy_scheduled_strategy( + self, test_user: AsyncClient + ) -> None: + """Set retry policy with scheduled strategy.""" + response = await test_user.post( + "/api/v1/dlq/retry-policy", + json={ + "topic": "notifications-topic", + "strategy": RetryStrategy.SCHEDULED, + "max_retries": 10, + "base_delay_seconds": 120.0, + "max_delay_seconds": 7200.0, + "retry_multiplier": 1.5, + }, + ) + + assert response.status_code == 200 + result = MessageResponse.model_validate(response.json()) + assert "notifications-topic" in result.message + + +class TestDiscardDLQMessage: + """Tests for DELETE /api/v1/dlq/messages/{event_id}.""" + + @pytest.mark.asyncio + async def test_discard_dlq_message_not_found( + self, test_user: AsyncClient + ) -> None: + """Discard nonexistent message returns 404.""" + response = await test_user.delete( + "/api/v1/dlq/messages/nonexistent-event-id", + params={"reason": "Test discard"}, + ) + assert response.status_code == 404 + + @pytest.mark.asyncio + async def test_discard_dlq_message(self, test_user: AsyncClient) -> None: + """Discard a DLQ message if messages exist.""" + # First list messages to find one + list_response = await test_user.get( + "/api/v1/dlq/messages", + params={"limit": 1}, + ) + + if list_response.status_code == 200: + result = DLQMessagesResponse.model_validate(list_response.json()) + if result.messages: + event_id = result.messages[0].event.event_id + + # Discard + response = await test_user.delete( + f"/api/v1/dlq/messages/{event_id}", + params={"reason": "Test discard for E2E testing"}, + ) + + if response.status_code == 200: + msg_result = MessageResponse.model_validate( + response.json() + ) + 
assert event_id in msg_result.message + assert "discarded" in msg_result.message.lower() + + @pytest.mark.asyncio + async def test_discard_dlq_message_requires_reason( + self, test_user: AsyncClient + ) -> None: + """Discard requires reason parameter.""" + response = await test_user.delete( + "/api/v1/dlq/messages/some-event-id" + ) + assert response.status_code == 422 + + +class TestGetDLQTopics: + """Tests for GET /api/v1/dlq/topics.""" + + @pytest.mark.asyncio + async def test_get_dlq_topics(self, test_user: AsyncClient) -> None: + """Get DLQ topics summary.""" + response = await test_user.get("/api/v1/dlq/topics") + + assert response.status_code == 200 + topics = [ + DLQTopicSummaryResponse.model_validate(t) + for t in response.json() + ] + + for topic in topics: + assert topic.topic is not None + assert topic.total_messages >= 0 + assert isinstance(topic.status_breakdown, dict) + assert topic.avg_retry_count >= 0 + assert topic.max_retry_count >= 0 + + @pytest.mark.asyncio + async def test_get_dlq_topics_unauthenticated( + self, client: AsyncClient + ) -> None: + """Unauthenticated request returns 401.""" + response = await client.get("/api/v1/dlq/topics") + assert response.status_code == 401 diff --git a/backend/tests/e2e/test_events_routes.py b/backend/tests/e2e/test_events_routes.py new file mode 100644 index 00000000..4365c14c --- /dev/null +++ b/backend/tests/e2e/test_events_routes.py @@ -0,0 +1,532 @@ +import asyncio + +import pytest +from app.domain.enums.events import EventType +from app.domain.events.typed import DomainEvent +from app.schemas_pydantic.events import ( + DeleteEventResponse, + EventListResponse, + EventStatistics, + PublishEventResponse, + ReplayAggregateResponse, +) +from app.schemas_pydantic.execution import ExecutionResponse +from httpx import AsyncClient +from pydantic import TypeAdapter + +DomainEventAdapter: TypeAdapter[DomainEvent] = TypeAdapter(DomainEvent) + +pytestmark = [pytest.mark.e2e, pytest.mark.kafka] + + +async def 
wait_for_user_events( + client: AsyncClient, + timeout: float = 30.0, + poll_interval: float = 0.5, +) -> EventListResponse: + """Poll until at least one event exists for the user. + + Args: + client: Authenticated HTTP client + timeout: Maximum time to wait in seconds + poll_interval: Time between polls in seconds + + Returns: + EventListResponse with at least one event + + Raises: + TimeoutError: If no events appear within timeout + AssertionError: If API returns unexpected status code + """ + deadline = asyncio.get_event_loop().time() + timeout + + while asyncio.get_event_loop().time() < deadline: + response = await client.get("/api/v1/events/user", params={"limit": 10}) + assert response.status_code == 200, f"Unexpected: {response.status_code} - {response.text}" + + result = EventListResponse.model_validate(response.json()) + if result.events: + return result + + await asyncio.sleep(poll_interval) + + raise TimeoutError(f"No events appeared for user within {timeout}s") + + +async def wait_for_aggregate_events( + client: AsyncClient, + aggregate_id: str, + timeout: float = 30.0, + poll_interval: float = 0.5, +) -> EventListResponse: + """Poll until at least one event exists for the aggregate. 
+ + Args: + client: Authenticated HTTP client + aggregate_id: Aggregate ID (execution_id) to check + timeout: Maximum time to wait in seconds + poll_interval: Time between polls in seconds + + Returns: + EventListResponse with at least one event + + Raises: + TimeoutError: If no events appear within timeout + AssertionError: If API returns unexpected status code + """ + deadline = asyncio.get_event_loop().time() + timeout + + while asyncio.get_event_loop().time() < deadline: + response = await client.get( + f"/api/v1/events/executions/{aggregate_id}/events", + params={"limit": 10}, + ) + assert response.status_code == 200, f"Unexpected: {response.status_code} - {response.text}" + + result = EventListResponse.model_validate(response.json()) + if result.events: + return result + + await asyncio.sleep(poll_interval) + + raise TimeoutError(f"No events appeared for aggregate {aggregate_id} within {timeout}s") + + +class TestExecutionEvents: + """Tests for GET /api/v1/events/executions/{execution_id}/events.""" + + @pytest.mark.asyncio + async def test_get_execution_events( + self, test_user: AsyncClient, created_execution: ExecutionResponse + ) -> None: + """Get events for a specific execution.""" + result = await wait_for_aggregate_events(test_user, created_execution.execution_id) + + assert result.total >= 1 + assert result.limit == 10 + assert result.skip == 0 + assert isinstance(result.has_more, bool) + assert len(result.events) >= 1 + + @pytest.mark.asyncio + async def test_get_execution_events_pagination( + self, test_user: AsyncClient, created_execution: ExecutionResponse + ) -> None: + """Pagination works for execution events.""" + await wait_for_aggregate_events(test_user, created_execution.execution_id) + + response = await test_user.get( + f"/api/v1/events/executions/{created_execution.execution_id}/events", + params={"limit": 5, "skip": 0}, + ) + + assert response.status_code == 200 + result = EventListResponse.model_validate(response.json()) + assert 
result.limit == 5 + assert result.skip == 0 + + @pytest.mark.asyncio + async def test_get_execution_events_access_denied( + self, test_user: AsyncClient, another_user: AsyncClient, + created_execution: ExecutionResponse + ) -> None: + """Cannot access another user's execution events.""" + response = await another_user.get( + f"/api/v1/events/executions/{created_execution.execution_id}/events" + ) + + assert response.status_code == 403 + + +class TestUserEvents: + """Tests for GET /api/v1/events/user.""" + + @pytest.mark.asyncio + async def test_get_user_events( + self, test_user: AsyncClient, created_execution: ExecutionResponse + ) -> None: + """Get events for current user.""" + result = await wait_for_user_events(test_user) + + assert result.total >= 1 + assert len(result.events) >= 1 + + @pytest.mark.asyncio + async def test_get_user_events_with_filters( + self, test_user: AsyncClient, created_execution: ExecutionResponse + ) -> None: + """Filter user events by event types.""" + await wait_for_user_events(test_user) + + response = await test_user.get( + "/api/v1/events/user", + params={ + "event_types": [EventType.EXECUTION_REQUESTED], + "limit": 10, + }, + ) + + assert response.status_code == 200 + result = EventListResponse.model_validate(response.json()) + assert result.limit == 10 + + @pytest.mark.asyncio + async def test_get_user_events_unauthenticated( + self, client: AsyncClient + ) -> None: + """Unauthenticated request returns 401.""" + response = await client.get("/api/v1/events/user") + assert response.status_code == 401 + + +class TestQueryEvents: + """Tests for POST /api/v1/events/query.""" + + @pytest.mark.asyncio + async def test_query_events( + self, test_user: AsyncClient, created_execution: ExecutionResponse + ) -> None: + """Query events with filters.""" + await wait_for_user_events(test_user) + + response = await test_user.post( + "/api/v1/events/query", + json={ + "event_types": [EventType.EXECUTION_REQUESTED], + "limit": 50, + "skip": 0, + 
}, + ) + + assert response.status_code == 200 + result = EventListResponse.model_validate(response.json()) + assert result.limit == 50 + + @pytest.mark.asyncio + async def test_query_events_with_correlation_id( + self, test_user: AsyncClient + ) -> None: + """Query events by correlation ID returns empty for nonexistent.""" + response = await test_user.post( + "/api/v1/events/query", + json={ + "correlation_id": "nonexistent-correlation-123", + "limit": 100, + }, + ) + + assert response.status_code == 200 + result = EventListResponse.model_validate(response.json()) + assert isinstance(result.events, list) + assert result.total == 0 + + +class TestCorrelationEvents: + """Tests for GET /api/v1/events/correlation/{correlation_id}.""" + + @pytest.mark.asyncio + async def test_get_events_by_nonexistent_correlation( + self, test_user: AsyncClient + ) -> None: + """Get events by nonexistent correlation ID returns empty.""" + response = await test_user.get( + "/api/v1/events/correlation/nonexistent-correlation-xyz" + ) + + assert response.status_code == 200 + result = EventListResponse.model_validate(response.json()) + assert isinstance(result.events, list) + assert result.total == 0 + + +class TestCurrentRequestEvents: + """Tests for GET /api/v1/events/current-request.""" + + @pytest.mark.asyncio + async def test_get_current_request_events( + self, test_user: AsyncClient + ) -> None: + """Get events for current request correlation.""" + response = await test_user.get("/api/v1/events/current-request") + + assert response.status_code == 200 + result = EventListResponse.model_validate(response.json()) + assert isinstance(result.events, list) + + +class TestEventStatistics: + """Tests for GET /api/v1/events/statistics.""" + + @pytest.mark.asyncio + async def test_get_event_statistics( + self, test_user: AsyncClient, created_execution: ExecutionResponse + ) -> None: + """Get event statistics for current user.""" + await wait_for_user_events(test_user) + + response = await 
test_user.get("/api/v1/events/statistics") + + assert response.status_code == 200 + stats = EventStatistics.model_validate(response.json()) + + assert stats.total_events >= 1 + assert stats.events_by_type is not None + assert stats.events_by_service is not None + + @pytest.mark.asyncio + async def test_get_event_statistics_with_time_range( + self, test_user: AsyncClient, created_execution: ExecutionResponse + ) -> None: + """Get event statistics with time range.""" + await wait_for_user_events(test_user) + + response = await test_user.get( + "/api/v1/events/statistics", + params={ + "start_time": "2024-01-01T00:00:00Z", + "end_time": "2030-01-01T00:00:00Z", + }, + ) + + assert response.status_code == 200 + stats = EventStatistics.model_validate(response.json()) + assert stats.total_events >= 1 + + +class TestSingleEvent: + """Tests for GET /api/v1/events/{event_id}.""" + + @pytest.mark.asyncio + async def test_get_event_not_found(self, test_user: AsyncClient) -> None: + """Get nonexistent event returns 404.""" + response = await test_user.get("/api/v1/events/nonexistent-event-id") + + assert response.status_code == 404 + + @pytest.mark.asyncio + async def test_get_event_by_id( + self, test_user: AsyncClient, created_execution: ExecutionResponse + ) -> None: + """Get single event by ID.""" + events_result = await wait_for_user_events(test_user) + event_id = events_result.events[0].event_id + + response = await test_user.get(f"/api/v1/events/{event_id}") + + assert response.status_code == 200 + event = DomainEventAdapter.validate_python(response.json()) + assert event.event_id == event_id + + +class TestPublishEvent: + """Tests for POST /api/v1/events/publish (admin only).""" + + @pytest.mark.asyncio + async def test_publish_event_admin_only( + self, test_admin: AsyncClient + ) -> None: + """Admin can publish custom events.""" + response = await test_admin.post( + "/api/v1/events/publish", + json={ + "event_type": EventType.SYSTEM_ERROR, + "payload": { + 
"error_type": "test_error", + "message": "Test error message", + "service_name": "test-service", + }, + "aggregate_id": "test-aggregate-123", + }, + ) + + assert response.status_code == 200 + result = PublishEventResponse.model_validate(response.json()) + assert result.event_id is not None + assert result.status == "published" + assert result.timestamp is not None + + @pytest.mark.asyncio + async def test_publish_event_forbidden_for_user( + self, test_user: AsyncClient + ) -> None: + """Regular user cannot publish events.""" + response = await test_user.post( + "/api/v1/events/publish", + json={ + "event_type": EventType.SYSTEM_ERROR, + "payload": { + "error_type": "test_error", + "message": "Test error message", + "service_name": "test-service", + }, + }, + ) + + assert response.status_code == 403 + + +class TestAggregateEvents: + """Tests for POST /api/v1/events/aggregate.""" + + @pytest.mark.asyncio + async def test_aggregate_events( + self, test_user: AsyncClient, created_execution: ExecutionResponse + ) -> None: + """Aggregate events with MongoDB pipeline.""" + await wait_for_user_events(test_user) + + response = await test_user.post( + "/api/v1/events/aggregate", + json={ + "pipeline": [ + {"$group": {"_id": "$event_type", "count": {"$sum": 1}}} + ], + "limit": 100, + }, + ) + + assert response.status_code == 200 + result = response.json() + assert isinstance(result, list) + assert len(result) >= 1 + + +class TestListEventTypes: + """Tests for GET /api/v1/events/types/list.""" + + @pytest.mark.asyncio + async def test_list_event_types(self, test_user: AsyncClient) -> None: + """List available event types.""" + response = await test_user.get("/api/v1/events/types/list") + + assert response.status_code == 200 + result = response.json() + assert isinstance(result, list) + assert len(result) > 0 + + +class TestDeleteEvent: + """Tests for DELETE /api/v1/events/{event_id} (admin only).""" + + @pytest.mark.asyncio + async def test_delete_event_admin_only( + self, 
test_admin: AsyncClient + ) -> None: + """Admin can delete events.""" + publish_response = await test_admin.post( + "/api/v1/events/publish", + json={ + "event_type": EventType.SYSTEM_ERROR, + "payload": { + "error_type": "test_delete_error", + "message": "Event to be deleted", + "service_name": "test-service", + }, + "aggregate_id": "delete-test-agg", + }, + ) + + assert publish_response.status_code == 200 + event_id = publish_response.json()["event_id"] + + delete_response = await test_admin.delete( + f"/api/v1/events/{event_id}" + ) + + assert delete_response.status_code == 200 + result = DeleteEventResponse.model_validate(delete_response.json()) + assert result.event_id == event_id + assert "deleted" in result.message.lower() + + @pytest.mark.asyncio + async def test_delete_event_forbidden_for_user( + self, test_user: AsyncClient + ) -> None: + """Regular user cannot delete events.""" + response = await test_user.delete("/api/v1/events/some-event-id") + + assert response.status_code == 403 + + +class TestReplayAggregateEvents: + """Tests for POST /api/v1/events/replay/{aggregate_id}.""" + + @pytest.mark.asyncio + async def test_replay_events_dry_run( + self, test_admin: AsyncClient, created_execution_admin: ExecutionResponse + ) -> None: + """Replay events in dry run mode.""" + await wait_for_aggregate_events(test_admin, created_execution_admin.execution_id) + + response = await test_admin.post( + f"/api/v1/events/replay/{created_execution_admin.execution_id}", + params={"dry_run": True}, + ) + + assert response.status_code == 200 + result = ReplayAggregateResponse.model_validate(response.json()) + assert result.dry_run is True + assert result.aggregate_id == created_execution_admin.execution_id + + @pytest.mark.asyncio + async def test_replay_events_not_found( + self, test_admin: AsyncClient + ) -> None: + """Replay nonexistent aggregate returns 404.""" + response = await test_admin.post( + "/api/v1/events/replay/nonexistent-aggregate", + params={"dry_run": 
True}, + ) + + assert response.status_code == 404 + + @pytest.mark.asyncio + async def test_replay_events_forbidden_for_user( + self, test_user: AsyncClient + ) -> None: + """Regular user cannot replay events.""" + response = await test_user.post( + "/api/v1/events/replay/some-aggregate", + params={"dry_run": True}, + ) + + assert response.status_code == 403 + + +class TestEventIsolation: + """Tests for event access isolation between users.""" + + @pytest.mark.asyncio + async def test_user_cannot_access_other_users_events( + self, + test_user: AsyncClient, + another_user: AsyncClient, + created_execution: ExecutionResponse, + ) -> None: + """User cannot access another user's execution events.""" + await wait_for_aggregate_events(test_user, created_execution.execution_id) + + response = await another_user.get( + f"/api/v1/events/executions/{created_execution.execution_id}/events" + ) + + assert response.status_code == 403 + + @pytest.mark.asyncio + async def test_user_events_only_shows_own_events( + self, + test_user: AsyncClient, + another_user: AsyncClient, + created_execution: ExecutionResponse, + ) -> None: + """User events endpoint only returns user's own events.""" + events_result = await wait_for_user_events(test_user) + user_event_ids = {e.event_id for e in events_result.events} + + another_response = await another_user.get("/api/v1/events/user") + assert another_response.status_code == 200 + + another_result = EventListResponse.model_validate(another_response.json()) + another_event_ids = {e.event_id for e in another_result.events} + + assert user_event_ids.isdisjoint(another_event_ids) diff --git a/backend/tests/e2e/test_execution_routes.py b/backend/tests/e2e/test_execution_routes.py index 67ed582c..049e9bde 100644 --- a/backend/tests/e2e/test_execution_routes.py +++ b/backend/tests/e2e/test_execution_routes.py @@ -1,417 +1,601 @@ +"""E2E tests for execution routes. + +Tests validate full execution lifecycle including waiting for terminal states. 
+E2E tests use the shared database (integr8scode_db) that workers also use, +enabling complete end-to-end validation of execution creation through completion. +""" + import asyncio -from uuid import UUID import pytest -from app.domain.enums.execution import ExecutionStatus as ExecutionStatusEnum +from app.domain.enums.events import EventType +from app.domain.enums.execution import ExecutionStatus +from app.domain.events.typed import ExecutionDomainEvent from app.schemas_pydantic.execution import ( + CancelExecutionRequest, + CancelResponse, + DeleteResponse, + ExampleScripts, + ExecutionListResponse, + ExecutionRequest, ExecutionResponse, ExecutionResult, - ResourceUsage, + ResourceLimits, + RetryExecutionRequest, ) from httpx import AsyncClient +from pydantic import TypeAdapter pytestmark = [pytest.mark.e2e, pytest.mark.k8s] +# TypeAdapter for parsing list of execution events from API response +ExecutionEventsAdapter = TypeAdapter(list[ExecutionDomainEvent]) + +# Initial states when execution is created +INITIAL_STATES = { + ExecutionStatus.QUEUED, + ExecutionStatus.SCHEDULED, + ExecutionStatus.RUNNING, +} + +# Terminal states that indicate execution has finished +TERMINAL_STATES = { + ExecutionStatus.COMPLETED, + ExecutionStatus.FAILED, + ExecutionStatus.TIMEOUT, + ExecutionStatus.CANCELLED, + ExecutionStatus.ERROR, +} + + +async def wait_for_terminal_state( + client: AsyncClient, + execution_id: str, + timeout: float = 90.0, + poll_interval: float = 1.0, +) -> ExecutionResult: + """Poll execution result until it reaches a terminal state. 
+ + Args: + client: Authenticated HTTP client + execution_id: ID of execution to wait for + timeout: Maximum time to wait in seconds + poll_interval: Time between polls in seconds + + Returns: + ExecutionResult with terminal status + + Raises: + TimeoutError: If execution doesn't reach terminal state within timeout + AssertionError: If API returns unexpected status code + """ + deadline = asyncio.get_event_loop().time() + timeout -class TestExecution: - """Test execution endpoints against real backend.""" + while asyncio.get_event_loop().time() < deadline: + response = await client.get(f"/api/v1/executions/{execution_id}/result") + + if response.status_code == 404: + # Result not ready yet, keep polling + await asyncio.sleep(poll_interval) + continue + + assert response.status_code == 200, f"Unexpected status {response.status_code}: {response.text}" + + result = ExecutionResult.model_validate(response.json()) + if result.status in TERMINAL_STATES: + return result + + await asyncio.sleep(poll_interval) + + raise TimeoutError(f"Execution {execution_id} did not reach terminal state within {timeout}s") + + +class TestExecutionAuthentication: + """Authentication requirement tests.""" @pytest.mark.asyncio async def test_execute_requires_authentication(self, client: AsyncClient) -> None: - """Test that execution requires authentication.""" - execution_request = { - "script": "print('Hello, World!')", - "lang": "python", - "lang_version": "3.11" - } - - response = await client.post("/api/v1/execute", json=execution_request) + """Unauthenticated request returns 401.""" + request = ExecutionRequest(script="print('test')", lang="python", lang_version="3.11") + response = await client.post("/api/v1/execute", json=request.model_dump()) + assert response.status_code == 401 - error_data = response.json() - assert "detail" in error_data - assert any(word in error_data["detail"].lower() - for word in ["not authenticated", "unauthorized", "login"]) + +class TestExecutionHappyPath: + 
"""Tests for successful execution scenarios.""" + + @pytest.mark.asyncio + async def test_execute_simple_script_completes( + self, test_user: AsyncClient, simple_execution_request: ExecutionRequest + ) -> None: + """Simple script executes and completes successfully.""" + response = await test_user.post("/api/v1/execute", json=simple_execution_request.model_dump()) + assert response.status_code == 200 + + exec_response = ExecutionResponse.model_validate(response.json()) + assert exec_response.execution_id + assert exec_response.status in [ExecutionStatus.QUEUED, ExecutionStatus.SCHEDULED, ExecutionStatus.RUNNING] + + # Wait for completion + result = await wait_for_terminal_state(test_user, exec_response.execution_id) + + assert result.status == ExecutionStatus.COMPLETED + assert result.execution_id == exec_response.execution_id + assert result.lang == "python" + assert result.lang_version == "3.11" + assert result.stdout is not None + assert "test" in result.stdout + assert result.exit_code == 0 + + @pytest.mark.asyncio + async def test_execute_multiline_output(self, test_user: AsyncClient) -> None: + """Script with multiple print statements produces correct output.""" + request = ExecutionRequest( + script="print('Line 1')\nprint('Line 2')\nprint('Line 3')", + lang="python", + lang_version="3.11", + ) + + response = await test_user.post("/api/v1/execute", json=request.model_dump()) + assert response.status_code == 200 + + exec_response = ExecutionResponse.model_validate(response.json()) + result = await wait_for_terminal_state(test_user, exec_response.execution_id) + + assert result.status == ExecutionStatus.COMPLETED + assert result.stdout is not None + assert "Line 1" in result.stdout + assert "Line 2" in result.stdout + assert "Line 3" in result.stdout + + @pytest.mark.asyncio + async def test_execute_tracks_resource_usage(self, test_user: AsyncClient) -> None: + """Execution tracks resource usage metrics.""" + request = ExecutionRequest( + script="import time; 
data = list(range(10000)); time.sleep(0.1); print('done')", + lang="python", + lang_version="3.11", + ) + + response = await test_user.post("/api/v1/execute", json=request.model_dump()) + assert response.status_code == 200 + + exec_response = ExecutionResponse.model_validate(response.json()) + result = await wait_for_terminal_state(test_user, exec_response.execution_id) + + assert result.status == ExecutionStatus.COMPLETED + assert result.resource_usage is not None + assert result.resource_usage.execution_time_wall_seconds >= 0.1 + assert result.resource_usage.peak_memory_kb > 0 + + @pytest.mark.asyncio + async def test_execute_large_output(self, test_user: AsyncClient) -> None: + """Script with large output completes successfully.""" + request = ExecutionRequest( + script="for i in range(500): print(f'Line {i}: ' + 'x' * 50)\nprint('END')", + lang="python", + lang_version="3.11", + ) + + response = await test_user.post("/api/v1/execute", json=request.model_dump()) + assert response.status_code == 200 + + exec_response = ExecutionResponse.model_validate(response.json()) + result = await wait_for_terminal_state(test_user, exec_response.execution_id, timeout=120) + + assert result.status == ExecutionStatus.COMPLETED + assert result.stdout is not None + assert "END" in result.stdout + assert len(result.stdout) > 10000 + + +class TestExecutionErrors: + """Tests for execution error handling.""" + + @pytest.mark.asyncio + async def test_execute_syntax_error(self, test_user: AsyncClient) -> None: + """Script with syntax error fails with proper error info.""" + request = ExecutionRequest( + script="def broken(\n pass", # Missing closing paren + lang="python", + lang_version="3.11", + ) + + response = await test_user.post("/api/v1/execute", json=request.model_dump()) + assert response.status_code == 200 + + exec_response = ExecutionResponse.model_validate(response.json()) + result = await wait_for_terminal_state(test_user, exec_response.execution_id) + + assert 
result.status == ExecutionStatus.FAILED + assert result.stderr is not None + assert "SyntaxError" in result.stderr + assert result.exit_code != 0 + + @pytest.mark.asyncio + async def test_execute_runtime_error(self, test_user: AsyncClient) -> None: + """Script with runtime error fails with traceback.""" + request = ExecutionRequest( + script="print('before')\nraise ValueError('test error')\nprint('after')", + lang="python", + lang_version="3.11", + ) + + response = await test_user.post("/api/v1/execute", json=request.model_dump()) + assert response.status_code == 200 + + exec_response = ExecutionResponse.model_validate(response.json()) + result = await wait_for_terminal_state(test_user, exec_response.execution_id) + + assert result.status == ExecutionStatus.FAILED + assert result.stdout is not None + assert "before" in result.stdout + assert "after" not in (result.stdout or "") + assert result.stderr is not None + assert "ValueError" in result.stderr + assert "test error" in result.stderr + + +class TestExecutionCancel: + """Tests for execution cancellation.""" + + @pytest.mark.asyncio + async def test_cancel_running_execution( + self, test_user: AsyncClient, long_running_execution_request: ExecutionRequest + ) -> None: + """Running execution can be cancelled.""" + response = await test_user.post("/api/v1/execute", json=long_running_execution_request.model_dump()) + assert response.status_code == 200 + + exec_response = ExecutionResponse.model_validate(response.json()) + + # Give it a moment to start + await asyncio.sleep(1) + + cancel_req = CancelExecutionRequest(reason="Test cancellation") + cancel_response = await test_user.post( + f"/api/v1/executions/{exec_response.execution_id}/cancel", + json=cancel_req.model_dump(), + ) + assert cancel_response.status_code == 200 + + cancel_result = CancelResponse.model_validate(cancel_response.json()) + assert cancel_result.execution_id == exec_response.execution_id + assert cancel_result.status in 
["cancellation_requested", "already_cancelled"] + + @pytest.mark.asyncio + async def test_cancel_completed_execution_fails(self, test_user: AsyncClient) -> None: + """Cannot cancel already completed execution.""" + request = ExecutionRequest(script="print('quick')", lang="python", lang_version="3.11") + + response = await test_user.post("/api/v1/execute", json=request.model_dump()) + assert response.status_code == 200 + + exec_response = ExecutionResponse.model_validate(response.json()) + + # Wait for completion + await wait_for_terminal_state(test_user, exec_response.execution_id) + + cancel_req = CancelExecutionRequest(reason="Too late") + cancel_response = await test_user.post( + f"/api/v1/executions/{exec_response.execution_id}/cancel", + json=cancel_req.model_dump(), + ) + + assert cancel_response.status_code == 400 + assert "completed" in cancel_response.json()["detail"].lower() + + +class TestExecutionRetry: + """Tests for execution retry.""" @pytest.mark.asyncio - async def test_execute_simple_python_script(self, test_user: AsyncClient) -> None: - """Test executing a simple Python script.""" - # Execute script - execution_request = { - "script": "print('Hello from real backend!')", - "lang": "python", - "lang_version": "3.11" - } - - response = await test_user.post("/api/v1/execute", json=execution_request) + async def test_retry_completed_execution(self, test_user: AsyncClient) -> None: + """Completed execution can be retried.""" + request = ExecutionRequest(script="print('original')", lang="python", lang_version="3.11") + + response = await test_user.post("/api/v1/execute", json=request.model_dump()) assert response.status_code == 200 - # Validate response structure - data = response.json() - execution_response = ExecutionResponse(**data) + original = ExecutionResponse.model_validate(response.json()) + await wait_for_terminal_state(test_user, original.execution_id) - # Verify execution_id - assert execution_response.execution_id is not None - assert 
len(execution_response.execution_id) > 0 + retry_req = RetryExecutionRequest() + retry_response = await test_user.post( + f"/api/v1/executions/{original.execution_id}/retry", + json=retry_req.model_dump(), + ) + assert retry_response.status_code == 200 - # Verify it's a valid UUID - try: - UUID(execution_response.execution_id) - except ValueError: - pytest.fail(f"Invalid execution_id format: {execution_response.execution_id}") + retried = ExecutionResponse.model_validate(retry_response.json()) + assert retried.execution_id != original.execution_id - # Verify status - assert execution_response.status in [ - ExecutionStatusEnum.QUEUED, - ExecutionStatusEnum.SCHEDULED, - ExecutionStatusEnum.RUNNING, - ExecutionStatusEnum.COMPLETED - ] + # Wait for retried execution to complete + result = await wait_for_terminal_state(test_user, retried.execution_id) + assert result.status == ExecutionStatus.COMPLETED + assert result.stdout is not None + assert "original" in result.stdout @pytest.mark.asyncio - async def test_get_execution_result(self, test_user: AsyncClient) -> None: - """Test getting execution result after completion using SSE (event-driven).""" - # Execute a simple script - execution_request = { - "script": "print('Test output')\nprint('Line 2')", - "lang": "python", - "lang_version": "3.11" - } - - exec_response = await test_user.post("/api/v1/execute", json=execution_request) - assert exec_response.status_code == 200 - - execution_id = exec_response.json()["execution_id"] - - # Immediately fetch result - no waiting - result_response = await test_user.get(f"/api/v1/result/{execution_id}") - assert result_response.status_code == 200 - - result_data = result_response.json() - execution_result = ExecutionResult(**result_data) - assert execution_result.execution_id == execution_id - assert execution_result.status in list(ExecutionStatusEnum) - assert execution_result.lang == "python" - - # Execution might be in any state - that's fine - # If completed, validate output; 
if not, that's valid too - if execution_result.status == ExecutionStatusEnum.COMPLETED: - assert execution_result.stdout is not None - assert "Test output" in execution_result.stdout - assert "Line 2" in execution_result.stdout + async def test_retry_running_execution_fails( + self, test_user: AsyncClient, long_running_execution_request: ExecutionRequest + ) -> None: + """Cannot retry execution that is still running.""" + response = await test_user.post("/api/v1/execute", json=long_running_execution_request.model_dump()) + assert response.status_code == 200 + + exec_response = ExecutionResponse.model_validate(response.json()) + + retry_req = RetryExecutionRequest() + retry_response = await test_user.post( + f"/api/v1/executions/{exec_response.execution_id}/retry", + json=retry_req.model_dump(), + ) + + assert retry_response.status_code == 400 @pytest.mark.asyncio - async def test_execute_with_error(self, test_user: AsyncClient) -> None: - """Test executing a script that produces an error.""" - # Execute script with intentional error - execution_request = { - "script": "print('Before error')\nraise ValueError('Test error')\nprint('After error')", - "lang": "python", - "lang_version": "3.11" - } + async def test_retry_other_users_execution_forbidden( + self, test_user: AsyncClient, another_user: AsyncClient + ) -> None: + """Cannot retry another user's execution.""" + request = ExecutionRequest(script="print('owned')", lang="python", lang_version="3.11") + + response = await test_user.post("/api/v1/execute", json=request.model_dump()) + assert response.status_code == 200 + + exec_response = ExecutionResponse.model_validate(response.json()) + await wait_for_terminal_state(test_user, exec_response.execution_id) + + retry_req = RetryExecutionRequest() + retry_response = await another_user.post( + f"/api/v1/executions/{exec_response.execution_id}/retry", + json=retry_req.model_dump(), + ) - exec_response = await test_user.post("/api/v1/execute", json=execution_request) - 
assert exec_response.status_code == 200 + assert retry_response.status_code == 403 - exec_response.json()["execution_id"] - # No waiting - execution was accepted, error will be processed asynchronously +class TestExecutionEvents: + """Tests for execution events.""" @pytest.mark.asyncio - async def test_execute_with_resource_tracking(self, test_user: AsyncClient) -> None: - """Test that execution tracks resource usage.""" - # Execute script that uses some resources - execution_request = { - "script": """ -import time -# Create some memory usage -data = [i for i in range(10000)] -print(f'Created list with {len(data)} items') -time.sleep(0.1) # Small delay to ensure measurable execution time -print('Done') -""", - "lang": "python", - "lang_version": "3.11" - } - - exec_response = await test_user.post("/api/v1/execute", json=execution_request) - assert exec_response.status_code == 200 - - execution_id = exec_response.json()["execution_id"] - - # No waiting - execution was accepted, error will be processed asynchronously - - # Fetch result and validate resource usage if present - result_response = await test_user.get(f"/api/v1/result/{execution_id}") - if result_response.status_code == 200 and result_response.json().get("resource_usage"): - resource_usage = ResourceUsage(**result_response.json()["resource_usage"]) - if resource_usage.execution_time_wall_seconds is not None: - assert resource_usage.execution_time_wall_seconds >= 0 - if resource_usage.peak_memory_kb is not None: - assert resource_usage.peak_memory_kb >= 0 + async def test_get_execution_events(self, test_user: AsyncClient) -> None: + """Get events for completed execution.""" + request = ExecutionRequest(script="print('events test')", lang="python", lang_version="3.11") + + response = await test_user.post("/api/v1/execute", json=request.model_dump()) + assert response.status_code == 200 + + exec_response = ExecutionResponse.model_validate(response.json()) + await wait_for_terminal_state(test_user, 
exec_response.execution_id) + + events_response = await test_user.get(f"/api/v1/executions/{exec_response.execution_id}/events") + assert events_response.status_code == 200 + + events = ExecutionEventsAdapter.validate_python(events_response.json()) + assert len(events) > 0 + + # Should have at least EXECUTION_REQUESTED event + event_types = {e.event_type for e in events} + assert EventType.EXECUTION_REQUESTED in event_types @pytest.mark.asyncio - async def test_execute_with_different_language_versions(self, test_user: AsyncClient) -> None: - """Test execution with different Python versions.""" - # Test different Python versions (if supported) - test_cases = [ - ("3.10", "import sys; print(f'Python {sys.version}')"), - ("3.11", "import sys; print(f'Python {sys.version}')"), - ("3.12", "import sys; print(f'Python {sys.version}')") - ] - - for version, script in test_cases: - execution_request = { - "script": script, - "lang": "python", - "lang_version": version - } - - response = await test_user.post("/api/v1/execute", json=execution_request) - # Should either accept (200) or reject unsupported version (400/422) - assert response.status_code in [200, 400, 422] - - if response.status_code == 200: - data = response.json() - assert "execution_id" in data + async def test_get_events_filtered_by_type(self, test_user: AsyncClient) -> None: + """Filter events by event type.""" + request = ExecutionRequest(script="print('filter test')", lang="python", lang_version="3.11") + + response = await test_user.post("/api/v1/execute", json=request.model_dump()) + assert response.status_code == 200 + + exec_response = ExecutionResponse.model_validate(response.json()) + await wait_for_terminal_state(test_user, exec_response.execution_id) + + events_response = await test_user.get( + f"/api/v1/executions/{exec_response.execution_id}/events", + params={"event_types": [EventType.EXECUTION_REQUESTED]}, + ) + assert events_response.status_code == 200 + + events = 
ExecutionEventsAdapter.validate_python(events_response.json()) + for event in events: + assert event.event_type == EventType.EXECUTION_REQUESTED @pytest.mark.asyncio - async def test_execute_with_large_output(self, test_user: AsyncClient) -> None: - """Test execution with large output.""" - # Script that produces large output - execution_request = { - "script": """ -# Generate large output -for i in range(1000): - print(f'Line {i}: ' + 'x' * 50) -print('End of output') -""", - "lang": "python", - "lang_version": "3.11" - } - - exec_response = await test_user.post("/api/v1/execute", json=execution_request) - assert exec_response.status_code == 200 - - execution_id = exec_response.json()["execution_id"] - - # No waiting - execution was accepted, error will be processed asynchronously - # Validate output from result endpoint (best-effort) - result_response = await test_user.get(f"/api/v1/result/{execution_id}") - if result_response.status_code == 200: - result_data = result_response.json() - if result_data.get("status") == "COMPLETED": - assert result_data.get("stdout") is not None - assert len(result_data["stdout"]) > 0 - assert "End of output" in result_data["stdout"] or len(result_data["stdout"]) > 10000 + async def test_get_events_access_denied(self, test_user: AsyncClient, another_user: AsyncClient) -> None: + """Cannot access another user's execution events.""" + request = ExecutionRequest(script="print('private')", lang="python", lang_version="3.11") + + response = await test_user.post("/api/v1/execute", json=request.model_dump()) + assert response.status_code == 200 + + exec_response = ExecutionResponse.model_validate(response.json()) + + events_response = await another_user.get(f"/api/v1/executions/{exec_response.execution_id}/events") + assert events_response.status_code == 403 + + +class TestExecutionDelete: + """Tests for execution deletion (admin only).""" @pytest.mark.asyncio - async def test_cancel_running_execution(self, test_user: AsyncClient) -> 
None: - """Test cancelling a running execution.""" - # Start a long-running script - execution_request = { - "script": """ -import time -print('Starting long task...') -for i in range(30): - print(f'Iteration {i}') - time.sleep(1) -print('Should not reach here if cancelled') -""", - "lang": "python", - "lang_version": "3.11" - } - - exec_response = await test_user.post("/api/v1/execute", json=execution_request) - assert exec_response.status_code == 200 - - execution_id = exec_response.json()["execution_id"] - - # Try to cancel immediately - no waiting - cancel_request = { - "reason": "Test cancellation" - } - - try: - cancel_response = await test_user.post( - f"/api/v1/{execution_id}/cancel", json=cancel_request - ) - except Exception: - pytest.skip("Cancel endpoint not available or connection dropped") - if cancel_response.status_code >= 500: - pytest.skip("Cancellation not wired; backend returned 5xx") - # Should succeed or fail if already completed - assert cancel_response.status_code in [200, 400, 404] - - # Cancel response of 200 means cancellation was accepted + @pytest.mark.admin + async def test_admin_delete_execution(self, test_user: AsyncClient, test_admin: AsyncClient) -> None: + """Admin can delete an execution.""" + request = ExecutionRequest(script="print('to delete')", lang="python", lang_version="3.11") + + response = await test_user.post("/api/v1/execute", json=request.model_dump()) + assert response.status_code == 200 + + exec_response = ExecutionResponse.model_validate(response.json()) + await wait_for_terminal_state(test_user, exec_response.execution_id) + + delete_response = await test_admin.delete(f"/api/v1/executions/{exec_response.execution_id}") + assert delete_response.status_code == 200 + + result = DeleteResponse.model_validate(delete_response.json()) + assert result.execution_id == exec_response.execution_id + assert result.message == "Execution deleted successfully" + + # Verify gone + get_response = await 
test_admin.get(f"/api/v1/executions/{exec_response.execution_id}/result") + assert get_response.status_code == 404 @pytest.mark.asyncio - async def test_execution_with_timeout(self, test_user: AsyncClient) -> None: - """Bounded check: long-running executions don't finish immediately. - - The backend's default timeout is 300s. To keep integration fast, - assert that within a short window the execution is either still - running or has transitioned to a terminal state due to platform limits. - """ - # Script that would run forever - execution_request = { - "script": """ -import time -print('Starting infinite loop...') -while True: - time.sleep(1) - print('Still running...') -""", - "lang": "python", - "lang_version": "3.11" - } - - exec_response = await test_user.post("/api/v1/execute", json=execution_request) - assert exec_response.status_code == 200 - - exec_response.json()["execution_id"] - - # Just verify the execution was created - it will run forever until timeout - # No need to wait or observe states + async def test_user_cannot_delete_execution(self, test_user: AsyncClient) -> None: + """Regular user cannot delete execution.""" + request = ExecutionRequest(script="print('no delete')", lang="python", lang_version="3.11") + + response = await test_user.post("/api/v1/execute", json=request.model_dump()) + assert response.status_code == 200 + + exec_response = ExecutionResponse.model_validate(response.json()) + + delete_response = await test_user.delete(f"/api/v1/executions/{exec_response.execution_id}") + assert delete_response.status_code == 403 + + @pytest.mark.asyncio + @pytest.mark.admin + async def test_delete_nonexistent_execution(self, test_admin: AsyncClient) -> None: + """Deleting nonexistent execution returns 404.""" + delete_response = await test_admin.delete("/api/v1/executions/nonexistent-id-xyz") + assert delete_response.status_code == 404 + + +class TestExecutionList: + """Tests for execution listing.""" + + @pytest.mark.asyncio + async def 
test_get_user_executions(self, test_user: AsyncClient) -> None: + """User can list their executions.""" + # Create an execution + request = ExecutionRequest(script="print('list test')", lang="python", lang_version="3.11") + response = await test_user.post("/api/v1/execute", json=request.model_dump()) + assert response.status_code == 200 + + exec_response = ExecutionResponse.model_validate(response.json()) + await wait_for_terminal_state(test_user, exec_response.execution_id) + + # List executions + list_response = await test_user.get("/api/v1/user/executions", params={"limit": 10, "skip": 0}) + assert list_response.status_code == 200 + + result = ExecutionListResponse.model_validate(list_response.json()) + assert result.limit == 10 + assert result.skip == 0 + assert result.total >= 1 + assert len(result.executions) >= 1 @pytest.mark.asyncio - async def test_sandbox_restrictions(self, test_user: AsyncClient) -> None: - """Test that dangerous operations are blocked by sandbox.""" - # Try dangerous operations that should be blocked - dangerous_scripts = [ - # File system access - "open('/etc/passwd', 'r').read()", - # Network access - "import socket; socket.socket().connect(('google.com', 80))", - # System commands - "import os; os.system('ls /')", - # Process manipulation - "import subprocess; subprocess.run(['ps', 'aux'])" - ] - - for script in dangerous_scripts: - execution_request = { - "script": script, - "lang": "python", - "lang_version": "3.11" - } - - exec_response = await test_user.post("/api/v1/execute", json=execution_request) - - # Should either reject immediately or fail during execution - if exec_response.status_code == 200: - execution_id = exec_response.json()["execution_id"] - - # Immediately check result - no waiting - result_resp = await test_user.get(f"/api/v1/result/{execution_id}") - if result_resp.status_code == 200: - result_data = result_resp.json() - # Dangerous operations should either: - # 1. 
Be in queued/running state (not yet executed) - # 2. Have failed/errored if sandbox blocked them - # 3. Have output showing permission denied - if result_data.get("status") == "COMPLETED": - output = result_data.get("stdout", "").lower() - # Should have been blocked - assert "denied" in output or "permission" in output or "error" in output - elif result_data.get("status") == "FAILED": - # Good - sandbox blocked it - pass - # Otherwise it's still queued/running which is fine - else: - # Rejected at submission time (also acceptable) - assert exec_response.status_code in [400, 422] + async def test_list_executions_pagination(self, test_user: AsyncClient) -> None: + """Pagination works for execution list.""" + # Create executions + for i in range(3): + request = ExecutionRequest(script=f"print('page {i}')", lang="python", lang_version="3.11") + response = await test_user.post("/api/v1/execute", json=request.model_dump()) + assert response.status_code == 200 + + # Get first page + page1_response = await test_user.get("/api/v1/user/executions", params={"limit": 2, "skip": 0}) + assert page1_response.status_code == 200 + + page1 = ExecutionListResponse.model_validate(page1_response.json()) + assert page1.limit == 2 + assert page1.skip == 0 + assert len(page1.executions) == 2 + + # Get second page + page2_response = await test_user.get("/api/v1/user/executions", params={"limit": 2, "skip": 2}) + assert page2_response.status_code == 200 + + page2 = ExecutionListResponse.model_validate(page2_response.json()) + assert page2.skip == 2 @pytest.mark.asyncio - async def test_concurrent_executions_by_same_user(self, test_user: AsyncClient) -> None: - """Test running multiple executions concurrently.""" - # Submit multiple executions - execution_request = { - "script": "import time; time.sleep(1); print('Concurrent test')", - "lang": "python", - "lang_version": "3.11" - } + async def test_list_executions_filter_by_language(self, test_user: AsyncClient) -> None: + """Filter 
executions by language.""" + request = ExecutionRequest(script="print('python')", lang="python", lang_version="3.11") + response = await test_user.post("/api/v1/execute", json=request.model_dump()) + assert response.status_code == 200 + + list_response = await test_user.get("/api/v1/user/executions", params={"lang": "python"}) + assert list_response.status_code == 200 + + result = ExecutionListResponse.model_validate(list_response.json()) + for execution in result.executions: + assert execution.lang == "python" + +class TestExecutionIdempotency: + """Tests for idempotency.""" + + @pytest.mark.asyncio + async def test_same_idempotency_key_returns_same_execution(self, test_user: AsyncClient) -> None: + """Same idempotency key returns same execution ID.""" + request = ExecutionRequest(script="print('idempotent')", lang="python", lang_version="3.11") + headers = {"Idempotency-Key": "unique-key-12345"} + + response1 = await test_user.post("/api/v1/execute", json=request.model_dump(), headers=headers) + assert response1.status_code == 200 + exec1 = ExecutionResponse.model_validate(response1.json()) + + response2 = await test_user.post("/api/v1/execute", json=request.model_dump(), headers=headers) + assert response2.status_code == 200 + exec2 = ExecutionResponse.model_validate(response2.json()) + + assert exec1.execution_id == exec2.execution_id + + +class TestExecutionConcurrency: + """Tests for concurrent executions.""" + + @pytest.mark.asyncio + async def test_concurrent_executions(self, test_user: AsyncClient) -> None: + """Multiple concurrent executions work correctly.""" tasks = [] - for _ in range(3): - task = test_user.post("/api/v1/execute", json=execution_request) - tasks.append(task) + for i in range(3): + request = ExecutionRequest(script=f"print('concurrent {i}')", lang="python", lang_version="3.11") + tasks.append(test_user.post("/api/v1/execute", json=request.model_dump())) responses = await asyncio.gather(*tasks) - execution_ids = [] + execution_ids = 
set() for response in responses: - # Should succeed or be rate limited - assert response.status_code in [200, 429] + assert response.status_code == 200 + exec_response = ExecutionResponse.model_validate(response.json()) + execution_ids.add(exec_response.execution_id) - if response.status_code == 200: - data = response.json() - execution_ids.append(data["execution_id"]) + # All IDs should be unique + assert len(execution_ids) == 3 - # All successful executions should have unique IDs - assert len(execution_ids) == len(set(execution_ids)) + # Wait for all to complete + for exec_id in execution_ids: + result = await wait_for_terminal_state(test_user, exec_id) + assert result.status == ExecutionStatus.COMPLETED - # Verify at least some succeeded - assert len(execution_ids) > 0 + +class TestPublicEndpoints: + """Tests for public (unauthenticated) endpoints.""" @pytest.mark.asyncio async def test_get_example_scripts(self, client: AsyncClient) -> None: - """Example scripts endpoint returns available example scripts.""" + """Example scripts endpoint returns scripts.""" response = await client.get("/api/v1/example-scripts") assert response.status_code == 200 - data = response.json() - assert isinstance(data, dict) - assert "scripts" in data - assert isinstance(data["scripts"], dict) + + result = ExampleScripts.model_validate(response.json()) + assert isinstance(result.scripts, dict) + assert "python" in result.scripts @pytest.mark.asyncio async def test_get_k8s_resource_limits(self, client: AsyncClient) -> None: - """K8s limits endpoint returns cluster execution limits if configured.""" + """K8s limits endpoint returns resource limits.""" response = await client.get("/api/v1/k8s-limits") assert response.status_code == 200 - limits = response.json() - # Validate ResourceLimits shape - for key in [ - "cpu_limit", - "memory_limit", - "cpu_request", - "memory_request", - "execution_timeout", - "supported_runtimes", - ]: - assert key in limits - - @pytest.mark.asyncio - async def 
test_get_user_executions_list(self, test_user: AsyncClient) -> None: - """User executions list returns paginated executions for current user.""" - # List executions - response = await test_user.get("/api/v1/user/executions?limit=5&skip=0") - assert response.status_code == 200 - payload = response.json() - assert set(["executions", "total", "limit", "skip", "has_more"]).issubset(payload.keys()) - @pytest.mark.asyncio - async def test_execution_idempotency_same_key_returns_same_execution( - self, test_user: AsyncClient - ) -> None: - """Submitting the same request with the same Idempotency-Key yields the same execution_id.""" - execution_request = { - "script": "print('Idempotency integration test')", - "lang": "python", - "lang_version": "3.11", - } - - # Add idempotency key header (CSRF is already set on test_user) - headers = {"Idempotency-Key": "it-idem-key-123"} - - # Use idempotency header on both requests to guarantee keying - r1 = await test_user.post("/api/v1/execute", json=execution_request, headers=headers) - assert r1.status_code == 200 - e1 = r1.json()["execution_id"] - - # Second request with same key must return the same execution id - r2 = await test_user.post("/api/v1/execute", json=execution_request, headers=headers) - assert r2.status_code == 200 - e2 = r2.json()["execution_id"] - - assert e1 == e2 + result = ResourceLimits.model_validate(response.json()) + assert result.cpu_limit + assert result.memory_limit + assert result.execution_timeout > 0 + assert "python" in result.supported_runtimes diff --git a/backend/tests/e2e/test_grafana_alerts_routes.py b/backend/tests/e2e/test_grafana_alerts_routes.py new file mode 100644 index 00000000..17d95ac4 --- /dev/null +++ b/backend/tests/e2e/test_grafana_alerts_routes.py @@ -0,0 +1,234 @@ +import pytest +from app.schemas_pydantic.grafana import AlertResponse +from httpx import AsyncClient + +pytestmark = [pytest.mark.e2e] + + +class TestGrafanaWebhook: + """Tests for POST /api/v1/alerts/grafana.""" + + 
@pytest.mark.asyncio + async def test_receive_grafana_alert(self, client: AsyncClient) -> None: + """Receive a Grafana alert webhook.""" + response = await client.post( + "/api/v1/alerts/grafana", + json={ + "status": "firing", + "receiver": "integr8s-receiver", + "alerts": [ + { + "status": "firing", + "labels": { + "alertname": "HighCPUUsage", + "severity": "critical", + "instance": "worker-1", + }, + "annotations": { + "summary": "High CPU usage detected", + "description": "CPU usage is above 90%", + }, + "valueString": "95%", + } + ], + "groupLabels": { + "alertname": "HighCPUUsage", + }, + "commonLabels": { + "severity": "critical", + }, + "commonAnnotations": { + "summary": "High CPU usage detected", + }, + }, + ) + + assert response.status_code == 200 + result = AlertResponse.model_validate(response.json()) + + assert result.message is not None + assert result.alerts_received == 1 + assert result.alerts_processed >= 0 + assert isinstance(result.errors, list) + + @pytest.mark.asyncio + async def test_receive_multiple_grafana_alerts( + self, client: AsyncClient + ) -> None: + """Receive multiple alerts in one webhook.""" + response = await client.post( + "/api/v1/alerts/grafana", + json={ + "status": "firing", + "receiver": "integr8s-receiver", + "alerts": [ + { + "status": "firing", + "labels": {"alertname": "Alert1", "severity": "warning"}, + "annotations": {"summary": "Alert 1"}, + }, + { + "status": "firing", + "labels": {"alertname": "Alert2", "severity": "critical"}, + "annotations": {"summary": "Alert 2"}, + }, + { + "status": "resolved", + "labels": {"alertname": "Alert3", "severity": "info"}, + "annotations": {"summary": "Alert 3"}, + }, + ], + "groupLabels": {}, + "commonLabels": {}, + "commonAnnotations": {}, + }, + ) + + assert response.status_code == 200 + result = AlertResponse.model_validate(response.json()) + + assert result.alerts_received == 3 + assert result.alerts_processed >= 0 + + @pytest.mark.asyncio + async def 
test_receive_grafana_alert_resolved( + self, client: AsyncClient + ) -> None: + """Receive a resolved alert.""" + response = await client.post( + "/api/v1/alerts/grafana", + json={ + "status": "resolved", + "receiver": "integr8s-receiver", + "alerts": [ + { + "status": "resolved", + "labels": { + "alertname": "HighMemoryUsage", + "severity": "warning", + }, + "annotations": { + "summary": "Memory usage back to normal", + }, + } + ], + "groupLabels": {}, + "commonLabels": {}, + "commonAnnotations": {}, + }, + ) + + assert response.status_code == 200 + result = AlertResponse.model_validate(response.json()) + assert result.alerts_received == 1 + + @pytest.mark.asyncio + async def test_receive_grafana_alert_empty_alerts( + self, client: AsyncClient + ) -> None: + """Receive webhook with empty alerts list.""" + response = await client.post( + "/api/v1/alerts/grafana", + json={ + "status": "firing", + "receiver": "integr8s-receiver", + "alerts": [], + "groupLabels": {}, + "commonLabels": {}, + "commonAnnotations": {}, + }, + ) + + assert response.status_code == 200 + result = AlertResponse.model_validate(response.json()) + assert result.alerts_received == 0 + assert result.alerts_processed == 0 + + @pytest.mark.asyncio + async def test_receive_grafana_alert_minimal_payload( + self, client: AsyncClient + ) -> None: + """Receive webhook with minimal payload.""" + response = await client.post( + "/api/v1/alerts/grafana", + json={ + "alerts": [ + { + "labels": {"alertname": "MinimalAlert"}, + } + ], + }, + ) + + assert response.status_code == 200 + result = AlertResponse.model_validate(response.json()) + assert result.alerts_received == 1 + + @pytest.mark.asyncio + async def test_receive_grafana_alert_with_value_string( + self, client: AsyncClient + ) -> None: + """Receive alert with valueString field.""" + response = await client.post( + "/api/v1/alerts/grafana", + json={ + "status": "firing", + "alerts": [ + { + "status": "firing", + "labels": { + "alertname": 
"DiskSpaceLow", + "instance": "server-1", + }, + "annotations": { + "summary": "Disk space is running low", + }, + "valueString": "10% available", + } + ], + }, + ) + + assert response.status_code == 200 + result = AlertResponse.model_validate(response.json()) + assert result.alerts_received == 1 + + +class TestGrafanaTestEndpoint: + """Tests for GET /api/v1/alerts/grafana/test.""" + + @pytest.mark.asyncio + async def test_grafana_test_endpoint(self, client: AsyncClient) -> None: + """Test the Grafana webhook test endpoint.""" + response = await client.get("/api/v1/alerts/grafana/test") + + assert response.status_code == 200 + result = response.json() + + assert result["status"] == "ok" + assert "message" in result + assert "webhook_url" in result + assert result["webhook_url"] == "/api/v1/alerts/grafana" + + @pytest.mark.asyncio + async def test_grafana_test_endpoint_as_user( + self, test_user: AsyncClient + ) -> None: + """Authenticated user can access test endpoint.""" + response = await test_user.get("/api/v1/alerts/grafana/test") + + assert response.status_code == 200 + result = response.json() + assert result["status"] == "ok" + + @pytest.mark.asyncio + async def test_grafana_test_endpoint_as_admin( + self, test_admin: AsyncClient + ) -> None: + """Admin can access test endpoint.""" + response = await test_admin.get("/api/v1/alerts/grafana/test") + + assert response.status_code == 200 + result = response.json() + assert result["status"] == "ok" + assert "Grafana webhook endpoint is ready" in result["message"] diff --git a/backend/tests/e2e/test_health_routes.py b/backend/tests/e2e/test_health_routes.py new file mode 100644 index 00000000..be2f5de4 --- /dev/null +++ b/backend/tests/e2e/test_health_routes.py @@ -0,0 +1,86 @@ +import asyncio +import time + +import pytest +from app.api.routes.health import LivenessResponse, ReadinessResponse +from httpx import AsyncClient + +pytestmark = [pytest.mark.e2e] + + +class TestHealthRoutes: + """Tests for health check 
endpoints.""" + + @pytest.mark.asyncio + async def test_liveness_probe(self, client: AsyncClient) -> None: + """GET /health/live returns 200 with status ok.""" + response = await client.get("/api/v1/health/live") + + assert response.status_code == 200 + result = LivenessResponse.model_validate(response.json()) + + assert result.status == "ok" + assert result.uptime_seconds >= 0 + assert result.timestamp is not None + + @pytest.mark.asyncio + async def test_readiness_probe(self, client: AsyncClient) -> None: + """GET /health/ready returns 200 with status ok.""" + response = await client.get("/api/v1/health/ready") + + assert response.status_code == 200 + result = ReadinessResponse.model_validate(response.json()) + + assert result.status == "ok" + assert result.uptime_seconds >= 0 + + @pytest.mark.asyncio + async def test_liveness_no_auth_required(self, client: AsyncClient) -> None: + """Liveness probe does not require authentication.""" + # client fixture is unauthenticated + response = await client.get("/api/v1/health/live") + assert response.status_code == 200 + + @pytest.mark.asyncio + async def test_readiness_no_auth_required(self, client: AsyncClient) -> None: + """Readiness probe does not require authentication.""" + response = await client.get("/api/v1/health/ready") + assert response.status_code == 200 + + @pytest.mark.asyncio + async def test_uptime_increases(self, client: AsyncClient) -> None: + """Uptime should be consistent between calls.""" + response1 = await client.get("/api/v1/health/live") + result1 = LivenessResponse.model_validate(response1.json()) + + response2 = await client.get("/api/v1/health/live") + result2 = LivenessResponse.model_validate(response2.json()) + + # Uptime should be same or slightly higher + assert result2.uptime_seconds >= result1.uptime_seconds + + @pytest.mark.asyncio + async def test_liveness_is_fast(self, client: AsyncClient) -> None: + """Liveness check should respond within 1 second.""" + start = time.time() + r = await 
client.get("/api/v1/health/live") + assert r.status_code == 200 + assert time.time() - start < 1.0 + + @pytest.mark.asyncio + async def test_concurrent_liveness_fetch(self, client: AsyncClient) -> None: + """Multiple concurrent liveness checks should all succeed.""" + tasks = [client.get("/api/v1/health/live") for _ in range(5)] + responses = await asyncio.gather(*tasks) + assert all(r.status_code == 200 for r in responses) + + @pytest.mark.asyncio + async def test_nonexistent_health_routes_return_404(self, client: AsyncClient) -> None: + """Non-existent health routes should return 404.""" + for path in [ + "/api/v1/health/healthz", + "/api/v1/health/health", + "/api/v1/health/readyz", + ]: + r = await client.get(path) + assert r.status_code in (404, 405) diff --git a/backend/tests/e2e/test_notifications_routes.py b/backend/tests/e2e/test_notifications_routes.py new file mode 100644 index 00000000..8e9c7b8f --- /dev/null +++ b/backend/tests/e2e/test_notifications_routes.py @@ -0,0 +1,338 @@ +import asyncio + +import pytest +from app.domain.enums.notification import NotificationChannel, NotificationSeverity, NotificationStatus +from app.schemas_pydantic.execution import ExecutionResponse +from app.schemas_pydantic.notification import ( + DeleteNotificationResponse, + NotificationListResponse, + NotificationResponse, + NotificationSubscription, + SubscriptionsResponse, + UnreadCountResponse, +) +from httpx import AsyncClient + +pytestmark = [pytest.mark.e2e, pytest.mark.kafka] + + +async def wait_for_notification( + client: AsyncClient, + timeout: float = 30.0, + poll_interval: float = 0.5, +) -> NotificationResponse: + """Poll until at least one notification exists for the user. 
+ + Args: + client: Authenticated HTTP client + timeout: Maximum time to wait in seconds + poll_interval: Time between polls in seconds + + Returns: + First notification found + + Raises: + TimeoutError: If no notification appears within timeout + AssertionError: If API returns unexpected status code + """ + deadline = asyncio.get_event_loop().time() + timeout + + while asyncio.get_event_loop().time() < deadline: + response = await client.get("/api/v1/notifications", params={"limit": 10}) + assert response.status_code == 200, f"Unexpected: {response.status_code} - {response.text}" + + result = NotificationListResponse.model_validate(response.json()) + if result.notifications: + return result.notifications[0] + + await asyncio.sleep(poll_interval) + + raise TimeoutError(f"No notification appeared within {timeout}s") + + +class TestGetNotifications: + """Tests for GET /api/v1/notifications.""" + + @pytest.mark.asyncio + async def test_get_notifications_empty(self, test_user: AsyncClient) -> None: + """New user has empty or minimal notifications.""" + response = await test_user.get("/api/v1/notifications") + + assert response.status_code == 200 + result = NotificationListResponse.model_validate(response.json()) + + assert result.total >= 0 + assert result.unread_count >= 0 + assert isinstance(result.notifications, list) + + @pytest.mark.asyncio + async def test_get_notifications_pagination( + self, test_user: AsyncClient + ) -> None: + """Pagination parameters work correctly.""" + response = await test_user.get( + "/api/v1/notifications", + params={"limit": 10, "offset": 0}, + ) + + assert response.status_code == 200 + result = NotificationListResponse.model_validate(response.json()) + assert isinstance(result.notifications, list) + + @pytest.mark.asyncio + async def test_get_notifications_with_status_filter( + self, test_user: AsyncClient + ) -> None: + """Filter notifications by status.""" + response = await test_user.get( + "/api/v1/notifications", + 
params={"status": NotificationStatus.DELIVERED}, + ) + + assert response.status_code == 200 + result = NotificationListResponse.model_validate(response.json()) + assert isinstance(result.notifications, list) + + @pytest.mark.asyncio + async def test_get_notifications_with_tag_filters( + self, test_user: AsyncClient + ) -> None: + """Filter notifications by tags.""" + response = await test_user.get( + "/api/v1/notifications", + params={ + "include_tags": ["execution"], + "tag_prefix": "exec", + }, + ) + + assert response.status_code == 200 + result = NotificationListResponse.model_validate(response.json()) + assert isinstance(result.notifications, list) + + @pytest.mark.asyncio + async def test_get_notifications_unauthenticated( + self, client: AsyncClient + ) -> None: + """Unauthenticated request returns 401.""" + response = await client.get("/api/v1/notifications") + assert response.status_code == 401 + + +class TestMarkNotificationRead: + """Tests for PUT /api/v1/notifications/{notification_id}/read.""" + + @pytest.mark.asyncio + async def test_mark_nonexistent_notification_read( + self, test_user: AsyncClient + ) -> None: + """Marking nonexistent notification returns 404.""" + response = await test_user.put( + "/api/v1/notifications/00000000-0000-0000-0000-000000000000/read" + ) + assert response.status_code == 404 + + @pytest.mark.asyncio + async def test_mark_notification_read( + self, + test_user: AsyncClient, + created_execution: ExecutionResponse, + ) -> None: + """Mark existing notification as read.""" + notification = await wait_for_notification(test_user) + + response = await test_user.put( + f"/api/v1/notifications/{notification.notification_id}/read" + ) + + assert response.status_code == 200 + result = NotificationResponse.model_validate(response.json()) + assert result.notification_id == notification.notification_id + + +class TestMarkAllRead: + """Tests for POST /api/v1/notifications/mark-all-read.""" + + @pytest.mark.asyncio + async def 
test_mark_all_read(self, test_user: AsyncClient) -> None: + """Mark all notifications as read returns 204.""" + response = await test_user.post("/api/v1/notifications/mark-all-read") + + assert response.status_code == 204 + + @pytest.mark.asyncio + async def test_mark_all_read_idempotent( + self, test_user: AsyncClient + ) -> None: + """Mark all read is idempotent.""" + response1 = await test_user.post("/api/v1/notifications/mark-all-read") + response2 = await test_user.post("/api/v1/notifications/mark-all-read") + + assert response1.status_code == 204 + assert response2.status_code == 204 + + +class TestSubscriptions: + """Tests for /api/v1/notifications/subscriptions.""" + + @pytest.mark.asyncio + async def test_get_subscriptions(self, test_user: AsyncClient) -> None: + """Get notification subscriptions.""" + response = await test_user.get("/api/v1/notifications/subscriptions") + + assert response.status_code == 200 + result = SubscriptionsResponse.model_validate(response.json()) + + assert isinstance(result.subscriptions, list) + for sub in result.subscriptions: + assert sub.channel is not None + + @pytest.mark.asyncio + async def test_update_subscription(self, test_user: AsyncClient) -> None: + """Update a subscription channel.""" + response = await test_user.put( + f"/api/v1/notifications/subscriptions/{NotificationChannel.IN_APP}", + json={ + "enabled": True, + "severities": [NotificationSeverity.LOW, NotificationSeverity.MEDIUM, NotificationSeverity.HIGH], + }, + ) + + assert response.status_code == 200 + result = NotificationSubscription.model_validate(response.json()) + + assert result.enabled is True + assert result.channel == NotificationChannel.IN_APP + + @pytest.mark.asyncio + async def test_update_subscription_disable( + self, test_user: AsyncClient + ) -> None: + """Disable a subscription channel.""" + response = await test_user.put( + f"/api/v1/notifications/subscriptions/{NotificationChannel.IN_APP}", + json={"enabled": False}, + ) + + assert 
response.status_code == 200 + result = NotificationSubscription.model_validate(response.json()) + assert result.enabled is False + + @pytest.mark.asyncio + async def test_update_subscription_with_tags( + self, test_user: AsyncClient + ) -> None: + """Update subscription with tag filters.""" + response = await test_user.put( + f"/api/v1/notifications/subscriptions/{NotificationChannel.IN_APP}", + json={ + "enabled": True, + "include_tags": ["execution", "system"], + "exclude_tags": ["debug"], + }, + ) + + assert response.status_code == 200 + result = NotificationSubscription.model_validate(response.json()) + assert result.enabled is True + + +class TestUnreadCount: + """Tests for GET /api/v1/notifications/unread-count.""" + + @pytest.mark.asyncio + async def test_get_unread_count(self, test_user: AsyncClient) -> None: + """Get unread notification count.""" + response = await test_user.get("/api/v1/notifications/unread-count") + + assert response.status_code == 200 + result = UnreadCountResponse.model_validate(response.json()) + + assert result.unread_count >= 0 + assert isinstance(result.unread_count, int) + + +class TestDeleteNotification: + """Tests for DELETE /api/v1/notifications/{notification_id}.""" + + @pytest.mark.asyncio + async def test_delete_nonexistent_notification( + self, test_user: AsyncClient + ) -> None: + """Deleting nonexistent notification returns 404.""" + response = await test_user.delete( + "/api/v1/notifications/00000000-0000-0000-0000-000000000000" + ) + + assert response.status_code == 404 + + @pytest.mark.asyncio + async def test_delete_notification( + self, + test_user: AsyncClient, + created_execution: ExecutionResponse, + ) -> None: + """Delete existing notification returns success.""" + notification = await wait_for_notification(test_user) + + response = await test_user.delete( + f"/api/v1/notifications/{notification.notification_id}" + ) + + assert response.status_code == 200 + result = 
DeleteNotificationResponse.model_validate(response.json()) + assert "deleted" in result.message.lower() + + +class TestNotificationIsolation: + """Tests for notification access isolation between users.""" + + @pytest.mark.asyncio + async def test_user_cannot_see_other_users_notifications( + self, + test_user: AsyncClient, + another_user: AsyncClient, + created_execution: ExecutionResponse, + ) -> None: + """User's notification list does not include other users' notifications.""" + notification = await wait_for_notification(test_user) + + response = await another_user.get("/api/v1/notifications") + assert response.status_code == 200 + + result = NotificationListResponse.model_validate(response.json()) + notification_ids = [n.notification_id for n in result.notifications] + + assert notification.notification_id not in notification_ids + + @pytest.mark.asyncio + async def test_cannot_mark_other_users_notification_read( + self, + test_user: AsyncClient, + another_user: AsyncClient, + created_execution: ExecutionResponse, + ) -> None: + """Cannot mark another user's notification as read.""" + notification = await wait_for_notification(test_user) + + response = await another_user.put( + f"/api/v1/notifications/{notification.notification_id}/read" + ) + + assert response.status_code == 404 + + @pytest.mark.asyncio + async def test_cannot_delete_other_users_notification( + self, + test_user: AsyncClient, + another_user: AsyncClient, + created_execution: ExecutionResponse, + ) -> None: + """Cannot delete another user's notification.""" + notification = await wait_for_notification(test_user) + + response = await another_user.delete( + f"/api/v1/notifications/{notification.notification_id}" + ) + + assert response.status_code == 404 diff --git a/backend/tests/e2e/test_replay_routes.py b/backend/tests/e2e/test_replay_routes.py new file mode 100644 index 00000000..49e78934 --- /dev/null +++ b/backend/tests/e2e/test_replay_routes.py @@ -0,0 +1,459 @@ +import pytest +from 
class TestCreateReplaySession:
    """Tests for POST /api/v1/replay/sessions."""

    @pytest.mark.asyncio
    async def test_create_replay_session(self, test_admin: AsyncClient) -> None:
        """An admin can create a replay session with explicit settings."""
        req = ReplayRequest(
            replay_type=ReplayType.QUERY,
            target=ReplayTarget.KAFKA,
            filter=ReplayFilter(),
            speed_multiplier=1.0,
            preserve_timestamps=False,
            batch_size=100,
            skip_errors=True,
            enable_progress_tracking=True,
        )
        resp = await test_admin.post(
            "/api/v1/replay/sessions", json=req.model_dump()
        )

        assert resp.status_code == 200
        parsed = ReplayResponse.model_validate(resp.json())

        assert parsed.session_id is not None
        assert parsed.status in list(ReplayStatus)
        assert parsed.message is not None

    @pytest.mark.asyncio
    async def test_create_replay_session_with_filter(
        self, test_admin: AsyncClient
    ) -> None:
        """A session can be created with an event-type filter and limits."""
        req = ReplayRequest(
            replay_type=ReplayType.QUERY,
            target=ReplayTarget.KAFKA,
            filter=ReplayFilter(event_types=[EventType.EXECUTION_REQUESTED]),
            batch_size=50,
            max_events=1000,
        )
        resp = await test_admin.post(
            "/api/v1/replay/sessions", json=req.model_dump()
        )

        assert resp.status_code == 200
        parsed = ReplayResponse.model_validate(resp.json())
        assert parsed.session_id is not None

    @pytest.mark.asyncio
    async def test_create_replay_session_file_target(
        self, test_admin: AsyncClient
    ) -> None:
        """A session targeting a file export path is accepted."""
        req = ReplayRequest(
            replay_type=ReplayType.QUERY,
            target=ReplayTarget.FILE,
            filter=ReplayFilter(),
            target_file_path="/tmp/replay_export.json",
        )
        resp = await test_admin.post(
            "/api/v1/replay/sessions", json=req.model_dump()
        )

        assert resp.status_code == 200
        parsed = ReplayResponse.model_validate(resp.json())
        assert parsed.session_id is not None

    @pytest.mark.asyncio
    async def test_create_replay_session_forbidden_for_regular_user(
        self, test_user: AsyncClient
    ) -> None:
        """A non-admin user is rejected with 403."""
        resp = await test_user.post(
            "/api/v1/replay/sessions",
            json={
                "replay_type": ReplayType.QUERY,
                "target": ReplayTarget.KAFKA,
                "filter": {},
            },
        )
        assert resp.status_code == 403

    @pytest.mark.asyncio
    async def test_create_replay_session_unauthenticated(
        self, client: AsyncClient
    ) -> None:
        """An anonymous request is rejected with 401."""
        resp = await client.post(
            "/api/v1/replay/sessions",
            json={
                "replay_type": ReplayType.QUERY,
                "target": ReplayTarget.KAFKA,
                "filter": {},
            },
        )
        assert resp.status_code == 401


class TestStartReplaySession:
    """Tests for POST /api/v1/replay/sessions/{session_id}/start."""

    @pytest.mark.asyncio
    async def test_start_replay_session(self, test_admin: AsyncClient) -> None:
        """A freshly created session can be started."""
        req = ReplayRequest(
            replay_type=ReplayType.TIME_RANGE,
            target=ReplayTarget.KAFKA,
            filter=ReplayFilter(),
            batch_size=10,
        )
        created_resp = await test_admin.post(
            "/api/v1/replay/sessions", json=req.model_dump()
        )

        if created_resp.status_code == 200:
            created = ReplayResponse.model_validate(created_resp.json())

            start_resp = await test_admin.post(
                f"/api/v1/replay/sessions/{created.session_id}/start"
            )

            # Outcome depends on session state; only validate on success.
            if start_resp.status_code == 200:
                parsed = ReplayResponse.model_validate(start_resp.json())
                assert parsed.session_id == created.session_id
                assert parsed.status in [
                    ReplayStatus.RUNNING,
                    ReplayStatus.COMPLETED,
                    ReplayStatus.FAILED,
                ]

    @pytest.mark.asyncio
    async def test_start_nonexistent_session(self, test_admin: AsyncClient) -> None:
        """Starting an unknown session returns 404."""
        resp = await test_admin.post(
            "/api/v1/replay/sessions/nonexistent-session/start"
        )
        assert resp.status_code == 404


class TestPauseReplaySession:
    """Tests for POST /api/v1/replay/sessions/{session_id}/pause."""

    @pytest.mark.asyncio
    async def test_pause_replay_session(self, test_admin: AsyncClient) -> None:
        """A started session can be paused."""
        req = ReplayRequest(
            replay_type=ReplayType.TIME_RANGE,
            target=ReplayTarget.KAFKA,
            filter=ReplayFilter(),
        )
        created_resp = await test_admin.post(
            "/api/v1/replay/sessions", json=req.model_dump()
        )

        if created_resp.status_code == 200:
            created = ReplayResponse.model_validate(created_resp.json())

            await test_admin.post(
                f"/api/v1/replay/sessions/{created.session_id}/start"
            )

            pause_resp = await test_admin.post(
                f"/api/v1/replay/sessions/{created.session_id}/pause"
            )

            # Pause may legitimately fail depending on session state.
            if pause_resp.status_code == 200:
                parsed = ReplayResponse.model_validate(pause_resp.json())
                assert parsed.session_id == created.session_id

    @pytest.mark.asyncio
    async def test_pause_nonexistent_session(self, test_admin: AsyncClient) -> None:
        """Pausing an unknown session returns 404."""
        resp = await test_admin.post(
            "/api/v1/replay/sessions/nonexistent-session/pause"
        )
        assert resp.status_code == 404
class TestResumeReplaySession:
    """Tests for POST /api/v1/replay/sessions/{session_id}/resume."""

    @pytest.mark.asyncio
    async def test_resume_replay_session(self, test_admin: AsyncClient) -> None:
        """A created session accepts a resume request."""
        req = ReplayRequest(
            replay_type=ReplayType.TIME_RANGE,
            target=ReplayTarget.KAFKA,
            filter=ReplayFilter(),
        )
        created_resp = await test_admin.post(
            "/api/v1/replay/sessions", json=req.model_dump()
        )
        assert created_resp.status_code == 200
        created = ReplayResponse.model_validate(created_resp.json())

        resume_resp = await test_admin.post(
            f"/api/v1/replay/sessions/{created.session_id}/resume"
        )
        assert resume_resp.status_code == 200
        parsed = ReplayResponse.model_validate(resume_resp.json())
        assert parsed.session_id == created.session_id

    @pytest.mark.asyncio
    async def test_resume_nonexistent_session(self, test_admin: AsyncClient) -> None:
        """Resuming an unknown session returns 404."""
        resp = await test_admin.post(
            "/api/v1/replay/sessions/nonexistent-session/resume"
        )
        assert resp.status_code == 404


class TestCancelReplaySession:
    """Tests for POST /api/v1/replay/sessions/{session_id}/cancel."""

    @pytest.mark.asyncio
    async def test_cancel_replay_session(self, test_admin: AsyncClient) -> None:
        """A created session can be cancelled."""
        req = ReplayRequest(
            replay_type=ReplayType.TIME_RANGE,
            target=ReplayTarget.KAFKA,
            filter=ReplayFilter(),
        )
        created_resp = await test_admin.post(
            "/api/v1/replay/sessions", json=req.model_dump()
        )

        if created_resp.status_code == 200:
            created = ReplayResponse.model_validate(created_resp.json())

            cancel_resp = await test_admin.post(
                f"/api/v1/replay/sessions/{created.session_id}/cancel"
            )

            # Only validate the payload when cancellation succeeded.
            if cancel_resp.status_code == 200:
                parsed = ReplayResponse.model_validate(cancel_resp.json())
                assert parsed.session_id == created.session_id
                assert parsed.status in [
                    ReplayStatus.CANCELLED,
                    ReplayStatus.COMPLETED,
                ]

    @pytest.mark.asyncio
    async def test_cancel_nonexistent_session(self, test_admin: AsyncClient) -> None:
        """Cancelling an unknown session returns 404."""
        resp = await test_admin.post(
            "/api/v1/replay/sessions/nonexistent-session/cancel"
        )
        assert resp.status_code == 404


class TestListReplaySessions:
    """Tests for GET /api/v1/replay/sessions."""

    @pytest.mark.asyncio
    async def test_list_replay_sessions(self, test_admin: AsyncClient) -> None:
        """Listing sessions returns well-formed summaries."""
        resp = await test_admin.get("/api/v1/replay/sessions")

        assert resp.status_code == 200
        summaries = [SessionSummary.model_validate(s) for s in resp.json()]

        for summary in summaries:
            assert summary.session_id is not None
            assert summary.replay_type in list(ReplayType)
            assert summary.target in list(ReplayTarget)
            assert summary.status in list(ReplayStatus)
            assert summary.total_events >= 0
            assert summary.replayed_events >= 0
            assert summary.failed_events >= 0
            assert summary.skipped_events >= 0
            assert summary.created_at is not None

    @pytest.mark.asyncio
    async def test_list_replay_sessions_with_status_filter(
        self, test_admin: AsyncClient
    ) -> None:
        """The status query parameter filters the listing."""
        resp = await test_admin.get(
            "/api/v1/replay/sessions",
            params={"status": ReplayStatus.COMPLETED},
        )

        assert resp.status_code == 200
        summaries = [SessionSummary.model_validate(s) for s in resp.json()]

        for summary in summaries:
            assert summary.status == ReplayStatus.COMPLETED

    @pytest.mark.asyncio
    async def test_list_replay_sessions_with_limit(
        self, test_admin: AsyncClient
    ) -> None:
        """The limit query parameter caps the result size."""
        resp = await test_admin.get(
            "/api/v1/replay/sessions",
            params={"limit": 10},
        )

        assert resp.status_code == 200
        assert len(resp.json()) <= 10

    @pytest.mark.asyncio
    async def test_list_replay_sessions_forbidden_for_regular_user(
        self, test_user: AsyncClient
    ) -> None:
        """A non-admin user is rejected with 403."""
        resp = await test_user.get("/api/v1/replay/sessions")
        assert resp.status_code == 403
class TestGetReplaySession:
    """Tests for GET /api/v1/replay/sessions/{session_id}."""

    @pytest.mark.asyncio
    async def test_get_replay_session(self, test_admin: AsyncClient) -> None:
        """A created session can be fetched by its ID."""
        req = ReplayRequest(
            replay_type=ReplayType.QUERY,
            target=ReplayTarget.KAFKA,
            filter=ReplayFilter(),
        )
        created_resp = await test_admin.post(
            "/api/v1/replay/sessions", json=req.model_dump()
        )
        assert created_resp.status_code == 200
        created = ReplayResponse.model_validate(created_resp.json())

        resp = await test_admin.get(
            f"/api/v1/replay/sessions/{created.session_id}"
        )
        assert resp.status_code == 200
        fetched = ReplaySession.model_validate(resp.json())

        assert fetched.session_id == created.session_id
        assert fetched.config is not None
        assert fetched.status in list(ReplayStatus)
        assert fetched.total_events >= 0
        assert fetched.replayed_events >= 0
        assert fetched.failed_events >= 0
        assert fetched.created_at is not None

    @pytest.mark.asyncio
    async def test_get_replay_session_not_found(
        self, test_admin: AsyncClient
    ) -> None:
        """Fetching an unknown session returns 404."""
        resp = await test_admin.get(
            "/api/v1/replay/sessions/nonexistent-session"
        )
        assert resp.status_code == 404

    @pytest.mark.asyncio
    async def test_get_replay_session_forbidden_for_regular_user(
        self, test_user: AsyncClient
    ) -> None:
        """A non-admin user is rejected with 403."""
        resp = await test_user.get(
            "/api/v1/replay/sessions/some-session-id"
        )
        assert resp.status_code == 403


class TestCleanupOldSessions:
    """Tests for POST /api/v1/replay/cleanup."""

    @pytest.mark.asyncio
    async def test_cleanup_old_sessions(self, test_admin: AsyncClient) -> None:
        """Cleanup with a 24h threshold returns a valid summary."""
        resp = await test_admin.post(
            "/api/v1/replay/cleanup",
            params={"older_than_hours": 24},
        )

        assert resp.status_code == 200
        parsed = CleanupResponse.model_validate(resp.json())

        assert parsed.removed_sessions >= 0
        assert parsed.message is not None

    @pytest.mark.asyncio
    async def test_cleanup_old_sessions_custom_hours(
        self, test_admin: AsyncClient
    ) -> None:
        """Cleanup accepts a custom time threshold."""
        resp = await test_admin.post(
            "/api/v1/replay/cleanup",
            params={"older_than_hours": 168},  # 1 week
        )

        assert resp.status_code == 200
        parsed = CleanupResponse.model_validate(resp.json())
        assert parsed.removed_sessions >= 0

    @pytest.mark.asyncio
    async def test_cleanup_forbidden_for_regular_user(
        self, test_user: AsyncClient
    ) -> None:
        """A non-admin user is rejected with 403."""
        resp = await test_user.post("/api/v1/replay/cleanup")
        assert resp.status_code == 403

    @pytest.mark.asyncio
    async def test_cleanup_unauthenticated(self, client: AsyncClient) -> None:
        """An anonymous request is rejected with 401."""
        resp = await client.post("/api/v1/replay/cleanup")
        assert resp.status_code == 401
@pytest_asyncio.fixture
async def k8s_clients(scope: AsyncContainer) -> K8sClients:
    """Resolve K8sClients from the DI container."""
    return await scope.get(K8sClients)


@pytest_asyncio.fixture
async def resource_cleaner(scope: AsyncContainer) -> ResourceCleaner:
    """Resolve ResourceCleaner from the DI container."""
    return await scope.get(ResourceCleaner)


@pytest.mark.asyncio
async def test_get_resource_usage(
    resource_cleaner: ResourceCleaner, test_settings: Settings
) -> None:
    """Usage report contains at least the core resource categories."""
    report = await resource_cleaner.get_resource_usage(namespace=test_settings.K8S_NAMESPACE)
    assert set(report.keys()) >= {"pods", "configmaps", "network_policies"}


@pytest.mark.asyncio
async def test_cleanup_orphaned_resources_dry_run(
    resource_cleaner: ResourceCleaner, test_settings: Settings
) -> None:
    """Dry-run orphan cleanup reports the expected resource categories."""
    report = await resource_cleaner.cleanup_orphaned_resources(
        namespace=test_settings.K8S_NAMESPACE,
        max_age_hours=0,
        dry_run=True,
    )
    assert set(report.keys()) >= {"pods", "configmaps", "pvcs"}


@pytest.mark.asyncio
async def test_cleanup_nonexistent_pod(
    resource_cleaner: ResourceCleaner, test_settings: Settings
) -> None:
    """Cleaning up a missing pod returns promptly without errors."""
    namespace = test_settings.K8S_NAMESPACE
    missing_pod = "integr8s-test-nonexistent-pod"

    # Use a local timeout variable with buffer for scheduler jitter
    timeout = 2  # Reduced from 5s since non-existent resources return immediately (404)
    jitter_buffer = 0.5  # Account for scheduler/GC pauses

    started = asyncio.get_running_loop().time()
    await resource_cleaner.cleanup_pod_resources(
        pod_name=missing_pod,
        namespace=namespace,
        execution_id="test-exec-nonexistent",
        timeout=timeout,
    )
    elapsed = asyncio.get_running_loop().time() - started

    assert elapsed < timeout + jitter_buffer, (
        f"Cleanup took {elapsed:.2f}s, expected < {timeout + jitter_buffer}s for non-existent resources"
    )

    report = await resource_cleaner.get_resource_usage(namespace=namespace)
    assert isinstance(report.get("pods", 0), int)
    assert isinstance(report.get("configmaps", 0), int)


@pytest.mark.asyncio
async def test_cleanup_orphaned_configmaps_dry_run(
    k8s_clients: K8sClients, resource_cleaner: ResourceCleaner, test_settings: Settings
) -> None:
    """A freshly created labelled ConfigMap is detected as an orphan candidate."""
    core_api = k8s_clients.v1
    ns = test_settings.K8S_NAMESPACE
    cm_name = f"int-test-cm-{int(datetime.now().timestamp())}"

    metadata = k8s_client.V1ObjectMeta(
        name=cm_name,
        labels={"app": "integr8s", "execution-id": "e-int-test"},
    )
    body = k8s_client.V1ConfigMap(metadata=metadata, data={"k": "v"})
    core_api.create_namespaced_config_map(namespace=ns, body=body)

    try:
        report = await resource_cleaner.cleanup_orphaned_resources(namespace=ns, max_age_hours=0, dry_run=True)
        assert any(cm_name == cm for cm in report.get("configmaps", [])), (
            f"Expected ConfigMap '{cm_name}' to be detected as orphan candidate"
        )
    finally:
        # Best-effort teardown; the map may already be gone.
        try:
            core_api.delete_namespaced_config_map(name=cm_name, namespace=ns)
        except Exception:
            pass
Settings) -> None: - rc = ResourceCleaner(logger=_test_logger) - await rc.initialize() - - # Attempt to delete a pod that doesn't exist - should complete without errors - namespace = test_settings.K8S_NAMESPACE - nonexistent_pod = "integr8s-test-nonexistent-pod" - - # Should complete within timeout and not raise any exceptions - start_time = asyncio.get_running_loop().time() - await rc.cleanup_pod_resources( - pod_name=nonexistent_pod, - namespace=namespace, - execution_id="test-exec-nonexistent", - timeout=5, - ) - elapsed = asyncio.get_running_loop().time() - start_time - - # Verify it completed quickly (not waiting full timeout for non-existent resources) - assert elapsed < 5, f"Cleanup took {elapsed}s, should be quick for non-existent resources" - - # Verify no resources exist with this name (should be empty/zero) - usage = await rc.get_resource_usage(namespace=namespace) - - # usage returns counts (int), not lists - # Just check that we got a valid usage report - assert isinstance(usage.get("pods", 0), int) - assert isinstance(usage.get("configmaps", 0), int) diff --git a/backend/tests/e2e/test_resource_cleaner_orphan.py b/backend/tests/e2e/test_resource_cleaner_orphan.py deleted file mode 100644 index 334b7b29..00000000 --- a/backend/tests/e2e/test_resource_cleaner_orphan.py +++ /dev/null @@ -1,50 +0,0 @@ -import logging -from datetime import datetime - -import pytest -from app.services.result_processor.resource_cleaner import ResourceCleaner -from app.settings import Settings -from kubernetes import client as k8s_client -from kubernetes import config as k8s_config - -pytestmark = [pytest.mark.e2e, pytest.mark.k8s] - -_test_logger = logging.getLogger("test.k8s.resource_cleaner_orphan") - - -def _ensure_kubeconfig() -> None: - try: - k8s_config.load_incluster_config() - except Exception: - k8s_config.load_kube_config() - - -@pytest.mark.asyncio -async def test_cleanup_orphaned_configmaps_dry_run(test_settings: Settings) -> None: - _ensure_kubeconfig() - v1 = 
async def wait_for_saga(
    client: AsyncClient,
    execution_id: str,
    timeout: float = 30.0,
    poll_interval: float = 0.5,
) -> SagaStatusResponse:
    """Poll until at least one saga exists for the execution.

    Args:
        client: Authenticated HTTP client
        execution_id: ID of execution to get saga for
        timeout: Maximum time to wait in seconds
        poll_interval: Time between polls in seconds

    Returns:
        First saga for the execution

    Raises:
        TimeoutError: If no saga appears within timeout
        AssertionError: If API returns unexpected status code
    """
    # Bind the running loop once: asyncio.get_event_loop() is deprecated when
    # called from a coroutine, and the sibling e2e tests already time against
    # asyncio.get_running_loop().time().
    loop = asyncio.get_running_loop()
    deadline = loop.time() + timeout

    while loop.time() < deadline:
        response = await client.get(f"/api/v1/sagas/execution/{execution_id}")
        assert response.status_code == 200, f"Unexpected: {response.status_code} - {response.text}"

        result = SagaListResponse.model_validate(response.json())
        if result.sagas:
            return result.sagas[0]

        await asyncio.sleep(poll_interval)

    raise TimeoutError(f"No saga appeared for execution {execution_id} within {timeout}s")


class TestGetSagaStatus:
    """Tests for GET /api/v1/sagas/{saga_id}."""

    @pytest.mark.asyncio
    async def test_get_saga_status(
        self, test_user: AsyncClient, created_execution: ExecutionResponse
    ) -> None:
        """Get saga status by ID returns a valid, fully populated response."""
        saga = await wait_for_saga(test_user, created_execution.execution_id)

        response = await test_user.get(f"/api/v1/sagas/{saga.saga_id}")

        assert response.status_code == 200
        result = SagaStatusResponse.model_validate(response.json())
        assert result.saga_id == saga.saga_id
        assert result.execution_id == created_execution.execution_id
        assert result.state in list(SagaState)
        assert result.saga_name is not None
        assert result.created_at is not None
        assert result.updated_at is not None
        assert result.retry_count >= 0

    @pytest.mark.asyncio
    async def test_get_saga_not_found(self, test_user: AsyncClient) -> None:
        """Get nonexistent saga returns 404."""
        response = await test_user.get("/api/v1/sagas/nonexistent-saga-id")

        assert response.status_code == 404

    @pytest.mark.asyncio
    async def test_get_saga_access_denied(
        self,
        test_user: AsyncClient,
        another_user: AsyncClient,
        created_execution: ExecutionResponse,
    ) -> None:
        """Cannot access another user's saga."""
        saga = await wait_for_saga(test_user, created_execution.execution_id)

        response = await another_user.get(f"/api/v1/sagas/{saga.saga_id}")

        assert response.status_code == 403


class TestGetExecutionSagas:
    """Tests for GET /api/v1/sagas/execution/{execution_id}."""

    @pytest.mark.asyncio
    async def test_get_execution_sagas(
        self, test_user: AsyncClient, created_execution: ExecutionResponse
    ) -> None:
        """Get sagas for a specific execution includes the awaited saga."""
        saga = await wait_for_saga(test_user, created_execution.execution_id)

        response = await test_user.get(
            f"/api/v1/sagas/execution/{created_execution.execution_id}"
        )

        assert response.status_code == 200
        result = SagaListResponse.model_validate(response.json())

        assert result.total >= 1
        assert len(result.sagas) >= 1
        assert isinstance(result.has_more, bool)

        saga_ids = [s.saga_id for s in result.sagas]
        assert saga.saga_id in saga_ids

    @pytest.mark.asyncio
    async def test_get_execution_sagas_with_pagination(
        self, test_user: AsyncClient, created_execution: ExecutionResponse
    ) -> None:
        """Pagination parameters are echoed back for execution sagas."""
        await wait_for_saga(test_user, created_execution.execution_id)

        response = await test_user.get(
            f"/api/v1/sagas/execution/{created_execution.execution_id}",
            params={"limit": 5, "skip": 0},
        )

        assert response.status_code == 200
        result = SagaListResponse.model_validate(response.json())
        assert result.limit == 5
        assert result.skip == 0

    @pytest.mark.asyncio
    async def test_get_execution_sagas_with_state_filter(
        self, test_user: AsyncClient, created_execution: ExecutionResponse
    ) -> None:
        """Filtering by state only returns sagas in that state."""
        saga = await wait_for_saga(test_user, created_execution.execution_id)

        response = await test_user.get(
            f"/api/v1/sagas/execution/{created_execution.execution_id}",
            params={"state": saga.state.value},
        )

        assert response.status_code == 200
        result = SagaListResponse.model_validate(response.json())
        assert len(result.sagas) >= 1
        for s in result.sagas:
            assert s.state == saga.state
class TestListSagas:
    """Tests for GET /api/v1/sagas/."""

    @pytest.mark.asyncio
    async def test_list_sagas(
        self, test_user: AsyncClient, created_execution: ExecutionResponse
    ) -> None:
        """The current user's saga listing includes their awaited saga."""
        saga = await wait_for_saga(test_user, created_execution.execution_id)

        resp = await test_user.get("/api/v1/sagas/")

        assert resp.status_code == 200
        listing = SagaListResponse.model_validate(resp.json())

        assert listing.total >= 1
        assert len(listing.sagas) >= 1

        listed_ids = [s.saga_id for s in listing.sagas]
        assert saga.saga_id in listed_ids

    @pytest.mark.asyncio
    async def test_list_sagas_with_state_filter(
        self, test_user: AsyncClient, created_execution: ExecutionResponse
    ) -> None:
        """Filtering by state only returns sagas in that state."""
        saga = await wait_for_saga(test_user, created_execution.execution_id)

        resp = await test_user.get(
            "/api/v1/sagas/",
            params={"state": saga.state.value},
        )

        assert resp.status_code == 200
        listing = SagaListResponse.model_validate(resp.json())

        for s in listing.sagas:
            assert s.state == saga.state

    @pytest.mark.asyncio
    async def test_list_sagas_pagination(
        self, test_user: AsyncClient, created_execution: ExecutionResponse
    ) -> None:
        """Pagination parameters are echoed back in the listing."""
        await wait_for_saga(test_user, created_execution.execution_id)

        resp = await test_user.get(
            "/api/v1/sagas/",
            params={"limit": 10, "skip": 0},
        )

        assert resp.status_code == 200
        listing = SagaListResponse.model_validate(resp.json())
        assert listing.limit == 10
        assert listing.skip == 0

    @pytest.mark.asyncio
    async def test_list_sagas_unauthenticated(self, client: AsyncClient) -> None:
        """An anonymous request is rejected with 401."""
        resp = await client.get("/api/v1/sagas/")

        assert resp.status_code == 401


class TestCancelSaga:
    """Tests for POST /api/v1/sagas/{saga_id}/cancel."""

    @pytest.mark.asyncio
    async def test_cancel_saga(
        self,
        test_user: AsyncClient,
        long_running_execution_request: ExecutionRequest,
    ) -> None:
        """A running saga can be cancelled by its owner."""
        exec_resp = await test_user.post(
            "/api/v1/execute", json=long_running_execution_request.model_dump()
        )
        assert exec_resp.status_code == 200

        execution = ExecutionResponse.model_validate(exec_resp.json())
        saga = await wait_for_saga(test_user, execution.execution_id)

        resp = await test_user.post(f"/api/v1/sagas/{saga.saga_id}/cancel")

        assert resp.status_code == 200
        parsed = SagaCancellationResponse.model_validate(resp.json())
        assert parsed.saga_id == saga.saga_id
        assert isinstance(parsed.success, bool)
        assert parsed.message is not None

    @pytest.mark.asyncio
    async def test_cancel_nonexistent_saga(self, test_user: AsyncClient) -> None:
        """Cancelling an unknown saga returns 404."""
        resp = await test_user.post(
            "/api/v1/sagas/nonexistent-saga-id/cancel"
        )

        assert resp.status_code == 404

    @pytest.mark.asyncio
    async def test_cancel_other_users_saga_forbidden(
        self,
        test_user: AsyncClient,
        another_user: AsyncClient,
        long_running_execution_request: ExecutionRequest,
    ) -> None:
        """Cancelling a foreign saga is rejected with 403."""
        exec_resp = await test_user.post(
            "/api/v1/execute", json=long_running_execution_request.model_dump()
        )
        assert exec_resp.status_code == 200

        execution = ExecutionResponse.model_validate(exec_resp.json())
        saga = await wait_for_saga(test_user, execution.execution_id)

        resp = await another_user.post(f"/api/v1/sagas/{saga.saga_id}/cancel")

        assert resp.status_code == 403
class TestSagaIsolation:
    """Verify saga visibility is strictly scoped to the owning user."""

    @pytest.mark.asyncio
    async def test_user_cannot_see_other_users_sagas(
        self,
        test_user: AsyncClient,
        another_user: AsyncClient,
        created_execution: ExecutionResponse,
    ) -> None:
        """Another user's saga listing never exposes this user's sagas."""
        saga = await wait_for_saga(test_user, created_execution.execution_id)

        resp = await another_user.get("/api/v1/sagas/")
        assert resp.status_code == 200

        listing = SagaListResponse.model_validate(resp.json())
        visible_ids = [s.saga_id for s in listing.sagas]

        assert saga.saga_id not in visible_ids


class TestCreateSavedScript:
    """Tests for POST /api/v1/scripts."""

    @pytest.mark.asyncio
    async def test_create_saved_script(
        self, test_user: AsyncClient, new_script_request: SavedScriptCreateRequest
    ) -> None:
        """Creating a script echoes back a fully populated record."""
        resp = await test_user.post(
            "/api/v1/scripts", json=new_script_request.model_dump()
        )

        assert resp.status_code == 200
        saved = SavedScriptResponse.model_validate(resp.json())

        assert saved.script_id is not None
        assert saved.name == new_script_request.name
        assert saved.script == new_script_request.script
        assert saved.lang == "python"
        assert saved.created_at is not None
        assert saved.updated_at is not None

    @pytest.mark.asyncio
    async def test_create_saved_script_minimal(
        self, test_user: AsyncClient, new_script_request: SavedScriptCreateRequest
    ) -> None:
        """Creation succeeds with just the required fields."""
        resp = await test_user.post(
            "/api/v1/scripts", json=new_script_request.model_dump()
        )

        assert resp.status_code == 200
        saved = SavedScriptResponse.model_validate(resp.json())
        assert saved.name == new_script_request.name

    @pytest.mark.asyncio
    async def test_create_saved_script_unauthenticated(
        self, client: AsyncClient
    ) -> None:
        """An anonymous request is rejected with 401."""
        resp = await client.post(
            "/api/v1/scripts",
            json={
                "name": "Unauthorized Script",
                "script": "pass",
                "lang": "python",
            },
        )
        assert resp.status_code == 401


class TestListSavedScripts:
    """Tests for GET /api/v1/scripts."""

    @pytest.mark.asyncio
    async def test_list_saved_scripts(
        self, test_user: AsyncClient, new_script_request: SavedScriptCreateRequest
    ) -> None:
        """A created script appears in the owner's listing."""
        await test_user.post("/api/v1/scripts", json=new_script_request.model_dump())

        resp = await test_user.get("/api/v1/scripts")

        assert resp.status_code == 200
        scripts = [SavedScriptResponse.model_validate(s) for s in resp.json()]

        assert len(scripts) >= 1
        assert any(new_script_request.name in s.name for s in scripts)

    @pytest.mark.asyncio
    async def test_list_saved_scripts_only_own(
        self, test_user: AsyncClient, another_user: AsyncClient,
        new_script_request: SavedScriptCreateRequest
    ) -> None:
        """A user's listing never contains another user's scripts."""
        await test_user.post("/api/v1/scripts", json=new_script_request.model_dump())

        resp = await another_user.get("/api/v1/scripts")
        assert resp.status_code == 200
        listed = resp.json()

        # test_user's script must not leak into another_user's listing.
        assert not any(new_script_request.name in s["name"] for s in listed)
class TestGetSavedScript:
    """Tests for GET /api/v1/scripts/{script_id}."""

    @pytest.mark.asyncio
    async def test_get_saved_script(
        self, test_user: AsyncClient, new_script_request: SavedScriptCreateRequest
    ) -> None:
        """A created script can be fetched by its ID."""
        created_resp = await test_user.post(
            "/api/v1/scripts", json=new_script_request.model_dump()
        )
        created = SavedScriptResponse.model_validate(created_resp.json())

        resp = await test_user.get(f"/api/v1/scripts/{created.script_id}")

        assert resp.status_code == 200
        fetched = SavedScriptResponse.model_validate(resp.json())
        assert fetched.script_id == created.script_id
        assert fetched.name == new_script_request.name
        assert fetched.script == new_script_request.script

    @pytest.mark.asyncio
    async def test_get_nonexistent_script(self, test_user: AsyncClient) -> None:
        """Fetching an unknown script returns 404."""
        resp = await test_user.get("/api/v1/scripts/nonexistent-id")
        assert resp.status_code == 404

    @pytest.mark.asyncio
    async def test_get_other_users_script_forbidden(
        self, test_user: AsyncClient, another_user: AsyncClient,
        new_script_request: SavedScriptCreateRequest
    ) -> None:
        """Fetching a foreign script is rejected as not-found."""
        created_resp = await test_user.post(
            "/api/v1/scripts", json=new_script_request.model_dump()
        )
        script_id = created_resp.json()["script_id"]

        resp = await another_user.get(f"/api/v1/scripts/{script_id}")
        assert resp.status_code == 404


class TestUpdateSavedScript:
    """Tests for PUT /api/v1/scripts/{script_id}."""

    @pytest.mark.asyncio
    async def test_update_saved_script(
        self, test_user: AsyncClient, new_script_request: SavedScriptCreateRequest
    ) -> None:
        """Updating a script changes its fields and bumps updated_at."""
        created_resp = await test_user.post(
            "/api/v1/scripts", json=new_script_request.model_dump()
        )
        original = SavedScriptResponse.model_validate(created_resp.json())

        update_req = SavedScriptUpdate(
            name="Updated Name",
            script="new content",
        )
        resp = await test_user.put(
            f"/api/v1/scripts/{original.script_id}",
            json=update_req.model_dump(exclude_unset=True),
        )

        assert resp.status_code == 200
        updated = SavedScriptResponse.model_validate(resp.json())
        assert updated.name == "Updated Name"
        assert updated.script == "new content"
        assert updated.updated_at > original.updated_at

    @pytest.mark.asyncio
    async def test_update_other_users_script_forbidden(
        self, test_user: AsyncClient, another_user: AsyncClient,
        new_script_request: SavedScriptCreateRequest
    ) -> None:
        """Updating a foreign script is rejected as not-found."""
        created_resp = await test_user.post(
            "/api/v1/scripts", json=new_script_request.model_dump()
        )
        script_id = created_resp.json()["script_id"]

        update_req = SavedScriptUpdate(name="Hacked")
        resp = await another_user.put(
            f"/api/v1/scripts/{script_id}",
            json=update_req.model_dump(exclude_unset=True),
        )
        assert resp.status_code == 404
== 404 + + @pytest.mark.asyncio + async def test_delete_nonexistent_script( + self, test_user: AsyncClient + ) -> None: + """Deleting nonexistent script returns 404.""" + response = await test_user.delete("/api/v1/scripts/nonexistent-id") + assert response.status_code == 404 diff --git a/backend/tests/e2e/test_sse_routes.py b/backend/tests/e2e/test_sse_routes.py new file mode 100644 index 00000000..b12ebcc9 --- /dev/null +++ b/backend/tests/e2e/test_sse_routes.py @@ -0,0 +1,105 @@ +import anyio +import pytest +from app.domain.enums.sse import SSEHealthStatus +from app.schemas_pydantic.execution import ExecutionResponse +from app.schemas_pydantic.sse import SSEHealthResponse +from httpx import AsyncClient + +pytestmark = [pytest.mark.e2e] + +SSE_TIMEOUT_SECONDS = 5.0 # Timeout for SSE connection establishment + + +class TestSSEHealth: + """Tests for GET /api/v1/events/health.""" + + @pytest.mark.asyncio + async def test_sse_health(self, test_user: AsyncClient) -> None: + """Get SSE service health status.""" + response = await test_user.get("/api/v1/events/health") + + assert response.status_code == 200 + result = SSEHealthResponse.model_validate(response.json()) + + assert result.status == SSEHealthStatus.HEALTHY + assert isinstance(result.kafka_enabled, bool) + assert result.active_connections >= 0 + assert result.active_executions >= 0 + assert result.active_consumers >= 0 + assert result.max_connections_per_user >= 0 + assert result.timestamp is not None + + @pytest.mark.asyncio + async def test_sse_health_unauthenticated( + self, client: AsyncClient + ) -> None: + """SSE health requires authentication.""" + response = await client.get("/api/v1/events/health") + assert response.status_code == 401 + + +class TestNotificationStream: + """Tests for GET /api/v1/events/notifications/stream.""" + + @pytest.mark.asyncio + async def test_notification_stream_returns_event_stream( + self, test_user: AsyncClient + ) -> None: + """Notification stream returns SSE content 
type.""" + with anyio.fail_after(SSE_TIMEOUT_SECONDS): + async with test_user.stream( + "GET", "/api/v1/events/notifications/stream" + ) as response: + assert response.status_code == 200 + content_type = response.headers.get("content-type", "") + assert "text/event-stream" in content_type + + @pytest.mark.asyncio + async def test_notification_stream_unauthenticated( + self, client: AsyncClient + ) -> None: + """Notification stream requires authentication.""" + response = await client.get("/api/v1/events/notifications/stream") + assert response.status_code == 401 + + +class TestExecutionStream: + """Tests for GET /api/v1/events/executions/{execution_id}.""" + + @pytest.mark.asyncio + async def test_execution_stream_returns_event_stream( + self, test_user: AsyncClient, created_execution: ExecutionResponse + ) -> None: + """Execution events stream returns SSE content type.""" + with anyio.fail_after(SSE_TIMEOUT_SECONDS): + async with test_user.stream( + "GET", f"/api/v1/events/executions/{created_execution.execution_id}" + ) as response: + assert response.status_code == 200 + content_type = response.headers.get("content-type", "") + assert "text/event-stream" in content_type + + @pytest.mark.asyncio + async def test_execution_stream_unauthenticated( + self, client: AsyncClient + ) -> None: + """Execution stream requires authentication.""" + response = await client.get("/api/v1/events/executions/some-id") + assert response.status_code == 401 + + @pytest.mark.asyncio + async def test_execution_stream_other_users_execution( + self, test_user: AsyncClient, another_user: AsyncClient, + created_execution: ExecutionResponse + ) -> None: + """Streaming another user's execution opens but events are filtered.""" + # SSE endpoints return 200 and start streaming - authorization + # happens at event level (user won't receive events for executions + # they don't own). We verify the stream opens with correct content-type. 
+ with anyio.fail_after(SSE_TIMEOUT_SECONDS): + async with another_user.stream( + "GET", f"/api/v1/events/executions/{created_execution.execution_id}" + ) as response: + assert response.status_code == 200 + content_type = response.headers.get("content-type", "") + assert "text/event-stream" in content_type diff --git a/backend/tests/e2e/test_user_settings_routes.py b/backend/tests/e2e/test_user_settings_routes.py new file mode 100644 index 00000000..12a3424c --- /dev/null +++ b/backend/tests/e2e/test_user_settings_routes.py @@ -0,0 +1,307 @@ +import pytest +from app.domain.enums.common import Theme +from app.schemas_pydantic.user_settings import ( + EditorSettings, + NotificationSettings, + RestoreSettingsRequest, + SettingsHistoryResponse, + ThemeUpdateRequest, + UserSettings, + UserSettingsUpdate, +) +from httpx import AsyncClient + +pytestmark = [pytest.mark.e2e] + + +class TestGetUserSettings: + """Tests for GET /api/v1/user/settings/.""" + + @pytest.mark.asyncio + async def test_get_user_settings(self, test_user: AsyncClient) -> None: + """Get current user settings.""" + response = await test_user.get("/api/v1/user/settings/") + + assert response.status_code == 200 + settings = UserSettings.model_validate(response.json()) + + assert settings.theme in [Theme.LIGHT, Theme.DARK, Theme.AUTO] + assert settings.timezone is not None + assert settings.notifications is not None + assert settings.editor is not None + + @pytest.mark.asyncio + async def test_get_user_settings_unauthenticated( + self, client: AsyncClient + ) -> None: + """Unauthenticated request returns 401.""" + response = await client.get("/api/v1/user/settings/") + assert response.status_code == 401 + + +class TestUpdateUserSettings: + """Tests for PUT /api/v1/user/settings/.""" + + @pytest.mark.asyncio + async def test_update_user_settings_full( + self, test_user: AsyncClient + ) -> None: + """Update all user settings.""" + request = UserSettingsUpdate( + theme=Theme.DARK, + 
timezone="America/New_York", + notifications=NotificationSettings( + execution_completed=True, + execution_failed=True, + system_updates=False, + security_alerts=True, + ), + editor=EditorSettings( + tab_size=4, + font_size=14, + show_line_numbers=True, + word_wrap=False, + ), + ) + response = await test_user.put( + "/api/v1/user/settings/", + json=request.model_dump(exclude_unset=True), + ) + + assert response.status_code == 200 + settings = UserSettings.model_validate(response.json()) + + assert settings.theme == Theme.DARK + assert settings.timezone == "America/New_York" + + @pytest.mark.asyncio + async def test_update_user_settings_partial( + self, test_user: AsyncClient + ) -> None: + """Update only some settings.""" + request = UserSettingsUpdate(theme=Theme.LIGHT) + response = await test_user.put( + "/api/v1/user/settings/", + json=request.model_dump(exclude_unset=True), + ) + + assert response.status_code == 200 + settings = UserSettings.model_validate(response.json()) + assert settings.theme == Theme.LIGHT + + +class TestUpdateTheme: + """Tests for PUT /api/v1/user/settings/theme.""" + + @pytest.mark.asyncio + async def test_update_theme_dark(self, test_user: AsyncClient) -> None: + """Update theme to dark.""" + request = ThemeUpdateRequest(theme=Theme.DARK) + response = await test_user.put( + "/api/v1/user/settings/theme", + json=request.model_dump(), + ) + + assert response.status_code == 200 + settings = UserSettings.model_validate(response.json()) + assert settings.theme == Theme.DARK + + @pytest.mark.asyncio + async def test_update_theme_light(self, test_user: AsyncClient) -> None: + """Update theme to light.""" + request = ThemeUpdateRequest(theme=Theme.LIGHT) + response = await test_user.put( + "/api/v1/user/settings/theme", + json=request.model_dump(), + ) + + assert response.status_code == 200 + settings = UserSettings.model_validate(response.json()) + assert settings.theme == Theme.LIGHT + + @pytest.mark.asyncio + async def 
test_update_theme_system(self, test_user: AsyncClient) -> None: + """Update theme to system.""" + request = ThemeUpdateRequest(theme=Theme.AUTO) + response = await test_user.put( + "/api/v1/user/settings/theme", + json=request.model_dump(), + ) + + assert response.status_code == 200 + settings = UserSettings.model_validate(response.json()) + assert settings.theme == Theme.AUTO + + +class TestUpdateNotificationSettings: + """Tests for PUT /api/v1/user/settings/notifications.""" + + @pytest.mark.asyncio + async def test_update_notification_settings( + self, test_user: AsyncClient + ) -> None: + """Update notification settings.""" + request = NotificationSettings( + execution_completed=True, + execution_failed=True, + system_updates=True, + security_alerts=True, + ) + response = await test_user.put( + "/api/v1/user/settings/notifications", + json=request.model_dump(), + ) + + assert response.status_code == 200 + settings = UserSettings.model_validate(response.json()) + assert settings.notifications is not None + + +class TestUpdateEditorSettings: + """Tests for PUT /api/v1/user/settings/editor.""" + + @pytest.mark.asyncio + async def test_update_editor_settings( + self, test_user: AsyncClient + ) -> None: + """Update editor settings.""" + request = EditorSettings( + tab_size=2, + font_size=16, + show_line_numbers=True, + word_wrap=True, + ) + response = await test_user.put( + "/api/v1/user/settings/editor", + json=request.model_dump(), + ) + + assert response.status_code == 200 + settings = UserSettings.model_validate(response.json()) + assert settings.editor.tab_size == 2 + assert settings.editor.font_size == 16 + assert settings.editor.word_wrap is True + + @pytest.mark.asyncio + async def test_update_editor_settings_partial( + self, test_user: AsyncClient + ) -> None: + """Update only some editor settings.""" + response = await test_user.put( + "/api/v1/user/settings/editor", + json={"tab_size": 4}, + ) + + assert response.status_code == 200 + settings = 
UserSettings.model_validate(response.json()) + assert settings.editor.tab_size == 4 + + +class TestSettingsHistory: + """Tests for GET /api/v1/user/settings/history.""" + + @pytest.mark.asyncio + async def test_get_settings_history(self, test_user: AsyncClient) -> None: + """Get settings change history.""" + # Make a change first to ensure history exists + request = ThemeUpdateRequest(theme=Theme.DARK) + update_response = await test_user.put( + "/api/v1/user/settings/theme", + json=request.model_dump(), + ) + assert update_response.status_code == 200 + + response = await test_user.get( + "/api/v1/user/settings/history", + params={"limit": 10}, + ) + + assert response.status_code == 200 + history = SettingsHistoryResponse.model_validate(response.json()) + assert history.limit == 10 + assert isinstance(history.history, list) + assert len(history.history) >= 1 + + @pytest.mark.asyncio + async def test_get_settings_history_default_limit( + self, test_user: AsyncClient + ) -> None: + """History uses default limit.""" + response = await test_user.get("/api/v1/user/settings/history") + + assert response.status_code == 200 + history = SettingsHistoryResponse.model_validate(response.json()) + assert isinstance(history.history, list) + + +class TestRestoreSettings: + """Tests for POST /api/v1/user/settings/restore.""" + + @pytest.mark.asyncio + async def test_restore_settings(self, test_user: AsyncClient) -> None: + """Restore settings to a previous point.""" + # Make a change first to ensure history exists + request = ThemeUpdateRequest(theme=Theme.DARK) + update_response = await test_user.put( + "/api/v1/user/settings/theme", + json=request.model_dump(), + ) + assert update_response.status_code == 200 + + # Get history + history_response = await test_user.get("/api/v1/user/settings/history") + assert history_response.status_code == 200 + + history = SettingsHistoryResponse.model_validate(history_response.json()) + assert len(history.history) >= 1, "No history entries found 
after settings update" + + # Restore to first entry + restore_req = RestoreSettingsRequest(timestamp=history.history[0].timestamp) + restore_response = await test_user.post( + "/api/v1/user/settings/restore", + json=restore_req.model_dump(mode="json"), + ) + + assert restore_response.status_code == 200 + restored = UserSettings.model_validate(restore_response.json()) + assert restored.theme is not None + + +class TestCustomSettings: + """Tests for PUT /api/v1/user/settings/custom/{key}.""" + + @pytest.mark.asyncio + async def test_update_custom_setting(self, test_user: AsyncClient) -> None: + """Update a custom setting.""" + response = await test_user.put( + "/api/v1/user/settings/custom/my_preference", + json={"value": "custom_value", "nested": {"key": 123}}, + ) + + assert response.status_code == 200 + settings = UserSettings.model_validate(response.json()) + assert "my_preference" in settings.custom_settings + + @pytest.mark.asyncio + async def test_update_multiple_custom_settings( + self, test_user: AsyncClient + ) -> None: + """Update multiple custom settings.""" + # First setting + first_response = await test_user.put( + "/api/v1/user/settings/custom/setting_one", + json={"value": 1}, + ) + assert first_response.status_code == 200 + first_settings = UserSettings.model_validate(first_response.json()) + assert "setting_one" in first_settings.custom_settings + + # Second setting + second_response = await test_user.put( + "/api/v1/user/settings/custom/setting_two", + json={"value": 2}, + ) + assert second_response.status_code == 200 + second_settings = UserSettings.model_validate(second_response.json()) + assert "setting_one" in second_settings.custom_settings + assert "setting_two" in second_settings.custom_settings diff --git a/backend/tests/helpers/__init__.py b/backend/tests/helpers/__init__.py deleted file mode 100644 index 31402bb5..00000000 --- a/backend/tests/helpers/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -"""Helper utilities for tests (async polling, 
Kafka utilities, event factories).""" - -from .auth import AuthResult, login_user -from .events import make_execution_requested_event - -__all__ = ["AuthResult", "login_user", "make_execution_requested_event"] diff --git a/backend/tests/helpers/auth.py b/backend/tests/helpers/auth.py deleted file mode 100644 index a7d8b947..00000000 --- a/backend/tests/helpers/auth.py +++ /dev/null @@ -1,42 +0,0 @@ -from typing import TypedDict - -from httpx import AsyncClient - - -class AuthResult(TypedDict): - """Result of a login operation with CSRF token.""" - - csrf_token: str - headers: dict[str, str] - - -async def login_user(client: AsyncClient, username: str, password: str) -> AuthResult: - """Login a user and return CSRF token and headers for subsequent requests. - - Use this helper when tests need to switch users or re-authenticate. - The returned headers dict should be passed to POST/PUT/DELETE requests. - - Args: - client: The httpx AsyncClient - username: Username to login with - password: Password for the user - - Returns: - AuthResult with csrf_token and headers dict containing X-CSRF-Token - - Raises: - AssertionError: If login fails - """ - response = await client.post( - "/api/v1/auth/login", - data={"username": username, "password": password}, - ) - assert response.status_code == 200, f"Login failed: {response.text}" - - json_data: dict[str, str] = response.json() - csrf_token = json_data.get("csrf_token", "") - - return AuthResult( - csrf_token=csrf_token, - headers={"X-CSRF-Token": csrf_token}, - ) diff --git a/backend/tests/helpers/cleanup.py b/backend/tests/helpers/cleanup.py deleted file mode 100644 index 760b48da..00000000 --- a/backend/tests/helpers/cleanup.py +++ /dev/null @@ -1,21 +0,0 @@ -import redis.asyncio as redis -from app.core.database_context import Database - - -async def cleanup_db_and_redis(db: Database, redis_client: redis.Redis) -> None: - """Clean DB and Redis before a test. 
- - Beanie is already initialized once during app lifespan (dishka_lifespan.py). - We just delete documents to preserve indexes and avoid file descriptor exhaustion. - - NOTE: With pytest-xdist, each worker is assigned a dedicated Redis DB - derived from the worker id (sum(_WORKER_ID.encode()) % 16), so flushdb() - is safe and only affects that worker's database. See tests/conftest.py - for REDIS_DB setup. - """ - collections = await db.list_collection_names(filter={"type": "collection"}) - for name in collections: - if not name.startswith("system."): - await db[name].delete_many({}) - - await redis_client.flushdb() diff --git a/backend/tests/helpers/events.py b/backend/tests/helpers/events.py deleted file mode 100644 index 055eec31..00000000 --- a/backend/tests/helpers/events.py +++ /dev/null @@ -1,50 +0,0 @@ -import uuid -from typing import Iterable - -from app.domain.events.typed import EventMetadata, ExecutionRequestedEvent - - -def make_execution_requested_event( - *, - execution_id: str | None = None, - script: str = "print('hello')", - language: str = "python", - language_version: str = "3.11", - runtime_image: str = "python:3.11-slim", - runtime_command: Iterable[str] = ("python",), - runtime_filename: str = "main.py", - timeout_seconds: int = 5, - cpu_limit: str = "100m", - memory_limit: str = "128Mi", - cpu_request: str = "50m", - memory_request: str = "64Mi", - priority: int = 5, - service_name: str = "tests", - service_version: str = "1.0.0", - user_id: str | None = None, -) -> ExecutionRequestedEvent: - """Factory for ExecutionRequestedEvent with sensible defaults. - - Override any field via keyword args. If no execution_id is provided, a random one is generated. 
- """ - if execution_id is None: - execution_id = f"exec-{uuid.uuid4().hex[:8]}" - - metadata = EventMetadata(service_name=service_name, service_version=service_version, user_id=user_id) - return ExecutionRequestedEvent( - execution_id=execution_id, - aggregate_id=execution_id, # Match production: aggregate_id == execution_id for execution events - script=script, - language=language, - language_version=language_version, - runtime_image=runtime_image, - runtime_command=list(runtime_command), - runtime_filename=runtime_filename, - timeout_seconds=timeout_seconds, - cpu_limit=cpu_limit, - memory_limit=memory_limit, - cpu_request=cpu_request, - memory_request=memory_request, - priority=priority, - metadata=metadata, - ) diff --git a/backend/tests/helpers/kafka.py b/backend/tests/helpers/kafka.py deleted file mode 100644 index 182b04c4..00000000 --- a/backend/tests/helpers/kafka.py +++ /dev/null @@ -1,21 +0,0 @@ -from collections.abc import Awaitable, Callable - -import pytest -from app.domain.events.typed import DomainEvent -from app.events.core import UnifiedProducer -from dishka import AsyncContainer - - -@pytest.fixture(scope="function") -async def producer(scope: AsyncContainer) -> UnifiedProducer: - """Real Kafka producer from DI scope.""" - prod: UnifiedProducer = await scope.get(UnifiedProducer) - return prod - - -@pytest.fixture(scope="function") -def send_event(producer: UnifiedProducer) -> Callable[[DomainEvent], Awaitable[None]]: - async def _send(ev: DomainEvent) -> None: - await producer.produce(ev) - return _send - diff --git a/backend/tests/helpers/sse.py b/backend/tests/helpers/sse.py deleted file mode 100644 index e72670f1..00000000 --- a/backend/tests/helpers/sse.py +++ /dev/null @@ -1,62 +0,0 @@ -import asyncio -import json -from typing import AsyncIterator, Iterable - -from httpx import AsyncClient - - -async def stream_sse(client: AsyncClient, url: str, timeout: float = 20.0) -> AsyncIterator[dict[str, object]]: - """Yield parsed SSE event dicts 
from the given URL within a timeout. - - Expects lines in the form "data: {...json...}" and ignores keepalives. - """ - async with asyncio.timeout(timeout): - async with client.stream("GET", url) as resp: - assert resp.status_code == 200, f"SSE stream {url} returned {resp.status_code}" - async for line in resp.aiter_lines(): - if not line or not line.startswith("data:"): - continue - payload = line[5:].strip() - if not payload or payload == "[DONE]": - continue - try: - ev = json.loads(payload) - except Exception: - continue - yield ev - - -async def wait_for_event_type( - client: AsyncClient, - url: str, - wanted_types: Iterable[str], - timeout: float = 20.0, -) -> dict[str, object]: - """Return first event whose event_type is in wanted_types, otherwise timeout.""" - wanted = {str(t).lower() for t in wanted_types} - async for ev in stream_sse(client, url, timeout=timeout): - et = str(ev.get("event_type") or "").lower() - if et in wanted: - return ev - raise TimeoutError(f"No event of types {wanted} seen on {url} within {timeout}s") - - -async def wait_for_execution_terminal( - client: AsyncClient, - execution_id: str, - timeout: float = 30.0, -) -> dict[str, object]: - terminal = {"execution_completed", "result_stored", "execution_failed", "execution_timeout", "execution_cancelled"} - url = f"/api/v1/events/executions/{execution_id}" - return await wait_for_event_type(client, url, terminal, timeout=timeout) - - -async def wait_for_execution_running( - client: AsyncClient, - execution_id: str, - timeout: float = 15.0, -) -> dict[str, object]: - running = {"execution_running", "execution_started", "execution_scheduled", "execution_queued"} - url = f"/api/v1/events/executions/{execution_id}" - return await wait_for_event_type(client, url, running, timeout=timeout) - diff --git a/backend/tests/integration/app/test_main_app.py b/backend/tests/integration/app/test_main_app.py deleted file mode 100644 index d92a5359..00000000 --- 
a/backend/tests/integration/app/test_main_app.py +++ /dev/null @@ -1,33 +0,0 @@ -from importlib import import_module - -import pytest -from app.settings import Settings -from fastapi import FastAPI -from starlette.routing import Route - -pytestmark = pytest.mark.integration - - -def test_create_app_real_instance(app: FastAPI) -> None: - assert isinstance(app, FastAPI) - - # Verify API routes are configured - paths = {r.path for r in app.router.routes if isinstance(r, Route)} - assert any(p.startswith("/api/") for p in paths) - - # Verify required middlewares are actually present in the stack - middleware_class_names = {getattr(m.cls, "__name__", str(m.cls)) for m in app.user_middleware} - - # Check that all required middlewares are configured - assert "CORSMiddleware" in middleware_class_names, "CORS middleware not configured" - assert "CorrelationMiddleware" in middleware_class_names, "Correlation middleware not configured" - assert "RequestSizeLimitMiddleware" in middleware_class_names, "Request size limit middleware not configured" - assert "CacheControlMiddleware" in middleware_class_names, "Cache control middleware not configured" - assert "MetricsMiddleware" in middleware_class_names, "Metrics middleware not configured" - assert "RateLimitMiddleware" in middleware_class_names, "Rate limit middleware not configured" - - -def test_create_app_function_constructs(test_settings: Settings) -> None: - # Sanity: calling create_app returns a FastAPI instance (lazy import) - inst = import_module("app.main").create_app(settings=test_settings) - assert isinstance(inst, FastAPI) diff --git a/backend/tests/integration/conftest.py b/backend/tests/integration/conftest.py deleted file mode 100644 index 0d824014..00000000 --- a/backend/tests/integration/conftest.py +++ /dev/null @@ -1,19 +0,0 @@ -from collections.abc import AsyncGenerator - -import pytest_asyncio -import redis.asyncio as redis -from app.core.database_context import Database - -from tests.helpers.cleanup import 
cleanup_db_and_redis - - -@pytest_asyncio.fixture(autouse=True) -async def _cleanup(db: Database, redis_client: redis.Redis) -> AsyncGenerator[None, None]: - """Clean DB and Redis before each integration test. - - Only pre-test cleanup - post-test cleanup causes event loop issues - when SSE/streaming tests hold connections across loop boundaries. - """ - await cleanup_db_and_redis(db, redis_client) - yield - # No post-test cleanup to avoid "Event loop is closed" errors diff --git a/backend/tests/integration/core/test_container.py b/backend/tests/integration/core/test_container.py deleted file mode 100644 index 85ef5122..00000000 --- a/backend/tests/integration/core/test_container.py +++ /dev/null @@ -1,19 +0,0 @@ -import pytest -from app.core.database_context import Database -from app.services.event_service import EventService -from dishka import AsyncContainer - -pytestmark = [pytest.mark.integration, pytest.mark.mongodb] - - -@pytest.mark.asyncio -async def test_container_resolves_services(app_container: AsyncContainer, scope: AsyncContainer) -> None: - # Container is the real Dishka container - assert isinstance(app_container, AsyncContainer) - - # Can resolve core dependencies from DI - db: Database = await scope.get(Database) - assert db.name and isinstance(db.name, str) - - svc: EventService = await scope.get(EventService) - assert isinstance(svc, EventService) diff --git a/backend/tests/integration/core/test_dishka_lifespan.py b/backend/tests/integration/core/test_dishka_lifespan.py deleted file mode 100644 index b1948131..00000000 --- a/backend/tests/integration/core/test_dishka_lifespan.py +++ /dev/null @@ -1,16 +0,0 @@ -from importlib import import_module - -from app.settings import Settings -from fastapi import FastAPI - - -def test_lifespan_container_attached(app: FastAPI) -> None: - # App fixture uses real lifespan; container is attached to app.state - assert isinstance(app, FastAPI) - assert hasattr(app.state, "dishka_container") - - -def 
test_create_app_attaches_container(test_settings: Settings) -> None: - app = import_module("app.main").create_app(settings=test_settings) - assert isinstance(app, FastAPI) - assert hasattr(app.state, "dishka_container") diff --git a/backend/tests/integration/services/__init__.py b/backend/tests/integration/services/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/backend/tests/integration/services/coordinator/test_execution_coordinator.py b/backend/tests/integration/services/coordinator/test_execution_coordinator.py deleted file mode 100644 index c3d3ed61..00000000 --- a/backend/tests/integration/services/coordinator/test_execution_coordinator.py +++ /dev/null @@ -1,18 +0,0 @@ -import pytest -from app.services.coordinator.coordinator import ExecutionCoordinator -from dishka import AsyncContainer -from tests.helpers import make_execution_requested_event - -pytestmark = pytest.mark.integration - - -@pytest.mark.asyncio -async def test_handle_requested_and_schedule(scope: AsyncContainer) -> None: - coord: ExecutionCoordinator = await scope.get(ExecutionCoordinator) - ev = make_execution_requested_event(execution_id="e-real-1") - - # Handler now schedules immediately - no polling needed - await coord._handle_execution_requested(ev) # noqa: SLF001 - - # Execution should be active immediately after handler returns - assert "e-real-1" in coord._active_executions # noqa: SLF001 diff --git a/backend/tests/integration/services/execution/test_execution_service.py b/backend/tests/integration/services/execution/test_execution_service.py deleted file mode 100644 index c3e689e9..00000000 --- a/backend/tests/integration/services/execution/test_execution_service.py +++ /dev/null @@ -1,22 +0,0 @@ -import pytest -from app.domain.execution import ResourceLimitsDomain -from app.services.execution_service import ExecutionService -from dishka import AsyncContainer - -pytestmark = pytest.mark.integration - - -@pytest.mark.asyncio -async def 
test_execute_script_and_limits(scope: AsyncContainer) -> None: - svc: ExecutionService = await scope.get(ExecutionService) - limits = await svc.get_k8s_resource_limits() - assert isinstance(limits, ResourceLimitsDomain) - assert limits.cpu_limit and limits.memory_limit and limits.supported_runtimes - ex = await svc.get_example_scripts() - assert isinstance(ex, dict) - - res = await svc.execute_script( - "print(1)", user_id="u", client_ip="127.0.0.1", user_agent="pytest", - lang="python", lang_version="3.11" - ) - assert res.execution_id and res.lang == "python" diff --git a/backend/tests/integration/services/notifications/test_notification_service.py b/backend/tests/integration/services/notifications/test_notification_service.py deleted file mode 100644 index e8440ad9..00000000 --- a/backend/tests/integration/services/notifications/test_notification_service.py +++ /dev/null @@ -1,39 +0,0 @@ -import pytest -from app.db.repositories import NotificationRepository -from app.domain.enums.notification import NotificationChannel, NotificationSeverity -from app.domain.notification import DomainNotificationCreate -from app.services.notification_service import NotificationService -from dishka import AsyncContainer - -pytestmark = [pytest.mark.integration, pytest.mark.mongodb] - - -@pytest.mark.asyncio -async def test_notification_service_crud_and_subscription(scope: AsyncContainer) -> None: - svc: NotificationService = await scope.get(NotificationService) - repo: NotificationRepository = await scope.get(NotificationRepository) - - # Create a notification via repository and then use service to mark/delete - n = DomainNotificationCreate( - user_id="u1", - severity=NotificationSeverity.MEDIUM, - tags=["x"], - channel=NotificationChannel.IN_APP, - subject="s", - body="b", - ) - created = await repo.create_notification(n) - got = await repo.get_notification(created.notification_id, "u1") - assert got is not None - - # Mark as read through service - ok = await 
svc.mark_as_read("u1", created.notification_id) - assert ok is True - - # Subscriptions via service wrapper calls the repo - await svc.update_subscription("u1", NotificationChannel.IN_APP, True) - sub = await repo.get_subscription("u1", NotificationChannel.IN_APP) - assert sub and sub.enabled is True - - # Delete via service - assert await svc.delete_notification("u1", created.notification_id) is True diff --git a/backend/tests/integration/services/replay/test_replay_service.py b/backend/tests/integration/services/replay/test_replay_service.py deleted file mode 100644 index 730a12c9..00000000 --- a/backend/tests/integration/services/replay/test_replay_service.py +++ /dev/null @@ -1,25 +0,0 @@ -import pytest -from app.domain.enums.replay import ReplayTarget, ReplayType -from app.services.event_replay import ReplayConfig, ReplayFilter -from app.services.replay_service import ReplayService -from dishka import AsyncContainer - -pytestmark = pytest.mark.integration - - -@pytest.mark.asyncio -async def test_replay_service_create_and_list(scope: AsyncContainer) -> None: - svc: ReplayService = await scope.get(ReplayService) - - cfg = ReplayConfig( - replay_type=ReplayType.EXECUTION, - target=ReplayTarget.TEST, - filter=ReplayFilter(), - max_events=1, - ) - res = await svc.create_session_from_config(cfg) - assert res.session_id and res.status.name in {"CREATED", "RUNNING", "COMPLETED"} - - # Sessions are tracked in memory; listing should work - sessions = svc.list_sessions(limit=10) - assert any(s.session_id == res.session_id for s in sessions) diff --git a/backend/tests/integration/services/saga/test_saga_service.py b/backend/tests/integration/services/saga/test_saga_service.py deleted file mode 100644 index 74780056..00000000 --- a/backend/tests/integration/services/saga/test_saga_service.py +++ /dev/null @@ -1,26 +0,0 @@ -from datetime import datetime, timezone - -import pytest -from app.domain.enums.user import UserRole -from app.schemas_pydantic.user import User -from 
app.services.saga.saga_service import SagaService -from dishka import AsyncContainer - -pytestmark = [pytest.mark.integration, pytest.mark.mongodb] - - -@pytest.mark.asyncio -async def test_saga_service_basic(scope: AsyncContainer) -> None: - svc: SagaService = await scope.get(SagaService) - user = User( - user_id="u1", - username="u1", - email="u1@example.com", - role=UserRole.USER, - is_active=True, - is_superuser=False, - created_at=datetime.now(timezone.utc), - updated_at=datetime.now(timezone.utc), - ) - res = await svc.list_user_sagas(user) - assert hasattr(res, "sagas") and isinstance(res.sagas, list) diff --git a/backend/tests/integration/services/saved_script/test_saved_script_service.py b/backend/tests/integration/services/saved_script/test_saved_script_service.py deleted file mode 100644 index 3eb23eb6..00000000 --- a/backend/tests/integration/services/saved_script/test_saved_script_service.py +++ /dev/null @@ -1,32 +0,0 @@ -import pytest -from app.domain.saved_script import DomainSavedScriptCreate, DomainSavedScriptUpdate, SavedScriptNotFoundError -from app.services.saved_script_service import SavedScriptService -from dishka import AsyncContainer - -pytestmark = [pytest.mark.integration, pytest.mark.mongodb] - - -def _create_payload() -> DomainSavedScriptCreate: - return DomainSavedScriptCreate(name="n", description=None, script="print(1)") - - -@pytest.mark.asyncio -async def test_crud_saved_script(scope: AsyncContainer) -> None: - service: SavedScriptService = await scope.get(SavedScriptService) - created = await service.create_saved_script(_create_payload(), user_id="u1") - assert created.user_id == "u1" - - got = await service.get_saved_script(str(created.script_id), "u1") - assert got and got.script_id == created.script_id - - out = await service.update_saved_script( - str(created.script_id), "u1", DomainSavedScriptUpdate(name="new", script="p") - ) - assert out and out.name == "new" - - lst = await service.list_saved_scripts("u1") - assert 
any(s.script_id == created.script_id for s in lst) - - await service.delete_saved_script(str(created.script_id), "u1") - with pytest.raises(SavedScriptNotFoundError): - await service.get_saved_script(str(created.script_id), "u1") diff --git a/backend/tests/integration/services/user_settings/test_user_settings_service.py b/backend/tests/integration/services/user_settings/test_user_settings_service.py deleted file mode 100644 index 1acb9d2e..00000000 --- a/backend/tests/integration/services/user_settings/test_user_settings_service.py +++ /dev/null @@ -1,46 +0,0 @@ -from datetime import datetime, timezone - -import pytest -from app.domain.enums import Theme -from app.domain.user.settings_models import ( - DomainEditorSettings, - DomainNotificationSettings, - DomainUserSettingsUpdate, -) -from app.services.user_settings_service import UserSettingsService -from dishka import AsyncContainer - -pytestmark = [pytest.mark.integration, pytest.mark.mongodb] - - -@pytest.mark.asyncio -async def test_get_update_and_history(scope: AsyncContainer) -> None: - svc: UserSettingsService = await scope.get(UserSettingsService) - user_id = "u1" - - s1 = await svc.get_user_settings(user_id) - s2 = await svc.get_user_settings(user_id) - assert s1.user_id == s2.user_id - await svc.invalidate_cache(user_id) - s3 = await svc.get_user_settings(user_id) - assert s3.user_id == user_id - - updates = DomainUserSettingsUpdate(theme=Theme.DARK, notifications=DomainNotificationSettings(), - editor=DomainEditorSettings(tab_size=4)) - updated = await svc.update_user_settings(user_id, updates, reason="r") - assert updated.theme == Theme.DARK - - hist = await svc.get_settings_history(user_id) - assert isinstance(hist, list) - - # Restore to current point (no-op but tests snapshot + event publish path) - _ = await svc.restore_settings_to_point(user_id, datetime.now(timezone.utc)) - - # Update wrappers + cache stats - await svc.update_theme(user_id, Theme.DARK) - await 
svc.update_notification_settings(user_id, DomainNotificationSettings()) - await svc.update_editor_settings(user_id, DomainEditorSettings(tab_size=2)) - await svc.update_custom_setting(user_id, "k", "v") - stats = svc.get_cache_stats() - # Cache size may be 0 due to event bus self-invalidation race condition - assert "cache_size" in stats and stats["cache_size"] >= 0 diff --git a/backend/tests/integration/test_auth_routes.py b/backend/tests/integration/test_auth_routes.py deleted file mode 100644 index 467619f2..00000000 --- a/backend/tests/integration/test_auth_routes.py +++ /dev/null @@ -1,413 +0,0 @@ -from uuid import uuid4 - -import pytest -from app.domain.enums.user import UserRole as UserRoleEnum -from app.schemas_pydantic.user import UserResponse -from httpx import AsyncClient - - -@pytest.mark.integration -class TestAuthentication: - """Test authentication endpoints against real backend.""" - - @pytest.mark.asyncio - async def test_user_registration_success(self, client: AsyncClient) -> None: - """Test successful user registration with all required fields.""" - unique_id = str(uuid4())[:8] - registration_data = { - "username": f"test_auth_user_{unique_id}", - "email": f"test_auth_{unique_id}@example.com", - "password": "SecureP@ssw0rd123" - } - - response = await client.post("/api/v1/auth/register", json=registration_data) - assert response.status_code in [200, 201] - - # Validate response structure - user_data = response.json() - user = UserResponse(**user_data) - - # Verify all expected fields - assert user.username == registration_data["username"] - assert user.email == registration_data["email"] - assert user.role == UserRoleEnum.USER # Default role - assert user.is_active is True - assert "password" not in user_data - assert "hashed_password" not in user_data - - # Verify user_id is a valid UUID-like string - assert user.user_id is not None - assert len(user.user_id) > 0 - - # Verify timestamps - assert user.created_at is not None - assert 
user.updated_at is not None - - # Verify default values - assert user.is_superuser is False - - @pytest.mark.asyncio - async def test_user_registration_with_weak_password(self, client: AsyncClient) -> None: - """Test that registration fails with weak passwords.""" - unique_id = str(uuid4())[:8] - registration_data = { - "username": f"test_weak_pwd_{unique_id}", - "email": f"test_weak_{unique_id}@example.com", - "password": "weak" # Too short - } - - response = await client.post("/api/v1/auth/register", json=registration_data) - assert response.status_code in [400, 422] - - error_data = response.json() - assert "detail" in error_data - # Error message should mention password requirements - # Detail might be a string or list of validation errors - if isinstance(error_data["detail"], list): - error_text = str(error_data["detail"]).lower() - else: - error_text = error_data["detail"].lower() - assert any(word in error_text for word in ["password", "length", "characters", "weak", "short"]) - - @pytest.mark.asyncio - async def test_duplicate_username_registration(self, client: AsyncClient) -> None: - """Test that duplicate username registration is prevented.""" - unique_id = str(uuid4())[:8] - registration_data = { - "username": f"duplicate_user_{unique_id}", - "email": f"duplicate1_{unique_id}@example.com", - "password": "SecureP@ssw0rd123" - } - - # First registration should succeed - first_response = await client.post("/api/v1/auth/register", json=registration_data) - assert first_response.status_code in [200, 201] - - # Attempt duplicate registration with same username, different email - duplicate_data = { - "username": registration_data["username"], # Same username - "email": f"duplicate2_{unique_id}@example.com", # Different email - "password": "SecureP@ssw0rd123" - } - - duplicate_response = await client.post("/api/v1/auth/register", json=duplicate_data) - assert duplicate_response.status_code in [400, 409] - - error_data = duplicate_response.json() - assert 
"detail" in error_data - assert any(word in error_data["detail"].lower() - for word in ["already", "exists", "taken", "duplicate"]) - - @pytest.mark.asyncio - async def test_duplicate_email_registration(self, client: AsyncClient) -> None: - """Test that duplicate email registration is prevented.""" - unique_id = str(uuid4())[:8] - registration_data = { - "username": f"user_email1_{unique_id}", - "email": f"duplicate_email_{unique_id}@example.com", - "password": "SecureP@ssw0rd123" - } - - # First registration should succeed - first_response = await client.post("/api/v1/auth/register", json=registration_data) - assert first_response.status_code in [200, 201] - - # Attempt duplicate registration with same email, different username - duplicate_data = { - "username": f"user_email2_{unique_id}", # Different username - "email": registration_data["email"], # Same email - "password": "SecureP@ssw0rd123" - } - - duplicate_response = await client.post("/api/v1/auth/register", json=duplicate_data) - # Backend might allow duplicate emails but not duplicate usernames - # If it allows the registration, that's also valid behavior - assert duplicate_response.status_code in [200, 201, 400, 409] - - @pytest.mark.asyncio - async def test_login_success_with_valid_credentials(self, client: AsyncClient) -> None: - """Test successful login with valid credentials.""" - unique_id = str(uuid4())[:8] - registration_data = { - "username": f"login_test_{unique_id}", - "email": f"login_{unique_id}@example.com", - "password": "SecureLoginP@ss123" - } - - # Register user - reg_response = await client.post("/api/v1/auth/register", json=registration_data) - assert reg_response.status_code in [200, 201] - - # Login with form data - login_data = { - "username": registration_data["username"], - "password": registration_data["password"] - } - login_response = await client.post("/api/v1/auth/login", data=login_data) - assert login_response.status_code == 200 - - response_data = login_response.json() - - 
# Backend uses cookie-based auth, not JWT in response body - # Verify response structure matches actual API - assert "message" in response_data - assert response_data["message"] == "Login successful" - assert "username" in response_data - assert response_data["username"] == registration_data["username"] - assert "role" in response_data - - # CSRF token should be present - assert "csrf_token" in response_data - assert len(response_data["csrf_token"]) > 0 - - # Verify cookie is set - cookies = login_response.cookies - assert len(cookies) > 0 # Should have at least one cookie - - @pytest.mark.asyncio - async def test_login_failure_with_wrong_password(self, client: AsyncClient) -> None: - """Test that login fails with incorrect password.""" - unique_id = str(uuid4())[:8] - registration_data = { - "username": f"wrong_pwd_{unique_id}", - "email": f"wrong_pwd_{unique_id}@example.com", - "password": "CorrectP@ssw0rd123" - } - - # Register user - reg_response = await client.post("/api/v1/auth/register", json=registration_data) - assert reg_response.status_code in [200, 201] - - # Attempt login with wrong password - login_data = { - "username": registration_data["username"], - "password": "WrongPassword123" - } - login_response = await client.post("/api/v1/auth/login", data=login_data) - assert login_response.status_code == 401 - - error_data = login_response.json() - assert "detail" in error_data - assert any(word in error_data["detail"].lower() - for word in ["invalid", "incorrect", "credentials", "unauthorized"]) - - @pytest.mark.asyncio - async def test_login_failure_with_nonexistent_user(self, client: AsyncClient) -> None: - """Test that login fails for non-existent user.""" - unique_id = str(uuid4())[:8] - login_data = { - "username": f"nonexistent_user_{unique_id}", - "password": "AnyP@ssw0rd123" - } - - login_response = await client.post("/api/v1/auth/login", data=login_data) - assert login_response.status_code == 401 - - error_data = login_response.json() - assert 
"detail" in error_data - - @pytest.mark.asyncio - async def test_get_current_user_info(self, client: AsyncClient) -> None: - """Test getting current user information via /me endpoint.""" - unique_id = str(uuid4())[:8] - registration_data = { - "username": f"me_test_{unique_id}", - "email": f"me_test_{unique_id}@example.com", - "password": "SecureP@ssw0rd123" - } - - # Register user - reg_response = await client.post("/api/v1/auth/register", json=registration_data) - assert reg_response.status_code in [200, 201] - - # Login - login_data = { - "username": registration_data["username"], - "password": registration_data["password"] - } - login_response = await client.post("/api/v1/auth/login", data=login_data) - assert login_response.status_code == 200 - - # Get current user info (cookies from login should be preserved) - me_response = await client.get("/api/v1/auth/me") - assert me_response.status_code == 200 - - user_data = me_response.json() - user = UserResponse(**user_data) - - # Verify user data matches registration - assert user.username == registration_data["username"] - assert user.email == registration_data["email"] - assert user.role == UserRoleEnum.USER - assert user.is_active is True - - # Verify no sensitive data is exposed - assert "password" not in user_data - assert "hashed_password" not in user_data - - @pytest.mark.asyncio - async def test_unauthorized_access_without_auth(self, client: AsyncClient) -> None: - """Test that protected endpoints require authentication.""" - # Try to access /me without authentication - response = await client.get("/api/v1/auth/me") - assert response.status_code == 401 - - error_data = response.json() - assert "detail" in error_data - assert any(word in error_data["detail"].lower() - for word in ["not authenticated", "unauthorized", "login"]) - - @pytest.mark.asyncio - async def test_logout_clears_session(self, client: AsyncClient) -> None: - """Test logout functionality clears the session.""" - unique_id = str(uuid4())[:8] 
- registration_data = { - "username": f"logout_test_{unique_id}", - "email": f"logout_{unique_id}@example.com", - "password": "SecureP@ssw0rd123" - } - - # Register and login - reg_response = await client.post("/api/v1/auth/register", json=registration_data) - assert reg_response.status_code in [200, 201] - - login_data = { - "username": registration_data["username"], - "password": registration_data["password"] - } - login_response = await client.post("/api/v1/auth/login", data=login_data) - assert login_response.status_code == 200 - - # Verify we can access protected endpoint - me_response = await client.get("/api/v1/auth/me") - assert me_response.status_code == 200 - - # Logout - logout_response = await client.post("/api/v1/auth/logout") - assert logout_response.status_code == 200 - - logout_data = logout_response.json() - assert "message" in logout_data or "detail" in logout_data - - # Try to access protected endpoint again - should fail - me_after_logout = await client.get("/api/v1/auth/me") - assert me_after_logout.status_code == 401 - - @pytest.mark.asyncio - async def test_verify_token_endpoint(self, client: AsyncClient) -> None: - """Test token verification endpoint.""" - unique_id = str(uuid4())[:8] - registration_data = { - "username": f"verify_token_{unique_id}", - "email": f"verify_{unique_id}@example.com", - "password": "SecureP@ssw0rd123" - } - - # Register and login - reg_response = await client.post("/api/v1/auth/register", json=registration_data) - assert reg_response.status_code in [200, 201] - - login_data = { - "username": registration_data["username"], - "password": registration_data["password"] - } - login_response = await client.post("/api/v1/auth/login", data=login_data) - assert login_response.status_code == 200 - - # Verify token - verify_response = await client.get("/api/v1/auth/verify-token") - assert verify_response.status_code == 200 - - verify_data = verify_response.json() - assert "valid" in verify_data - assert verify_data["valid"] 
is True - - # Additional fields that might be returned - if "username" in verify_data: - assert verify_data["username"] == registration_data["username"] - - @pytest.mark.asyncio - async def test_invalid_email_format_rejected(self, client: AsyncClient) -> None: - """Test that invalid email formats are rejected during registration.""" - unique_id = str(uuid4())[:8] - invalid_emails = [ - "not-an-email", - "@example.com", - "user@", - "user@.com", - ] - - for invalid_email in invalid_emails: - registration_data = { - "username": f"invalid_email_{unique_id}", - "email": invalid_email, - "password": "ValidP@ssw0rd123" - } - - response = await client.post("/api/v1/auth/register", json=registration_data) - assert response.status_code in [400, 422] - - error_data = response.json() - assert "detail" in error_data - - # Update unique_id for next iteration to avoid username conflicts - unique_id = str(uuid4())[:8] - - @pytest.mark.asyncio - async def test_csrf_token_generation(self, client: AsyncClient) -> None: - """Test CSRF token generation on login.""" - unique_id = str(uuid4())[:8] - registration_data = { - "username": f"csrf_test_{unique_id}", - "email": f"csrf_{unique_id}@example.com", - "password": "SecureP@ssw0rd123" - } - - # Register user - reg_response = await client.post("/api/v1/auth/register", json=registration_data) - assert reg_response.status_code in [200, 201] - - # Login - login_data = { - "username": registration_data["username"], - "password": registration_data["password"] - } - login_response = await client.post("/api/v1/auth/login", data=login_data) - assert login_response.status_code == 200 - - response_data = login_response.json() - - # CSRF token should be generated (if implementation includes it) - if "csrf_token" in response_data: - assert len(response_data["csrf_token"]) > 0 - # CSRF tokens are typically base64 or hex strings - assert isinstance(response_data["csrf_token"], str) - - @pytest.mark.asyncio - async def 
test_session_persistence_across_requests(self, client: AsyncClient) -> None: - """Test that session persists across multiple requests after login.""" - unique_id = str(uuid4())[:8] - registration_data = { - "username": f"session_test_{unique_id}", - "email": f"session_{unique_id}@example.com", - "password": "SecureP@ssw0rd123" - } - - # Register and login - reg_response = await client.post("/api/v1/auth/register", json=registration_data) - assert reg_response.status_code in [200, 201] - - login_data = { - "username": registration_data["username"], - "password": registration_data["password"] - } - login_response = await client.post("/api/v1/auth/login", data=login_data) - assert login_response.status_code == 200 - - # Make multiple authenticated requests - for _ in range(3): - me_response = await client.get("/api/v1/auth/me") - assert me_response.status_code == 200 - - user_data = me_response.json() - assert user_data["username"] == registration_data["username"] diff --git a/backend/tests/integration/test_dlq_routes.py b/backend/tests/integration/test_dlq_routes.py deleted file mode 100644 index ef59fd82..00000000 --- a/backend/tests/integration/test_dlq_routes.py +++ /dev/null @@ -1,376 +0,0 @@ -from datetime import datetime -from typing import TypedDict - -import pytest -from app.dlq import AgeStatistics, DLQMessageStatus, EventTypeStatistic, TopicStatistic -from app.schemas_pydantic.dlq import ( - DLQBatchRetryResponse, - DLQMessageDetail, - DLQMessageResponse, - DLQMessagesResponse, - DLQStats, - DLQTopicSummaryResponse, -) -from app.schemas_pydantic.user import MessageResponse -from app.settings import Settings -from httpx import AsyncClient - - -class _RetryRequest(TypedDict): - event_ids: list[str] - - -@pytest.mark.integration -class TestDLQRoutes: - """Test DLQ endpoints against real backend.""" - - @pytest.mark.asyncio - async def test_dlq_requires_authentication(self, client: AsyncClient) -> None: - """Test that DLQ endpoints require authentication.""" - 
# Try to access DLQ stats without auth - response = await client.get("/api/v1/dlq/stats") - assert response.status_code == 401 - - error_data = response.json() - assert "detail" in error_data - assert any(word in error_data["detail"].lower() - for word in ["not authenticated", "unauthorized", "login"]) - - @pytest.mark.asyncio - async def test_get_dlq_statistics(self, test_user: AsyncClient) -> None: - """Test getting DLQ statistics.""" - # Get DLQ stats - response = await test_user.get("/api/v1/dlq/stats") - assert response.status_code == 200 - - # Validate response structure - stats_data = response.json() - stats = DLQStats(**stats_data) - - # Verify structure - using typed models - assert isinstance(stats.by_status, dict) - assert isinstance(stats.by_topic, list) - assert isinstance(stats.by_event_type, list) - assert isinstance(stats.age_stats, AgeStatistics) - assert stats.timestamp is not None - - # Check status breakdown - iterate over actual enum values - for status in DLQMessageStatus: - if status in stats.by_status: - assert isinstance(stats.by_status[status], int) - assert stats.by_status[status] >= 0 - - # Check topic stats - now typed as TopicStatistic - for topic_stat in stats.by_topic: - assert isinstance(topic_stat, TopicStatistic) - assert topic_stat.count >= 0 - - # Check event type stats - now typed as EventTypeStatistic - for event_type_stat in stats.by_event_type: - assert isinstance(event_type_stat, EventTypeStatistic) - assert event_type_stat.count >= 0 - - # Check age stats - now typed as AgeStatistics - assert stats.age_stats.min_age_seconds >= 0 - assert stats.age_stats.max_age_seconds >= 0 - assert stats.age_stats.avg_age_seconds >= 0 - - @pytest.mark.asyncio - async def test_list_dlq_messages(self, test_user: AsyncClient) -> None: - """Test listing DLQ messages with filters.""" - # List all DLQ messages - response = await test_user.get("/api/v1/dlq/messages?limit=10&offset=0") - assert response.status_code == 200 - - # Validate response 
structure - messages_data = response.json() - messages_response = DLQMessagesResponse(**messages_data) - - # Verify pagination - assert isinstance(messages_response.messages, list) - assert isinstance(messages_response.total, int) - assert messages_response.limit == 10 - assert messages_response.offset == 0 - assert messages_response.total >= 0 - - # If there are messages, validate their structure - for message in messages_response.messages: - assert isinstance(message, DLQMessageResponse) - assert message.event.event_id is not None - assert message.event.event_type is not None - assert message.original_topic is not None - assert message.retry_count >= 0 - assert message.failed_at is not None - assert message.status in DLQMessageStatus.__members__.values() - - @pytest.mark.asyncio - async def test_filter_dlq_messages_by_status(self, test_user: AsyncClient) -> None: - """Test filtering DLQ messages by status.""" - # Test different status filters - for status in ["pending", "scheduled", "retried", "discarded"]: - response = await test_user.get(f"/api/v1/dlq/messages?status={status}&limit=5") - assert response.status_code == 200 - - messages_data = response.json() - messages_response = DLQMessagesResponse(**messages_data) - - # All returned messages should have the requested status - for message in messages_response.messages: - assert message.status == status - - @pytest.mark.asyncio - async def test_filter_dlq_messages_by_topic(self, test_user: AsyncClient) -> None: - """Test filtering DLQ messages by topic.""" - # Filter by a specific topic - test_topic = "execution-events" - response = await test_user.get(f"/api/v1/dlq/messages?topic={test_topic}&limit=5") - assert response.status_code == 200 - - messages_data = response.json() - messages_response = DLQMessagesResponse(**messages_data) - - # All returned messages should be from the requested topic - for message in messages_response.messages: - assert message.original_topic == test_topic - - @pytest.mark.asyncio - 
async def test_get_single_dlq_message_detail(self, test_user: AsyncClient) -> None: - """Test getting detailed information for a single DLQ message.""" - # First get list of messages to find an ID - list_response = await test_user.get("/api/v1/dlq/messages?limit=1") - assert list_response.status_code == 200 - - messages_data = list_response.json() - if messages_data["total"] > 0 and messages_data["messages"]: - # Get details for the first message - event_id = messages_data["messages"][0]["event_id"] - - detail_response = await test_user.get(f"/api/v1/dlq/messages/{event_id}") - assert detail_response.status_code == 200 - - # Validate detailed response - detail_data = detail_response.json() - message_detail = DLQMessageDetail(**detail_data) - - # Verify all fields are present - event is DomainEvent with event_id/event_type - assert message_detail.event is not None - assert message_detail.event.event_id == event_id - assert message_detail.event.event_type is not None - assert message_detail.original_topic is not None - assert message_detail.error is not None - assert message_detail.retry_count >= 0 - assert message_detail.failed_at is not None - assert message_detail.status in DLQMessageStatus.__members__.values() - assert message_detail.created_at is not None - assert message_detail.last_updated is not None - - # Optional fields - if message_detail.producer_id: - assert isinstance(message_detail.producer_id, str) - if message_detail.dlq_offset is not None: - assert message_detail.dlq_offset >= 0 - if message_detail.dlq_partition is not None: - assert message_detail.dlq_partition >= 0 - - @pytest.mark.asyncio - async def test_get_nonexistent_dlq_message(self, test_user: AsyncClient) -> None: - """Test getting a non-existent DLQ message.""" - # Try to get non-existent message - fake_event_id = "00000000-0000-0000-0000-000000000000" - response = await test_user.get(f"/api/v1/dlq/messages/{fake_event_id}") - assert response.status_code == 404 - - error_data = 
response.json() - assert "detail" in error_data - assert "not found" in error_data["detail"].lower() - - @pytest.mark.asyncio - async def test_set_retry_policy( - self, test_user: AsyncClient, test_settings: Settings - ) -> None: - """Test setting a retry policy for a topic.""" - # Set retry policy - topic = f"{test_settings.KAFKA_TOPIC_PREFIX}test-topic" - policy_data = { - "topic": topic, - "strategy": "exponential_backoff", - "max_retries": 5, - "base_delay_seconds": 10, - "max_delay_seconds": 3600, - "retry_multiplier": 2.0 - } - - response = await test_user.post("/api/v1/dlq/retry-policy", json=policy_data) - assert response.status_code == 200 - - # Validate response - result_data = response.json() - result = MessageResponse(**result_data) - assert "retry policy set" in result.message.lower() - assert topic in result.message - - @pytest.mark.asyncio - async def test_retry_dlq_messages_batch(self, test_user: AsyncClient) -> None: - """Test retrying a batch of DLQ messages.""" - # Get some failed messages to retry - list_response = await test_user.get("/api/v1/dlq/messages?status=discarded&limit=3") - assert list_response.status_code == 200 - - messages_data = list_response.json() - if messages_data["total"] > 0 and messages_data["messages"]: - # Collect event IDs to retry - event_ids = [msg["event_id"] for msg in messages_data["messages"][:2]] - - # Retry the messages - retry_request = { - "event_ids": event_ids - } - - retry_response = await test_user.post("/api/v1/dlq/retry", json=retry_request) - assert retry_response.status_code == 200 - - # Validate retry response - retry_data = retry_response.json() - batch_result = DLQBatchRetryResponse(**retry_data) - - assert batch_result.total == len(event_ids) - assert batch_result.successful >= 0 - assert batch_result.failed >= 0 - assert batch_result.successful + batch_result.failed == batch_result.total - - # Check details if present - if batch_result.details: - assert isinstance(batch_result.details, list) - for 
detail in batch_result.details: - assert isinstance(detail, dict) - assert "event_id" in detail - assert "success" in detail - - @pytest.mark.asyncio - async def test_discard_dlq_message(self, test_user: AsyncClient) -> None: - """Test discarding a DLQ message.""" - # Get a failed message to discard - list_response = await test_user.get("/api/v1/dlq/messages?status=discarded&limit=1") - assert list_response.status_code == 200 - - messages_data = list_response.json() - if messages_data["total"] > 0 and messages_data["messages"]: - event_id = messages_data["messages"][0]["event_id"] - - # Discard the message - discard_reason = "Test discard - message unrecoverable" - discard_response = await test_user.delete( - f"/api/v1/dlq/messages/{event_id}?reason={discard_reason}" - ) - assert discard_response.status_code == 200 - - # Validate response - result_data = discard_response.json() - result = MessageResponse(**result_data) - assert "discarded" in result.message.lower() - assert event_id in result.message - - # Verify message is now discarded - detail_response = await test_user.get(f"/api/v1/dlq/messages/{event_id}") - if detail_response.status_code == 200: - detail_data = detail_response.json() - # Status should be discarded - assert detail_data["status"] == "discarded" - - @pytest.mark.asyncio - async def test_get_dlq_topics_summary(self, test_user: AsyncClient) -> None: - """Test getting DLQ topics summary.""" - # Get topics summary - response = await test_user.get("/api/v1/dlq/topics") - assert response.status_code == 200 - - # Validate response - topics_data = response.json() - assert isinstance(topics_data, list) - - for topic_data in topics_data: - topic_summary = DLQTopicSummaryResponse(**topic_data) - - # Verify structure - assert topic_summary.topic is not None - assert isinstance(topic_summary.total_messages, int) - assert topic_summary.total_messages >= 0 - assert isinstance(topic_summary.status_breakdown, dict) - - # Check status breakdown - for status, 
count in topic_summary.status_breakdown.items(): - assert status in ["pending", "scheduled", "retried", "discarded"] - assert isinstance(count, int) - assert count >= 0 - - # Check dates if present (may be str or datetime) - if topic_summary.oldest_message: - assert isinstance(topic_summary.oldest_message, (str, datetime)) - if topic_summary.newest_message: - assert isinstance(topic_summary.newest_message, (str, datetime)) - - # Check retry stats - if topic_summary.avg_retry_count is not None: - assert topic_summary.avg_retry_count >= 0 - if topic_summary.max_retry_count is not None: - assert topic_summary.max_retry_count >= 0 - - @pytest.mark.asyncio - async def test_dlq_message_pagination(self, test_user: AsyncClient) -> None: - """Test DLQ message pagination.""" - # Get first page - page1_response = await test_user.get("/api/v1/dlq/messages?limit=5&offset=0") - assert page1_response.status_code == 200 - - page1_data = page1_response.json() - page1 = DLQMessagesResponse(**page1_data) - - # If there are more than 5 messages, get second page - if page1.total > 5: - page2_response = await test_user.get("/api/v1/dlq/messages?limit=5&offset=5") - assert page2_response.status_code == 200 - - page2_data = page2_response.json() - page2 = DLQMessagesResponse(**page2_data) - - # Verify pagination - assert page2.offset == 5 - assert page2.limit == 5 - assert page2.total == page1.total - - # Messages should be different - if page1.messages and page2.messages: - page1_ids = {msg.event.event_id for msg in page1.messages} - page2_ids = {msg.event.event_id for msg in page2.messages} - # Should have no overlap - assert len(page1_ids.intersection(page2_ids)) == 0 - - @pytest.mark.asyncio - async def test_dlq_error_handling(self, test_user: AsyncClient) -> None: - """Test DLQ error handling for invalid requests.""" - # Test invalid limit - response = await test_user.get("/api/v1/dlq/messages?limit=10000") # Too high - # Should either accept with max limit or reject - assert 
response.status_code in [200, 400, 422] - - # Test negative offset - response = await test_user.get("/api/v1/dlq/messages?limit=10&offset=-1") - assert response.status_code in [400, 422] - - # Test invalid status filter - response = await test_user.get("/api/v1/dlq/messages?status=invalid_status") - assert response.status_code in [400, 422] - - # Test retry with empty list - retry_request: _RetryRequest = { - "event_ids": [] - } - response = await test_user.post("/api/v1/dlq/retry", json=retry_request) - # Should handle gracefully or reject invalid input - assert response.status_code in [200, 400, 404, 422] - - # Test discard without reason - fake_event_id = "00000000-0000-0000-0000-000000000000" - response = await test_user.delete(f"/api/v1/dlq/messages/{fake_event_id}") - # Should require reason parameter - assert response.status_code in [400, 422, 404] diff --git a/backend/tests/integration/test_events_routes.py b/backend/tests/integration/test_events_routes.py deleted file mode 100644 index b5de5950..00000000 --- a/backend/tests/integration/test_events_routes.py +++ /dev/null @@ -1,499 +0,0 @@ -from datetime import datetime, timedelta, timezone -from uuid import uuid4 - -import pytest -from app.domain.enums.events import EventType -from app.schemas_pydantic.events import ( - EventListResponse, - EventResponse, - EventStatistics, - PublishEventResponse, - ReplayAggregateResponse, -) -from httpx import AsyncClient - - -@pytest.mark.integration -class TestEventsRoutes: - """Test events endpoints against real backend.""" - - @pytest.mark.asyncio - async def test_events_require_authentication(self, client: AsyncClient) -> None: - """Test that event endpoints require authentication.""" - # Try to access events without auth - response = await client.get("/api/v1/events/user") - assert response.status_code == 401 - - error_data = response.json() - assert "detail" in error_data - assert any(word in error_data["detail"].lower() - for word in ["not authenticated", 
"unauthorized", "login"]) - - @pytest.mark.asyncio - async def test_get_user_events(self, test_user: AsyncClient) -> None: - """Test getting user's events.""" - # Already authenticated via test_user fixture - - # Get user events - response = await test_user.get("/api/v1/events/user?limit=10&skip=0") - # Some deployments may route this path under a dynamic segment and return 404. - # Accept 200 with a valid payload or 404 (no such resource). - assert response.status_code in [200, 404] - if response.status_code == 200: - # Validate response structure - events_data = response.json() - events_response = EventListResponse(**events_data) - - # Verify pagination - assert isinstance(events_response.events, list) - assert isinstance(events_response.total, int) - assert events_response.limit == 10 - assert events_response.skip == 0 - assert isinstance(events_response.has_more, bool) - assert events_response.total >= 0 - - # If there are events, validate their structure - for event in events_response.events: - assert isinstance(event, EventResponse) - assert event.event_id is not None - assert event.event_type is not None - assert event.aggregate_id is not None - assert event.timestamp is not None - assert event.event_version is not None - assert event.metadata is not None - assert event.metadata.user_id is not None - - # Optional fields - if event.payload: - assert isinstance(event.payload, dict) - if event.correlation_id: - assert isinstance(event.correlation_id, str) - - @pytest.mark.asyncio - async def test_get_user_events_with_filters(self, test_user: AsyncClient) -> None: - """Test filtering user events.""" - # Already authenticated via test_user fixture - - # Create an execution to generate events - execution_request = { - "script": "print('Test for event filtering')", - "lang": "python", - "lang_version": "3.11" - } - exec_response = await test_user.post("/api/v1/execute", json=execution_request) - assert exec_response.status_code == 200 - - # Filter by event types - 
event_types = ["execution.requested", "execution.completed"] - params: dict[str, str | int | list[str]] = { - "event_types": event_types, - "limit": 20, - "sort_order": "desc" - } - - response = await test_user.get("/api/v1/events/user", params=params) - assert response.status_code in [200, 404] - if response.status_code == 200: - events_data = response.json() - events_response = EventListResponse(**events_data) - - # Filtered events should only contain specified types - for event in events_response.events: - if event.event_type: # Some events might have been created - assert any(event_type in event.event_type for event_type in event_types) or len( - events_response.events) == 0 - - @pytest.mark.asyncio - async def test_get_execution_events(self, test_user: AsyncClient) -> None: - """Test getting events for a specific execution.""" - # Create an execution - execution_request = { - "script": "print('Test execution events')", - "lang": "python", - "lang_version": "3.11" - } - exec_response = await test_user.post("/api/v1/execute", json=execution_request) - assert exec_response.status_code == 200 - - execution_id = exec_response.json()["execution_id"] - - # Get execution events (JSON, not SSE stream) - response = await test_user.get( - f"/api/v1/events/executions/{execution_id}/events?include_system_events=true" - ) - assert response.status_code == 200 - - events_data = response.json() - events_response = EventListResponse(**events_data) - - # Should return a valid payload; some environments may have no persisted events - assert isinstance(events_response.events, list) - - # All events should be for this execution - for event in events_response.events: - # Check if execution_id is in aggregate_id or payload - if event.aggregate_id: - assert execution_id in event.aggregate_id or event.aggregate_id == execution_id - - @pytest.mark.asyncio - async def test_query_events_advanced(self, test_user: AsyncClient) -> None: - """Test advanced event querying with filters.""" - # 
Query events with multiple filters - query_request = { - "event_types": [ - EventType.EXECUTION_REQUESTED, - EventType.EXECUTION_COMPLETED - ], - "start_time": (datetime.now(timezone.utc) - timedelta(days=7)).isoformat(), - "end_time": datetime.now(timezone.utc).isoformat(), - "limit": 50, - "skip": 0, - "sort_by": "timestamp", - "sort_order": "desc" - } - - response = await test_user.post("/api/v1/events/query", json=query_request) - assert response.status_code == 200 - - events_data = response.json() - events_response = EventListResponse(**events_data) - - # Verify query results - assert isinstance(events_response.events, list) - assert events_response.limit == 50 - assert events_response.skip == 0 - - # Events should be sorted by timestamp descending - if len(events_response.events) > 1: - for i in range(len(events_response.events) - 1): - t1 = events_response.events[i].timestamp - t2 = events_response.events[i + 1].timestamp - assert isinstance(t1, datetime) and isinstance(t2, datetime) - assert t1 >= t2 # Descending order - - @pytest.mark.asyncio - async def test_get_events_by_correlation_id(self, test_user: AsyncClient) -> None: - """Test getting events by correlation ID.""" - # Create an execution (which generates correlated events) - execution_request = { - "script": "print('Test correlation')", - "lang": "python", - "lang_version": "3.11" - } - exec_response = await test_user.post("/api/v1/execute", json=execution_request) - assert exec_response.status_code == 200 - - # Get events for the user to find a correlation ID - user_events_response = await test_user.get("/api/v1/events/user?limit=10") - assert user_events_response.status_code == 200 - - user_events = user_events_response.json() - if user_events["events"] and user_events["events"][0].get("correlation_id"): - correlation_id = user_events["events"][0]["correlation_id"] - - # Get events by correlation ID - response = await test_user.get(f"/api/v1/events/correlation/{correlation_id}?limit=50") - assert 
response.status_code == 200 - - correlated_events = response.json() - events_response = EventListResponse(**correlated_events) - - # All events should have the same correlation ID - for event in events_response.events: - if event.correlation_id: - assert event.correlation_id == correlation_id - - @pytest.mark.asyncio - async def test_get_current_request_events(self, test_user: AsyncClient) -> None: - """Test getting events for the current request.""" - # Get current request events (might be empty if no correlation context) - response = await test_user.get("/api/v1/events/current-request?limit=10") - assert response.status_code == 200 - - events_data = response.json() - events_response = EventListResponse(**events_data) - - # Should return a valid response (might be empty) - assert isinstance(events_response.events, list) - assert events_response.total >= 0 - - @pytest.mark.asyncio - async def test_get_event_statistics(self, test_user: AsyncClient) -> None: - """Test getting event statistics.""" - # Get statistics for last 24 hours - response = await test_user.get("/api/v1/events/statistics") - assert response.status_code == 200 - - stats_data = response.json() - stats = EventStatistics(**stats_data) - - # Verify statistics structure - assert isinstance(stats.total_events, int) - assert stats.total_events >= 0 - assert isinstance(stats.events_by_type, dict) - assert isinstance(stats.events_by_hour, list) - # Optional extra fields may not be present in this deployment - - # Optional window fields are allowed by schema; no strict check here - - # Events by hour should have proper structure - for hourly_stat in stats.events_by_hour: - # HourlyEventCountSchema has hour: str and count: int - assert isinstance(hourly_stat.hour, str) - assert isinstance(hourly_stat.count, int) - assert hourly_stat.count >= 0 - - @pytest.mark.asyncio - async def test_get_single_event(self, test_user: AsyncClient) -> None: - """Test getting a single event by ID.""" - # Get user events to 
find an event ID - events_response = await test_user.get("/api/v1/events/user?limit=1") - assert events_response.status_code == 200 - - events_data = events_response.json() - if events_data["total"] > 0 and events_data["events"]: - event_id = events_data["events"][0]["event_id"] - - # Get single event - response = await test_user.get(f"/api/v1/events/{event_id}") - assert response.status_code == 200 - - event_data = response.json() - event = EventResponse(**event_data) - - # Verify it's the correct event - assert event.event_id == event_id - assert event.event_type is not None - assert event.timestamp is not None - - @pytest.mark.asyncio - async def test_get_nonexistent_event(self, test_user: AsyncClient) -> None: - """Test getting a non-existent event.""" - # Try to get non-existent event - fake_event_id = str(uuid4()) - response = await test_user.get(f"/api/v1/events/{fake_event_id}") - assert response.status_code == 404 - - error_data = response.json() - assert "detail" in error_data - assert "not found" in error_data["detail"].lower() - - @pytest.mark.asyncio - async def test_list_event_types(self, test_user: AsyncClient) -> None: - """Test listing available event types.""" - # List event types - response = await test_user.get("/api/v1/events/types/list") - assert response.status_code == 200 - - event_types = response.json() - assert isinstance(event_types, list) - - # Event types should be non-empty strings - for event_type in event_types: - assert isinstance(event_type, str) - assert len(event_type) > 0 - - @pytest.mark.asyncio - async def test_publish_custom_event_requires_admin(self, test_user: AsyncClient) -> None: - """Test that publishing custom events requires admin privileges.""" - # Try to publish custom event (logged in as regular user via fixture) - publish_request = { - "event_type": EventType.SYSTEM_ERROR, - "payload": { - "test": "data", - "value": 123 - }, - "aggregate_id": str(uuid4()), - "correlation_id": str(uuid4()) - } - - response = await 
test_user.post("/api/v1/events/publish", json=publish_request) - assert response.status_code == 403 # Forbidden for non-admin - - @pytest.mark.asyncio - @pytest.mark.kafka - async def test_publish_custom_event_as_admin(self, test_admin: AsyncClient) -> None: - """Test publishing custom events as admin.""" - # Publish custom event (requires Kafka); skip if not available - aggregate_id = str(uuid4()) - publish_request = { - "event_type": EventType.SYSTEM_ERROR, - "payload": { - "error_type": "test_error", - "message": "Admin test system error", - "service_name": "tests" - }, - "aggregate_id": aggregate_id, - "correlation_id": str(uuid4()), - "metadata": { - "source": "integration_test", - "version": "1.0" - } - } - - response = await test_admin.post("/api/v1/events/publish", json=publish_request) - if response.status_code != 200: - pytest.skip("Kafka not available for publishing events") - - publish_response = PublishEventResponse(**response.json()) - assert publish_response.event_id is not None - assert publish_response.status == "published" - assert publish_response.timestamp is not None - - @pytest.mark.asyncio - async def test_aggregate_events(self, test_user: AsyncClient) -> None: - """Test event aggregation.""" - # Create aggregation pipeline - aggregation_request = { - "pipeline": [ - {"$match": {"event_type": {"$regex": "execution"}}}, - {"$group": {"_id": "$event_type", "count": {"$sum": 1}}}, - {"$sort": {"count": -1}} - ], - "limit": 10 - } - - response = await test_user.post("/api/v1/events/aggregate", json=aggregation_request) - assert response.status_code == 200 - - results = response.json() - assert isinstance(results, list) - - # Verify aggregation results structure - for result in results: - assert isinstance(result, dict) - assert "_id" in result # Group key - assert "count" in result # Aggregation result - assert isinstance(result["count"], int) - assert result["count"] >= 0 - - @pytest.mark.asyncio - async def 
test_delete_event_requires_admin(self, test_user: AsyncClient) -> None: - """Test that deleting events requires admin privileges.""" - # Try to delete an event (logged in as regular user via fixture) - fake_event_id = str(uuid4()) - response = await test_user.delete(f"/api/v1/events/{fake_event_id}") - assert response.status_code == 403 # Forbidden for non-admin - - @pytest.mark.asyncio - async def test_replay_aggregate_events_requires_admin(self, test_user: AsyncClient) -> None: - """Test that replaying events requires admin privileges.""" - # Try to replay events (logged in as regular user via fixture) - aggregate_id = str(uuid4()) - response = await test_user.post(f"/api/v1/events/replay/{aggregate_id}?dry_run=true") - assert response.status_code == 403 # Forbidden for non-admin - - @pytest.mark.asyncio - async def test_replay_aggregate_events_dry_run(self, test_admin: AsyncClient) -> None: - """Test replaying events in dry-run mode.""" - # Get an existing aggregate ID from events - events_response = await test_admin.get("/api/v1/events/user?limit=1") - assert events_response.status_code == 200 - - events_data = events_response.json() - if events_data["total"] > 0 and events_data["events"]: - aggregate_id = events_data["events"][0]["aggregate_id"] - - # Try dry-run replay - response = await test_admin.post( - f"/api/v1/events/replay/{aggregate_id}?dry_run=true" - ) - - if response.status_code == 200: - replay_data = response.json() - replay_response = ReplayAggregateResponse(**replay_data) - - assert replay_response.dry_run is True - assert replay_response.aggregate_id == aggregate_id - assert replay_response.event_count is not None and replay_response.event_count >= 0 - - if replay_response.event_types: - assert isinstance(replay_response.event_types, list) - if replay_response.start_time: - assert isinstance(replay_response.start_time, datetime) - if replay_response.end_time: - assert isinstance(replay_response.end_time, datetime) - elif response.status_code 
== 404: - # No events for this aggregate - error_data = response.json() - assert "detail" in error_data - - @pytest.mark.asyncio - async def test_event_pagination(self, test_user: AsyncClient) -> None: - """Test event pagination.""" - # Get first page - page1_response = await test_user.get("/api/v1/events/user?limit=5&skip=0") - assert page1_response.status_code == 200 - - page1_data = page1_response.json() - page1 = EventListResponse(**page1_data) - - # If there are more than 5 events, get second page - if page1.total > 5: - page2_response = await test_user.get("/api/v1/events/user?limit=5&skip=5") - assert page2_response.status_code == 200 - - page2_data = page2_response.json() - page2 = EventListResponse(**page2_data) - - # Verify pagination - assert page2.skip == 5 - assert page2.limit == 5 - assert page2.total == page1.total - - # Events should be different - if page1.events and page2.events: - page1_ids = {e.event_id for e in page1.events} - page2_ids = {e.event_id for e in page2.events} - # Should have no overlap - assert len(page1_ids.intersection(page2_ids)) == 0 - - @pytest.mark.asyncio - async def test_events_isolation_between_users(self, test_user: AsyncClient, - test_admin: AsyncClient) -> None: - """Test that events are properly isolated between users.""" - # Get each user's user_id from /me endpoint - user_me_response = await test_user.get("/api/v1/auth/me") - assert user_me_response.status_code == 200 - user_id = user_me_response.json()["user_id"] - - admin_me_response = await test_admin.get("/api/v1/auth/me") - assert admin_me_response.status_code == 200 - admin_id = admin_me_response.json()["user_id"] - - # Verify the two users are different - assert user_id != admin_id, "Test requires two different users" - - # Get events as regular user - user_events_response = await test_user.get("/api/v1/events/user?limit=10") - assert user_events_response.status_code == 200 - - user_events = user_events_response.json() - user_event_ids = {e["event_id"] for e 
in user_events["events"]} - - # Get events as admin (without include_all_users flag) - admin_events_response = await test_admin.get("/api/v1/events/user?limit=10") - assert admin_events_response.status_code == 200 - - admin_events = admin_events_response.json() - admin_event_ids = {e["event_id"] for e in admin_events["events"]} - - # Verify user events belong to the user - for event in user_events["events"]: - meta = event.get("metadata") or {} - if meta.get("user_id"): - assert meta["user_id"] == user_id, ( - f"User event has wrong user_id: expected {user_id}, got {meta['user_id']}" - ) - - # Verify admin events belong to the admin - for event in admin_events["events"]: - meta = event.get("metadata") or {} - if meta.get("user_id"): - assert meta["user_id"] == admin_id, ( - f"Admin event has wrong user_id: expected {admin_id}, got {meta['user_id']}" - ) - - # Verify no overlap in event IDs between users (proper isolation) - overlap = user_event_ids & admin_event_ids - assert not overlap, f"Events leaked between users: {overlap}" diff --git a/backend/tests/integration/test_health_routes.py b/backend/tests/integration/test_health_routes.py deleted file mode 100644 index 15485b8a..00000000 --- a/backend/tests/integration/test_health_routes.py +++ /dev/null @@ -1,92 +0,0 @@ -import asyncio -import time - -import pytest -from httpx import AsyncClient - - -@pytest.mark.integration -class TestHealthRoutes: - """Backend availability checks (no dedicated health endpoints).""" - - @pytest.mark.asyncio - async def test_liveness_available(self, client: AsyncClient) -> None: - r = await client.get("/api/v1/health/live") - assert r.status_code == 200 - data = r.json() - assert isinstance(data, dict) - assert data.get("status") == "ok" - - @pytest.mark.asyncio - async def test_liveness_no_auth_required(self, client: AsyncClient) -> None: - """Liveness should not require authentication.""" - response = await client.get("/api/v1/health/live") - assert response.status_code == 200 - 
data = response.json() - assert data.get("status") == "ok" - - @pytest.mark.asyncio - async def test_readiness_basic(self, client: AsyncClient) -> None: - """Readiness endpoint exists and responds 200 when ready.""" - response = await client.get("/api/v1/health/ready") - assert response.status_code == 200 - data = response.json() - assert data.get("status") == "ok" - - @pytest.mark.asyncio - async def test_liveness_is_fast(self, client: AsyncClient) -> None: - start = time.time() - r = await client.get("/api/v1/health/live") - assert r.status_code == 200 - assert time.time() - start < 1.0 - - @pytest.mark.asyncio - async def test_concurrent_liveness_fetch(self, client: AsyncClient) -> None: - tasks = [client.get("/api/v1/health/live") for _ in range(5)] - responses = await asyncio.gather(*tasks) - assert all(r.status_code == 200 for r in responses) - - @pytest.mark.asyncio - async def test_app_responds_during_load(self, client: AsyncClient, test_user: AsyncClient) -> None: - # Create some load with execution requests - async def create_load() -> int | None: - execution_request = { - "script": "print('Load test')", - "lang": "python", - "lang_version": "3.11" - } - try: - response = await test_user.post("/api/v1/execute", json=execution_request) - return response.status_code - except Exception: - return None - - # Start load generation - load_tasks = [create_load() for _ in range(5)] - - # Check readiness during load - r0 = await client.get("/api/v1/health/live") - assert r0.status_code == 200 - - # Wait for load tasks to complete - await asyncio.gather(*load_tasks, return_exceptions=True) - - # Check readiness after load - r1 = await client.get("/api/v1/health/live") - assert r1.status_code == 200 - - @pytest.mark.asyncio - async def test_nonexistent_health_routes_gone(self, client: AsyncClient) -> None: - for path in [ - "/api/v1/health/healthz", - "/api/v1/health/health", - "/api/v1/health/readyz", - ]: - r = await client.get(path) - assert r.status_code in (404, 
405) - - @pytest.mark.asyncio - async def test_docs_endpoint_available(self, client: AsyncClient) -> None: - # Swagger UI may return 200 or 404 depending on config; ensure no 5xx - r = await client.get("/docs") - assert r.status_code < 500 diff --git a/backend/tests/integration/test_notifications_routes.py b/backend/tests/integration/test_notifications_routes.py deleted file mode 100644 index bac015cc..00000000 --- a/backend/tests/integration/test_notifications_routes.py +++ /dev/null @@ -1,402 +0,0 @@ -import pytest -from app.domain.enums.notification import ( - NotificationChannel, - NotificationSeverity, - NotificationStatus, -) -from app.schemas_pydantic.notification import ( - DeleteNotificationResponse, - NotificationListResponse, - NotificationSubscription, - SubscriptionsResponse, - UnreadCountResponse, -) -from app.services.notification_service import NotificationService -from dishka import AsyncContainer -from httpx import AsyncClient - - -@pytest.mark.integration -class TestNotificationRoutes: - """Test notification endpoints against real backend.""" - - @pytest.mark.asyncio - async def test_notifications_require_authentication(self, client: AsyncClient) -> None: - """Test that notification endpoints require authentication.""" - # Try to access notifications without auth - response = await client.get("/api/v1/notifications") - assert response.status_code == 401 - - error_data = response.json() - assert "detail" in error_data - assert any(word in error_data["detail"].lower() - for word in ["not authenticated", "unauthorized", "login"]) - - @pytest.mark.asyncio - async def test_list_user_notifications(self, test_user: AsyncClient) -> None: - """Test listing user's notifications.""" - # List notifications - response = await test_user.get("/api/v1/notifications?limit=10&offset=0") - assert response.status_code == 200 - - # Validate response structure - notifications_data = response.json() - notifications_response = 
NotificationListResponse(**notifications_data) - - # Verify basic fields - assert isinstance(notifications_response.notifications, list) - assert isinstance(notifications_response.total, int) - assert isinstance(notifications_response.unread_count, int) - - # If there are notifications, validate their structure per schema - for n in notifications_response.notifications: - assert n.notification_id - assert n.channel in list(NotificationChannel) - assert n.severity in ["low","medium","high","urgent"] - assert isinstance(n.tags, list) - assert n.status in list(NotificationStatus) - assert n.subject is not None - assert n.body is not None - assert n.created_at is not None - - @pytest.mark.asyncio - async def test_filter_notifications_by_status(self, test_user: AsyncClient) -> None: - """Test filtering notifications by status.""" - # Test different status filters - statuses = [ - NotificationStatus.READ, - NotificationStatus.DELIVERED, - NotificationStatus.SKIPPED, - ] - for status in statuses: - response = await test_user.get(f"/api/v1/notifications?status={status}&limit=5") - assert response.status_code == 200 - - notifications_data = response.json() - notifications_response = NotificationListResponse(**notifications_data) - - # All returned notifications should have the requested status - for notification in notifications_response.notifications: - assert notification.status == status - - @pytest.mark.asyncio - async def test_get_unread_count(self, test_user: AsyncClient) -> None: - """Test getting count of unread notifications.""" - # Get unread count - response = await test_user.get("/api/v1/notifications/unread-count") - assert response.status_code == 200 - - # Validate response - count_data = response.json() - unread_count = UnreadCountResponse(**count_data) - - assert isinstance(unread_count.unread_count, int) - assert unread_count.unread_count >= 0 - - # Note: listing cannot filter 'unread' directly; count is authoritative - - @pytest.mark.asyncio - async def 
test_mark_notification_as_read(self, test_user: AsyncClient) -> None: - """Test marking a notification as read.""" - # Get an unread notification - notifications_response = await test_user.get( - f"/api/v1/notifications?status={NotificationStatus.DELIVERED}&limit=1") - assert notifications_response.status_code == 200 - - notifications_data = notifications_response.json() - if notifications_data["total"] > 0 and notifications_data["notifications"]: - notification_id = notifications_data["notifications"][0]["notification_id"] - - # Mark as read - mark_response = await test_user.put(f"/api/v1/notifications/{notification_id}/read") - assert mark_response.status_code == 204 - - # Verify it's now marked as read - updated_response = await test_user.get("/api/v1/notifications") - assert updated_response.status_code == 200 - - updated_data = updated_response.json() - # Find the notification and check its status - for notif in updated_data["notifications"]: - if notif["notification_id"] == notification_id: - assert notif["status"] == "read" - break - - @pytest.mark.asyncio - async def test_mark_nonexistent_notification_as_read(self, test_user: AsyncClient) -> None: - """Test marking a non-existent notification as read.""" - # Try to mark non-existent notification as read - fake_notification_id = "00000000-0000-0000-0000-000000000000" - response = await test_user.put(f"/api/v1/notifications/{fake_notification_id}/read") - # Prefer 404; if backend returns 500, treat as unavailable feature - if response.status_code == 500: - pytest.skip("Backend returns 500 for unknown notification IDs") - assert response.status_code == 404 - - error_data = response.json() - assert "detail" in error_data - assert "not found" in error_data["detail"].lower() - - @pytest.mark.asyncio - async def test_mark_all_notifications_as_read( - self, test_user: AsyncClient, scope: AsyncContainer - ) -> None: - """Test marking all notifications as read.""" - # Get user_id and create a test notification to 
ensure we have something to mark - me_response = await test_user.get("/api/v1/auth/me") - assert me_response.status_code == 200 - user_id = me_response.json()["user_id"] - - notification_service = await scope.get(NotificationService) - # Delivery is now awaited synchronously - notification available immediately - await notification_service.create_notification( - user_id=user_id, - subject="Test notification", - body="Created for mark-all-read test", - tags=["test"], - severity=NotificationSeverity.LOW, - channel=NotificationChannel.IN_APP, - ) - - # Get initial unread count (notification created synchronously, available now) - initial_response = await test_user.get("/api/v1/notifications/unread-count") - assert initial_response.status_code == 200 - initial_count = initial_response.json()["unread_count"] - assert initial_count >= 1, "Expected at least one unread notification after create" - - # Mark all as read - mark_all_response = await test_user.post("/api/v1/notifications/mark-all-read") - assert mark_all_response.status_code == 204 - - # Verify strict decrease - no branching needed - final_response = await test_user.get("/api/v1/notifications/unread-count") - assert final_response.status_code == 200 - final_count = final_response.json()["unread_count"] - - assert final_count < initial_count, ( - f"mark-all-read must decrease unread count: was {initial_count}, now {final_count}" - ) - - @pytest.mark.asyncio - async def test_get_notification_subscriptions(self, test_user: AsyncClient) -> None: - """Test getting user's notification subscriptions.""" - # Get subscriptions - response = await test_user.get("/api/v1/notifications/subscriptions") - assert response.status_code == 200 - - # Validate response - subscriptions_data = response.json() - subscriptions_response = SubscriptionsResponse(**subscriptions_data) - - assert isinstance(subscriptions_response.subscriptions, list) - - # Check each subscription - for subscription in subscriptions_response.subscriptions: - 
assert isinstance(subscription, NotificationSubscription) - assert subscription.channel in list(NotificationChannel) - assert isinstance(subscription.enabled, bool) - assert subscription.user_id is not None - - # Validate optional fields present in the schema - assert isinstance(subscription.severities, list) - assert isinstance(subscription.include_tags, list) - assert isinstance(subscription.exclude_tags, list) - - # Check webhook URLs if present - if subscription.webhook_url: - assert isinstance(subscription.webhook_url, str) - assert subscription.webhook_url.startswith("http") - if subscription.slack_webhook: - assert isinstance(subscription.slack_webhook, str) - assert subscription.slack_webhook.startswith("http") - - @pytest.mark.asyncio - async def test_update_notification_subscription(self, test_user: AsyncClient) -> None: - """Test updating a notification subscription.""" - # Update in_app subscription - update_data = { - "enabled": True, - "severities": ["medium","high"], - "include_tags": ["execution"], - "exclude_tags": ["external_alert"] - } - - response = await test_user.put("/api/v1/notifications/subscriptions/in_app", json=update_data) - assert response.status_code == 200 - - # Validate response - updated_sub_data = response.json() - updated_subscription = NotificationSubscription(**updated_sub_data) - - assert updated_subscription.channel == "in_app" - assert updated_subscription.enabled == update_data["enabled"] - assert updated_subscription.severities == update_data["severities"] - assert updated_subscription.include_tags == update_data["include_tags"] - assert updated_subscription.exclude_tags == update_data["exclude_tags"] - - # Verify the update persisted - get_response = await test_user.get("/api/v1/notifications/subscriptions") - assert get_response.status_code == 200 - - subs_data = get_response.json() - for sub in subs_data["subscriptions"]: - if sub["channel"] == "in_app": - assert sub["enabled"] == update_data["enabled"] - assert 
sub["severities"] == update_data["severities"] - assert sub["include_tags"] == update_data["include_tags"] - assert sub["exclude_tags"] == update_data["exclude_tags"] - break - - @pytest.mark.asyncio - async def test_update_webhook_subscription(self, test_user: AsyncClient) -> None: - """Test updating webhook subscription with URL.""" - # Update webhook subscription - update_data = { - "enabled": True, - "webhook_url": "https://example.com/webhook/notifications", - "severities": ["medium","high"], - "include_tags": ["execution"], - "exclude_tags": [] - } - - response = await test_user.put("/api/v1/notifications/subscriptions/webhook", json=update_data) - assert response.status_code == 200 - - # Validate response - updated_sub_data = response.json() - updated_subscription = NotificationSubscription(**updated_sub_data) - - assert updated_subscription.channel == "webhook" - assert updated_subscription.enabled == update_data["enabled"] - assert updated_subscription.webhook_url == update_data["webhook_url"] - assert updated_subscription.severities == update_data["severities"] - - @pytest.mark.asyncio - async def test_update_slack_subscription(self, test_user: AsyncClient) -> None: - """Test updating Slack subscription with webhook.""" - # Update Slack subscription - update_data = { - "enabled": True, - "slack_webhook": "https://hooks.slack.com/services/T00000000/B00000000/XXXXXXXXXXXXXXXXXXXX", - "severities": ["high","urgent"], - "include_tags": ["execution","error"], - "exclude_tags": [] - } - - response = await test_user.put("/api/v1/notifications/subscriptions/slack", json=update_data) - # Slack subscription may be disabled by config; 422 indicates validation - assert response.status_code in [200, 422] - if response.status_code == 422: - err = response.json() - assert "detail" in err - return - # Validate response - updated_sub_data = response.json() - updated_subscription = NotificationSubscription(**updated_sub_data) - assert updated_subscription.channel == 
"slack" - assert updated_subscription.enabled == update_data["enabled"] - assert updated_subscription.slack_webhook == update_data["slack_webhook"] - assert updated_subscription.severities == update_data["severities"] - - @pytest.mark.asyncio - async def test_delete_notification(self, test_user: AsyncClient) -> None: - """Test deleting a notification.""" - # Get a notification to delete - notifications_response = await test_user.get("/api/v1/notifications?limit=1") - assert notifications_response.status_code == 200 - - notifications_data = notifications_response.json() - if notifications_data["total"] > 0 and notifications_data["notifications"]: - notification_id = notifications_data["notifications"][0]["notification_id"] - - # Delete the notification - delete_response = await test_user.delete(f"/api/v1/notifications/{notification_id}") - assert delete_response.status_code == 200 - - # Validate response - delete_data = delete_response.json() - delete_result = DeleteNotificationResponse(**delete_data) - assert "deleted" in delete_result.message.lower() - - # Verify it's deleted - list_response = await test_user.get("/api/v1/notifications") - assert list_response.status_code == 200 - - list_data = list_response.json() - # Should not find the deleted notification - notification_ids = [n["notification_id"] for n in list_data["notifications"]] - assert notification_id not in notification_ids - - @pytest.mark.asyncio - async def test_delete_nonexistent_notification(self, test_user: AsyncClient) -> None: - """Test deleting a non-existent notification.""" - # Try to delete non-existent notification - fake_notification_id = "00000000-0000-0000-0000-000000000000" - response = await test_user.delete(f"/api/v1/notifications/{fake_notification_id}") - assert response.status_code == 404 - - error_data = response.json() - assert "detail" in error_data - assert "not found" in error_data["detail"].lower() - - @pytest.mark.asyncio - async def test_notification_pagination(self, 
test_user: AsyncClient) -> None: - """Test notification pagination.""" - # Get first page - page1_response = await test_user.get("/api/v1/notifications?limit=5&offset=0") - assert page1_response.status_code == 200 - - page1_data = page1_response.json() - page1 = NotificationListResponse(**page1_data) - - # If there are more than 5 notifications, get second page - if page1.total > 5: - page2_response = await test_user.get("/api/v1/notifications?limit=5&offset=5") - assert page2_response.status_code == 200 - - page2_data = page2_response.json() - page2 = NotificationListResponse(**page2_data) - - # Verify pagination metadata via totals only - assert page2.total == page1.total - - # Notifications should be different - if page1.notifications and page2.notifications: - page1_ids = {n.notification_id for n in page1.notifications} - page2_ids = {n.notification_id for n in page2.notifications} - # Should have no overlap - assert len(page1_ids.intersection(page2_ids)) == 0 - - @pytest.mark.asyncio - async def test_notifications_isolation_between_users(self, test_user: AsyncClient, - test_admin: AsyncClient) -> None: - """Test that notifications are isolated between users.""" - # Get user's notifications - user_notifications_response = await test_user.get("/api/v1/notifications") - assert user_notifications_response.status_code == 200 - - user_notifications_data = user_notifications_response.json() - user_notification_ids = [n["notification_id"] for n in user_notifications_data["notifications"]] - - # Get admin's notifications - admin_notifications_response = await test_admin.get("/api/v1/notifications") - assert admin_notifications_response.status_code == 200 - - admin_notifications_data = admin_notifications_response.json() - admin_notification_ids = [n["notification_id"] for n in admin_notifications_data["notifications"]] - - # Notifications should be different (no overlap) - if user_notification_ids and admin_notification_ids: - assert 
len(set(user_notification_ids).intersection(set(admin_notification_ids))) == 0 - - @pytest.mark.asyncio - async def test_invalid_notification_channel(self, test_user: AsyncClient) -> None: - """Test updating subscription with invalid channel.""" - # Try invalid channel - update_data = { - "enabled": True, - "severities": ["medium"] - } - - response = await test_user.put("/api/v1/notifications/subscriptions/invalid_channel", json=update_data) - assert response.status_code in [400, 404, 422] diff --git a/backend/tests/integration/test_replay_routes.py b/backend/tests/integration/test_replay_routes.py deleted file mode 100644 index 7ef221a5..00000000 --- a/backend/tests/integration/test_replay_routes.py +++ /dev/null @@ -1,400 +0,0 @@ -from datetime import datetime, timedelta, timezone -from uuid import uuid4 - -import pytest -from app.domain.enums.events import EventType -from app.domain.enums.replay import ReplayStatus, ReplayTarget, ReplayType -from app.domain.replay.models import ReplayFilter -from app.schemas_pydantic.replay import CleanupResponse, ReplayRequest, ReplayResponse, SessionSummary -from app.schemas_pydantic.replay_models import ReplaySession -from httpx import AsyncClient - - -@pytest.mark.integration -class TestReplayRoutes: - """Test replay endpoints against real backend.""" - - @pytest.mark.asyncio - async def test_replay_requires_admin_authentication(self, test_user: AsyncClient) -> None: - """Test that replay endpoints require admin authentication.""" - # test_user is authenticated but not admin - - # Try to access replay endpoints as non-admin - response = await test_user.get("/api/v1/replay/sessions") - assert response.status_code == 403 - - error_data = response.json() - assert "detail" in error_data - assert any(word in error_data["detail"].lower() - for word in ["admin", "forbidden", "denied"]) - - @pytest.mark.asyncio - async def test_create_replay_session(self, test_admin: AsyncClient) -> None: - """Test creating a replay session.""" - # 
Create replay session - replay_request = ReplayRequest( - replay_type=ReplayType.QUERY, - target=ReplayTarget.KAFKA, - filter=ReplayFilter( - event_types=[EventType.EXECUTION_REQUESTED, EventType.EXECUTION_COMPLETED], - start_time=datetime.now(timezone.utc) - timedelta(days=7), - end_time=datetime.now(timezone.utc), - ), - speed_multiplier=1.0, - preserve_timestamps=True, - ).model_dump(mode="json") - - response = await test_admin.post("/api/v1/replay/sessions", json=replay_request) - assert response.status_code in [200, 422] - if response.status_code == 422: - return - - # Validate response - replay_data = response.json() - replay_response = ReplayResponse(**replay_data) - - assert replay_response.session_id is not None - assert len(replay_response.session_id) > 0 - assert replay_response.status in [ReplayStatus.CREATED] - assert replay_response.message is not None - - @pytest.mark.asyncio - async def test_list_replay_sessions(self, test_admin: AsyncClient) -> None: - """Test listing replay sessions.""" - # List replay sessions - response = await test_admin.get("/api/v1/replay/sessions?limit=10") - assert response.status_code in [200, 404] - if response.status_code != 200: - return - - # Validate response - sessions_data = response.json() - assert isinstance(sessions_data, list) - - for session_data in sessions_data: - session_summary = SessionSummary(**session_data) - assert session_summary.session_id - assert session_summary.status in list(ReplayStatus) - assert session_summary.created_at is not None - - @pytest.mark.asyncio - async def test_get_replay_session_details(self, test_admin: AsyncClient) -> None: - """Test getting detailed information about a replay session.""" - # Create a session first - replay_request = ReplayRequest( - replay_type=ReplayType.QUERY, - target=ReplayTarget.KAFKA, - filter=ReplayFilter( - event_types=[EventType.USER_LOGGED_IN], - start_time=datetime.now(timezone.utc) - timedelta(hours=24), - end_time=datetime.now(timezone.utc), - ), - 
speed_multiplier=2.0, - ).model_dump(mode="json") - - create_response = await test_admin.post("/api/v1/replay/sessions", json=replay_request) - assert create_response.status_code == 200 - - session_id = create_response.json()["session_id"] - - # Get session details - detail_response = await test_admin.get(f"/api/v1/replay/sessions/{session_id}") - assert detail_response.status_code in [200, 404] - if detail_response.status_code != 200: - return - - # Validate detailed response - session_data = detail_response.json() - session = ReplaySession(**session_data) - assert session.session_id == session_id - assert session.status in list(ReplayStatus) - assert session.created_at is not None - - @pytest.mark.asyncio - async def test_start_replay_session(self, test_admin: AsyncClient) -> None: - """Test starting a replay session.""" - # Create a session - replay_request = ReplayRequest( - replay_type=ReplayType.QUERY, - target=ReplayTarget.KAFKA, - filter=ReplayFilter( - event_types=[EventType.SYSTEM_ERROR], - start_time=datetime.now(timezone.utc) - timedelta(hours=1), - end_time=datetime.now(timezone.utc), - ), - speed_multiplier=1.0, - ).model_dump(mode="json") - - create_response = await test_admin.post("/api/v1/replay/sessions", json=replay_request) - assert create_response.status_code == 200 - - session_id = create_response.json()["session_id"] - - # Start the session - start_response = await test_admin.post(f"/api/v1/replay/sessions/{session_id}/start") - assert start_response.status_code in [200, 404] - if start_response.status_code != 200: - return - - start_data = start_response.json() - start_result = ReplayResponse(**start_data) - - assert start_result.session_id == session_id - assert start_result.status in [ReplayStatus.RUNNING, ReplayStatus.COMPLETED] - assert start_result.message is not None - - @pytest.mark.asyncio - async def test_pause_and_resume_replay_session(self, test_admin: AsyncClient) -> None: - """Test pausing and resuming a replay session.""" - # 
Create and start a session - replay_request = ReplayRequest( - replay_type=ReplayType.QUERY, - target=ReplayTarget.KAFKA, - filter=ReplayFilter( - event_types=[EventType.SYSTEM_ERROR], - start_time=datetime.now(timezone.utc) - timedelta(hours=2), - end_time=datetime.now(timezone.utc), - ), - speed_multiplier=0.5, - ).model_dump(mode="json") - - create_response = await test_admin.post("/api/v1/replay/sessions", json=replay_request) - assert create_response.status_code == 200 - - session_id = create_response.json()["session_id"] - - # Start the session - start_response = await test_admin.post(f"/api/v1/replay/sessions/{session_id}/start") - assert start_response.status_code in [200, 404] - if start_response.status_code != 200: - return - - # Pause the session - pause_response = await test_admin.post(f"/api/v1/replay/sessions/{session_id}/pause") - # Could succeed or fail if session already completed or not found - assert pause_response.status_code in [200, 400, 404] - - if pause_response.status_code == 200: - pause_data = pause_response.json() - pause_result = ReplayResponse(**pause_data) - - assert pause_result.session_id == session_id - assert pause_result.status in [ReplayStatus.PAUSED, ReplayStatus.COMPLETED] - - # If paused, try to resume - if pause_result.status == "paused": - resume_response = await test_admin.post( - f"/api/v1/replay/sessions/{session_id}/resume" - ) - assert resume_response.status_code == 200 - - resume_data = resume_response.json() - resume_result = ReplayResponse(**resume_data) - - assert resume_result.session_id == session_id - assert resume_result.status in [ReplayStatus.RUNNING, ReplayStatus.COMPLETED] - - @pytest.mark.asyncio - async def test_cancel_replay_session(self, test_admin: AsyncClient) -> None: - """Test cancelling a replay session.""" - # Create a session - replay_request = ReplayRequest( - replay_type=ReplayType.QUERY, - target=ReplayTarget.KAFKA, - filter=ReplayFilter( - event_types=[EventType.SYSTEM_ERROR], - 
start_time=datetime.now(timezone.utc) - timedelta(hours=1), - end_time=datetime.now(timezone.utc), - ), - speed_multiplier=1.0, - ).model_dump(mode="json") - - create_response = await test_admin.post("/api/v1/replay/sessions", json=replay_request) - assert create_response.status_code == 200 - - session_id = create_response.json()["session_id"] - - # Cancel the session - cancel_response = await test_admin.post(f"/api/v1/replay/sessions/{session_id}/cancel") - assert cancel_response.status_code in [200, 404] - if cancel_response.status_code != 200: - return - - cancel_data = cancel_response.json() - cancel_result = ReplayResponse(**cancel_data) - - assert cancel_result.session_id == session_id - assert cancel_result.status in [ReplayStatus.CANCELLED, ReplayStatus.COMPLETED] - assert cancel_result.message is not None - - @pytest.mark.asyncio - async def test_filter_sessions_by_status(self, test_admin: AsyncClient) -> None: - """Test filtering replay sessions by status.""" - # Test different status filters - for status in [ - ReplayStatus.CREATED, - ReplayStatus.RUNNING, - ReplayStatus.COMPLETED, - ReplayStatus.FAILED, - ReplayStatus.CANCELLED, - ]: - response = await test_admin.get(f"/api/v1/replay/sessions?status={status}&limit=5") - assert response.status_code in [200, 404] - if response.status_code != 200: - continue - - sessions_data = response.json() - assert isinstance(sessions_data, list) - - # All returned sessions should have the requested status - for session_data in sessions_data: - session = SessionSummary(**session_data) - assert session.status == status - - @pytest.mark.asyncio - async def test_cleanup_old_sessions(self, test_admin: AsyncClient) -> None: - """Test cleanup of old replay sessions.""" - # Cleanup sessions older than 24 hours - cleanup_response = await test_admin.post("/api/v1/replay/cleanup?older_than_hours=24") - assert cleanup_response.status_code == 200 - - cleanup_data = cleanup_response.json() - cleanup_result = 
CleanupResponse(**cleanup_data) - - # API returns removed_sessions - assert isinstance(cleanup_result.removed_sessions, int) - assert cleanup_result.message is not None - - @pytest.mark.asyncio - async def test_get_nonexistent_session(self, test_admin: AsyncClient) -> None: - """Test getting a non-existent replay session.""" - # Try to get non-existent session - fake_session_id = str(uuid4()) - response = await test_admin.get(f"/api/v1/replay/sessions/{fake_session_id}") - # Could return 404 or empty result - assert response.status_code in [200, 404] - - if response.status_code == 404: - error_data = response.json() - assert "detail" in error_data - - @pytest.mark.asyncio - async def test_start_nonexistent_session(self, test_admin: AsyncClient) -> None: - """Test starting a non-existent replay session.""" - # Try to start non-existent session - fake_session_id = str(uuid4()) - response = await test_admin.post(f"/api/v1/replay/sessions/{fake_session_id}/start") - # Should fail - assert response.status_code in [400, 404] - - @pytest.mark.asyncio - async def test_replay_session_state_transitions(self, test_admin: AsyncClient) -> None: - """Test valid state transitions for replay sessions.""" - # Create a session - replay_request = { - "name": f"State Test Session {uuid4().hex[:8]}", - "description": "Testing state transitions", - "filters": { - "event_types": ["state.test.event"], - "start_time": (datetime.now(timezone.utc) - timedelta(hours=1)).isoformat(), - "end_time": datetime.now(timezone.utc).isoformat() - }, - "target_topic": "state-test-topic", - "speed_multiplier": 1.0 - } - - create_response = await test_admin.post("/api/v1/replay/sessions", json=replay_request) - assert create_response.status_code in [200, 422] - if create_response.status_code != 200: - return - - session_id = create_response.json()["session_id"] - initial_status = create_response.json()["status"] - assert initial_status == ReplayStatus.CREATED - - # Can't pause a session that hasn't 
started - pause_response = await test_admin.post(f"/api/v1/replay/sessions/{session_id}/pause") - assert pause_response.status_code in [400, 409] # Invalid state transition - - # Can start from pending - start_response = await test_admin.post(f"/api/v1/replay/sessions/{session_id}/start") - assert start_response.status_code == 200 - - # Can't start again if already running - start_again_response = await test_admin.post(f"/api/v1/replay/sessions/{session_id}/start") - assert start_again_response.status_code in [200, 400, 409] # Might be idempotent or error - - @pytest.mark.asyncio - async def test_replay_with_complex_filters(self, test_admin: AsyncClient) -> None: - """Test creating replay session with complex filters.""" - # Create session with complex filters - replay_request = { - "name": f"Complex Filter Session {uuid4().hex[:8]}", - "description": "Testing complex event filters", - "filters": { - "event_types": [ - "execution.requested", - "execution.started", - "execution.completed", - "execution.failed" - ], - "start_time": (datetime.now(timezone.utc) - timedelta(days=30)).isoformat(), - "end_time": datetime.now(timezone.utc).isoformat(), - "aggregate_id": str(uuid4()), - "correlation_id": str(uuid4()), - "service_name": "execution-service" - }, - "target_topic": "complex-filter-topic", - "speed_multiplier": 0.1, # Slow replay - "preserve_timing": False, - "batch_size": 100 - } - - response = await test_admin.post("/api/v1/replay/sessions", json=replay_request) - assert response.status_code in [200, 422] - if response.status_code != 200: - return - - replay_data = response.json() - replay_response = ReplayResponse(**replay_data) - - assert replay_response.session_id is not None - assert replay_response.status in ["created", "pending"] - - @pytest.mark.asyncio - async def test_replay_session_progress_tracking(self, test_admin: AsyncClient) -> None: - """Test tracking progress of replay sessions.""" - # Create and start a session - replay_request = { - "name": 
f"Progress Test Session {uuid4().hex[:8]}", - "description": "Testing progress tracking", - "filters": { - "event_types": ["progress.test.event"], - "start_time": (datetime.now(timezone.utc) - timedelta(minutes=30)).isoformat(), - "end_time": datetime.now(timezone.utc).isoformat() - }, - "target_topic": "progress-test-topic", - "speed_multiplier": 10.0 # Fast replay - } - - create_response = await test_admin.post("/api/v1/replay/sessions", json=replay_request) - assert create_response.status_code in [200, 422] - if create_response.status_code != 200: - return - - session_id = create_response.json()["session_id"] - - # Start the session - await test_admin.post(f"/api/v1/replay/sessions/{session_id}/start") - - # Check progress immediately - session state available right after start - detail_response = await test_admin.get(f"/api/v1/replay/sessions/{session_id}") - assert detail_response.status_code == 200 - session_data = detail_response.json() - session = ReplaySession(**session_data) - # Validate progress fields are present and valid - if session.replayed_events is not None and session.total_events is not None: - assert 0 <= session.replayed_events <= session.total_events - if session.total_events > 0: - progress = (session.replayed_events / session.total_events) * 100 - assert 0.0 <= progress <= 100.0 diff --git a/backend/tests/integration/test_saga_routes.py b/backend/tests/integration/test_saga_routes.py deleted file mode 100644 index cc015115..00000000 --- a/backend/tests/integration/test_saga_routes.py +++ /dev/null @@ -1,291 +0,0 @@ -import asyncio -import uuid - -import pytest -from app.domain.enums.saga import SagaState -from app.schemas_pydantic.saga import ( - SagaListResponse, - SagaStatusResponse, -) -from httpx import AsyncClient - - -class TestSagaRoutes: - """Test saga routes against the real backend.""" - - @pytest.mark.asyncio - async def test_get_saga_requires_auth(self, client: AsyncClient) -> None: - """Test that getting saga status requires 
authentication.""" - saga_id = str(uuid.uuid4()) - response = await client.get(f"/api/v1/sagas/{saga_id}") - assert response.status_code == 401 - assert "Not authenticated" in response.json()["detail"] - - @pytest.mark.asyncio - async def test_get_saga_not_found(self, test_user: AsyncClient) -> None: - """Test getting non-existent saga returns 404.""" - # Try to get non-existent saga - saga_id = str(uuid.uuid4()) - response = await test_user.get(f"/api/v1/sagas/{saga_id}") - assert response.status_code == 404 - assert "not found" in response.json()["detail"] - - @pytest.mark.asyncio - async def test_get_execution_sagas_requires_auth( - self, client: AsyncClient - ) -> None: - """Test that getting execution sagas requires authentication.""" - execution_id = str(uuid.uuid4()) - response = await client.get(f"/api/v1/sagas/execution/{execution_id}") - assert response.status_code == 401 - - @pytest.mark.asyncio - async def test_get_execution_sagas_empty(self, test_user: AsyncClient) -> None: - """Test getting sagas for execution with no sagas.""" - # Get sagas for non-existent execution - execution_id = str(uuid.uuid4()) - response = await test_user.get(f"/api/v1/sagas/execution/{execution_id}") - # Access to a random execution (non-owned) must be forbidden - assert response.status_code == 403 - - @pytest.mark.asyncio - async def test_get_execution_sagas_with_state_filter(self, test_user: AsyncClient) -> None: - """Test getting execution sagas filtered by state.""" - # Get sagas filtered by running state - execution_id = str(uuid.uuid4()) - response = await test_user.get( - f"/api/v1/sagas/execution/{execution_id}", - params={"state": SagaState.RUNNING} - ) - # Access denied for non-owned execution is valid - assert response.status_code in [200, 403] - if response.status_code == 403: - return - saga_list = SagaListResponse(**response.json()) - assert saga_list.total == 0 # No running sagas for this execution - - @pytest.mark.asyncio - async def 
test_list_sagas_requires_auth(self, client: AsyncClient) -> None: - """Test that listing sagas requires authentication.""" - response = await client.get("/api/v1/sagas/") - assert response.status_code == 401 - - @pytest.mark.asyncio - async def test_list_sagas_paginated(self, test_user: AsyncClient) -> None: - """Test listing sagas with pagination.""" - # List sagas with pagination - response = await test_user.get( - "/api/v1/sagas/", - params={"limit": 10, "offset": 0} - ) - assert response.status_code == 200 - - saga_list = SagaListResponse(**response.json()) - assert isinstance(saga_list.total, int) - assert isinstance(saga_list.sagas, list) - assert saga_list.total >= 0 - - @pytest.mark.asyncio - async def test_list_sagas_with_state_filter(self, test_user: AsyncClient) -> None: - """Test listing sagas filtered by state.""" - # List completed sagas - response = await test_user.get( - "/api/v1/sagas/", - params={"state": SagaState.COMPLETED, "limit": 5} - ) - assert response.status_code == 200 - - saga_list = SagaListResponse(**response.json()) - # All sagas should be completed if any exist - for saga in saga_list.sagas: - if saga.state: - assert saga.state == SagaState.COMPLETED - - @pytest.mark.asyncio - async def test_list_sagas_large_limit(self, test_user: AsyncClient) -> None: - """Test listing sagas with maximum limit.""" - # List with max limit - response = await test_user.get( - "/api/v1/sagas/", - params={"limit": 1000} - ) - assert response.status_code == 200 - - saga_list = SagaListResponse(**response.json()) - assert len(saga_list.sagas) <= 1000 - - @pytest.mark.asyncio - async def test_list_sagas_invalid_limit(self, test_user: AsyncClient) -> None: - """Test listing sagas with invalid limit.""" - # Try with limit too large - response = await test_user.get( - "/api/v1/sagas/", - params={"limit": 10000} - ) - assert response.status_code == 422 # Validation error - - @pytest.mark.asyncio - async def test_cancel_saga_requires_auth(self, client: 
AsyncClient) -> None: - """Test that cancelling saga requires authentication.""" - saga_id = str(uuid.uuid4()) - response = await client.post(f"/api/v1/sagas/{saga_id}/cancel") - assert response.status_code == 401 - - @pytest.mark.asyncio - async def test_cancel_saga_not_found(self, test_user: AsyncClient) -> None: - """Test cancelling non-existent saga returns 404.""" - # Try to cancel non-existent saga - saga_id = str(uuid.uuid4()) - response = await test_user.post(f"/api/v1/sagas/{saga_id}/cancel") - assert response.status_code == 404 - assert "not found" in response.json()["detail"] - - @pytest.mark.asyncio - async def test_saga_access_control( - self, - test_user: AsyncClient, - another_user: AsyncClient - ) -> None: - """Test that users can only access their own sagas.""" - # User 1 lists their sagas - response1 = await test_user.get("/api/v1/sagas/") - assert response1.status_code == 200 - user1_sagas = SagaListResponse(**response1.json()) - - # User 2 lists their sagas - response2 = await another_user.get("/api/v1/sagas/") - assert response2.status_code == 200 - user2_sagas = SagaListResponse(**response2.json()) - - # Each user should see only their own sagas - # (we can't verify the exact content without creating sagas, - # but we can verify the endpoint works correctly) - assert isinstance(user1_sagas.sagas, list) - assert isinstance(user2_sagas.sagas, list) - - @pytest.mark.asyncio - async def test_get_saga_with_details(self, test_user: AsyncClient) -> None: - """Test getting saga with all details when it exists.""" - # First list sagas to potentially find one - list_response = await test_user.get("/api/v1/sagas/", params={"limit": 1}) - assert list_response.status_code == 200 - saga_list = SagaListResponse(**list_response.json()) - - if saga_list.sagas and len(saga_list.sagas) > 0: - # Get details of the first saga - saga_id = saga_list.sagas[0].saga_id - response = await test_user.get(f"/api/v1/sagas/{saga_id}") - - # Could be 200 if accessible or 403 
if not owned by user - assert response.status_code in [200, 403, 404] - - if response.status_code == 200: - saga_status = SagaStatusResponse(**response.json()) - assert saga_status.saga_id == saga_id - assert saga_status.state in list(SagaState) - - @pytest.mark.asyncio - async def test_list_sagas_with_offset(self, test_user: AsyncClient) -> None: - """Test listing sagas with offset for pagination.""" - # Get first page - response1 = await test_user.get( - "/api/v1/sagas/", - params={"limit": 5, "offset": 0} - ) - assert response1.status_code == 200 - page1 = SagaListResponse(**response1.json()) - - # Get second page - response2 = await test_user.get( - "/api/v1/sagas/", - params={"limit": 5, "offset": 5} - ) - assert response2.status_code == 200 - page2 = SagaListResponse(**response2.json()) - - # If there are sagas, verify pagination works - if page1.sagas and page2.sagas: - # Saga IDs should be different between pages - page1_ids = {s.saga_id for s in page1.sagas} - page2_ids = {s.saga_id for s in page2.sagas} - assert len(page1_ids.intersection(page2_ids)) == 0 - - @pytest.mark.asyncio - async def test_cancel_saga_invalid_state(self, test_user: AsyncClient) -> None: - """Test cancelling a saga in invalid state (if one exists).""" - # Try to find a completed saga to cancel - response = await test_user.get( - "/api/v1/sagas/", - params={"state": SagaState.COMPLETED, "limit": 1} - ) - assert response.status_code == 200 - saga_list = SagaListResponse(**response.json()) - - if saga_list.sagas and len(saga_list.sagas) > 0: - # Try to cancel completed saga (should fail) - saga_id = saga_list.sagas[0].saga_id - cancel_response = await test_user.post(f"/api/v1/sagas/{saga_id}/cancel") - # Should get 400 (invalid state) or 403 (access denied) or 404 - assert cancel_response.status_code in [400, 403, 404] - - @pytest.mark.asyncio - async def test_get_execution_sagas_multiple_states(self, test_user: AsyncClient) -> None: - """Test getting execution sagas across different 
states.""" - execution_id = str(uuid.uuid4()) - - # Test each state filter - for state in [SagaState.CREATED, SagaState.RUNNING, SagaState.COMPLETED, - SagaState.FAILED, SagaState.CANCELLED]: - response = await test_user.get( - f"/api/v1/sagas/execution/{execution_id}", - params={"state": state} - ) - assert response.status_code in [200, 403] - if response.status_code == 403: - continue - saga_list = SagaListResponse(**response.json()) - - # All returned sagas should match the requested state - for saga in saga_list.sagas: - if saga.state: - assert saga.state == state - - @pytest.mark.asyncio - async def test_saga_response_structure(self, test_user: AsyncClient) -> None: - """Test that saga responses have correct structure.""" - # List sagas to verify response structure - response = await test_user.get("/api/v1/sagas/", params={"limit": 1}) - assert response.status_code == 200 - - saga_list = SagaListResponse(**response.json()) - assert hasattr(saga_list, "sagas") - assert hasattr(saga_list, "total") - assert isinstance(saga_list.sagas, list) - assert isinstance(saga_list.total, int) - - # If we have sagas, verify their structure - if saga_list.sagas: - saga = saga_list.sagas[0] - assert hasattr(saga, "saga_id") - assert hasattr(saga, "execution_id") - assert hasattr(saga, "state") - assert hasattr(saga, "created_at") - - @pytest.mark.asyncio - async def test_concurrent_saga_access(self, test_user: AsyncClient) -> None: - """Test concurrent access to saga endpoints.""" - # Make multiple concurrent requests - tasks = [] - for i in range(5): - tasks.append(test_user.get( - "/api/v1/sagas/", - params={"limit": 10, "offset": i * 10} - )) - - responses = await asyncio.gather(*tasks) - - # All requests should succeed - for response in responses: - assert response.status_code == 200 - saga_list = SagaListResponse(**response.json()) - assert isinstance(saga_list.sagas, list) diff --git a/backend/tests/integration/test_saved_scripts_routes.py 
b/backend/tests/integration/test_saved_scripts_routes.py deleted file mode 100644 index 2561ad60..00000000 --- a/backend/tests/integration/test_saved_scripts_routes.py +++ /dev/null @@ -1,426 +0,0 @@ -from datetime import datetime, timezone -from uuid import UUID, uuid4 - -import pytest -from app.schemas_pydantic.saved_script import SavedScriptResponse -from httpx import AsyncClient - - -@pytest.mark.integration -class TestSavedScripts: - """Test saved scripts endpoints against real backend.""" - - @pytest.mark.asyncio - async def test_create_script_requires_authentication(self, client: AsyncClient) -> None: - """Test that creating a saved script requires authentication.""" - script_data = { - "name": "Unauthenticated Script", - "script": "print('Should fail')", - "lang": "python", - "lang_version": "3.11" - } - - response = await client.post("/api/v1/scripts", json=script_data) - assert response.status_code == 401 - - error_data = response.json() - assert "detail" in error_data - assert any(word in error_data["detail"].lower() - for word in ["not authenticated", "unauthorized", "login"]) - - @pytest.mark.asyncio - async def test_create_and_retrieve_saved_script(self, test_user: AsyncClient) -> None: - """Test creating and retrieving a saved script.""" - # Already authenticated via test_user fixture - - # Create a unique script - unique_id = str(uuid4())[:8] - script_data = { - "name": f"Test Script {unique_id}", - "script": f"# Script {unique_id}\nprint('Hello from saved script {unique_id}')", - "lang": "python", - "lang_version": "3.11", - "description": f"Test script created at {datetime.now(timezone.utc).isoformat()}" - } - - # Create the script (include CSRF header for POST request) - create_response = await test_user.post("/api/v1/scripts", json=script_data) - assert create_response.status_code in [200, 201] - - # Validate response structure - created_data = create_response.json() - saved_script = SavedScriptResponse(**created_data) - - # Verify all fields - 
assert saved_script.script_id is not None - assert len(saved_script.script_id) > 0 - - # Verify it's a valid UUID - try: - UUID(saved_script.script_id) - except ValueError: - pytest.fail(f"Invalid script_id format: {saved_script.script_id}") - - # Verify data matches request - assert saved_script.name == script_data["name"] - assert saved_script.script == script_data["script"] - assert saved_script.lang == script_data["lang"] - assert saved_script.lang_version == script_data["lang_version"] - assert saved_script.description == script_data["description"] - - # Verify timestamps - assert saved_script.created_at is not None - assert saved_script.updated_at is not None - - # Now retrieve the script by ID - get_response = await test_user.get(f"/api/v1/scripts/{saved_script.script_id}") - assert get_response.status_code == 200 - - retrieved_data = get_response.json() - retrieved_script = SavedScriptResponse(**retrieved_data) - - # Verify it matches what we created - assert retrieved_script.script_id == saved_script.script_id - assert retrieved_script.name == script_data["name"] - assert retrieved_script.script == script_data["script"] - - @pytest.mark.asyncio - async def test_list_user_scripts(self, test_user: AsyncClient) -> None: - """Test listing user's saved scripts.""" - # Already authenticated via test_user fixture - - # Create a few scripts - unique_id = str(uuid4())[:8] - scripts_to_create = [ - { - "name": f"List Test Script 1 {unique_id}", - "script": "print('Script 1')", - "lang": "python", - "lang_version": "3.11", - "description": "First script" - }, - { - "name": f"List Test Script 2 {unique_id}", - "script": "console.log('Script 2');", - "lang": "javascript", - "lang_version": "18", - "description": "Second script" - }, - { - "name": f"List Test Script 3 {unique_id}", - "script": "print('Script 3')", - "lang": "python", - "lang_version": "3.10" - } - ] - - created_ids = [] - for script_data in scripts_to_create: - create_response = await 
test_user.post("/api/v1/scripts", json=script_data) - if create_response.status_code in [200, 201]: - created_ids.append(create_response.json()["script_id"]) - - # List all scripts - list_response = await test_user.get("/api/v1/scripts") - assert list_response.status_code == 200 - - scripts_list = list_response.json() - assert isinstance(scripts_list, list) - - # Should have at least the scripts we just created - assert len(scripts_list) >= len(created_ids) - - # Validate structure of returned scripts - for script_data in scripts_list: - saved_script = SavedScriptResponse(**script_data) - assert saved_script.script_id is not None - assert saved_script.name is not None - assert saved_script.script is not None - assert saved_script.lang is not None - assert saved_script.lang_version is not None - - # Check that our created scripts are in the list - returned_ids = [script["script_id"] for script in scripts_list] - for created_id in created_ids: - assert created_id in returned_ids - - @pytest.mark.asyncio - async def test_update_saved_script(self, test_user: AsyncClient) -> None: - """Test updating a saved script.""" - # Already authenticated via test_user fixture - - # Create a script - unique_id = str(uuid4())[:8] - original_data = { - "name": f"Original Script {unique_id}", - "script": "print('Original content')", - "lang": "python", - "lang_version": "3.11", - "description": "Original description" - } - - create_response = await test_user.post("/api/v1/scripts", json=original_data) - assert create_response.status_code in [200, 201] - - created_script = create_response.json() - script_id = created_script["script_id"] - original_created_at = created_script["created_at"] - - # Update the script - updated_data = { - "name": f"Updated Script {unique_id}", - "script": "print('Updated content with more features')", - "lang": "python", - "lang_version": "3.12", - "description": "Updated description with more details" - } - - update_response = await 
test_user.put(f"/api/v1/scripts/{script_id}", json=updated_data) - assert update_response.status_code == 200 - - updated_script_data = update_response.json() - updated_script = SavedScriptResponse(**updated_script_data) - - # Verify updates were applied - assert updated_script.script_id == script_id # ID should not change - assert updated_script.name == updated_data["name"] - assert updated_script.script == updated_data["script"] - assert updated_script.lang == updated_data["lang"] - assert updated_script.lang_version == updated_data["lang_version"] - assert updated_script.description == updated_data["description"] - - # Verify created_at didn't change (normalize tz and millisecond precision) and updated_at did - orig_dt = datetime.fromisoformat(original_created_at.replace('Z', '+00:00')) - upd_dt = updated_script.created_at - if upd_dt.tzinfo is None: - upd_dt = upd_dt.replace(tzinfo=timezone.utc) - assert int(upd_dt.timestamp() * 1000) == int(orig_dt.timestamp() * 1000) - assert updated_script.updated_at > updated_script.created_at - - @pytest.mark.asyncio - async def test_delete_saved_script(self, test_user: AsyncClient) -> None: - """Test deleting a saved script.""" - # Already authenticated via test_user fixture - - # Create a script to delete - unique_id = str(uuid4())[:8] - script_data = { - "name": f"Script to Delete {unique_id}", - "script": "print('Delete me')", - "lang": "python", - "lang_version": "3.11", - "description": "This script will be deleted" - } - - create_response = await test_user.post("/api/v1/scripts", json=script_data) - assert create_response.status_code in [200, 201] - - script_id = create_response.json()["script_id"] - - # Delete the script - delete_response = await test_user.delete(f"/api/v1/scripts/{script_id}") - assert delete_response.status_code in [200, 204] - - # Verify it's deleted by trying to get it - get_response = await test_user.get(f"/api/v1/scripts/{script_id}") - assert get_response.status_code in [404, 403] - - if 
get_response.status_code == 404: - error_data = get_response.json() - assert "detail" in error_data - - @pytest.mark.asyncio - async def test_cannot_access_other_users_scripts(self, test_user: AsyncClient, - test_admin: AsyncClient) -> None: - """Test that users cannot access scripts created by other users.""" - unique_id = str(uuid4())[:8] - user_script_data = { - "name": f"User Private Script {unique_id}", - "script": "print('Private to user')", - "lang": "python", - "lang_version": "3.11", - "description": "Should only be visible to creating user" - } - - create_response = await test_user.post("/api/v1/scripts", json=user_script_data) - assert create_response.status_code in [200, 201] - - user_script_id = create_response.json()["script_id"] - - # Try to access the user's script as admin - # This should fail unless admin has special permissions - get_response = await test_admin.get(f"/api/v1/scripts/{user_script_id}") - # Should be forbidden or not found - assert get_response.status_code in [403, 404] - - # List scripts as admin - should not include user's script - list_response = await test_admin.get("/api/v1/scripts") - assert list_response.status_code == 200 - - admin_scripts = list_response.json() - admin_script_ids = [s["script_id"] for s in admin_scripts] - # User's script should not be in admin's list - assert user_script_id not in admin_script_ids - - @pytest.mark.asyncio - async def test_script_with_invalid_language(self, test_user: AsyncClient) -> None: - """Test that invalid language/version combinations are handled.""" - unique_id = str(uuid4())[:8] - - # Try invalid language - invalid_lang_data = { - "name": f"Invalid Language Script {unique_id}", - "script": "print('test')", - "lang": "invalid_language", - "lang_version": "1.0" - } - - response = await test_user.post("/api/v1/scripts", json=invalid_lang_data) - # Backend may accept arbitrary lang values; accept any outcome - assert response.status_code in [200, 201, 400, 422] - - # Try unsupported 
version - unsupported_version_data = { - "name": f"Unsupported Version Script {unique_id}", - "script": "print('test')", - "lang": "python", - "lang_version": "2.7" # Python 2 likely not supported - } - - response = await test_user.post("/api/v1/scripts", json=unsupported_version_data) - # Might accept but warn, or reject - assert response.status_code in [200, 201, 400, 422] - - @pytest.mark.asyncio - async def test_script_name_constraints(self, test_user: AsyncClient) -> None: - """Test script name validation and constraints.""" - # Test empty name - empty_name_data = { - "name": "", - "script": "print('test')", - "lang": "python", - "lang_version": "3.11" - } - - response = await test_user.post("/api/v1/scripts", json=empty_name_data) - assert response.status_code in [200, 201, 400, 422] - - # Test very long name - long_name_data = { - "name": "x" * 1000, # Very long name - "script": "print('test')", - "lang": "python", - "lang_version": "3.11" - } - - response = await test_user.post("/api/v1/scripts", json=long_name_data) - # Should either accept or reject based on max length - if response.status_code in [400, 422]: - error_data = response.json() - assert "detail" in error_data - - @pytest.mark.asyncio - async def test_script_content_size_limits(self, test_user: AsyncClient) -> None: - """Test script content size limits.""" - unique_id = str(uuid4())[:8] - - # Test reasonably large script (should succeed) - large_content = "# Large script\n" + "\n".join([f"print('Line {i}')" for i in range(1000)]) - large_script_data = { - "name": f"Large Script {unique_id}", - "script": large_content, - "lang": "python", - "lang_version": "3.11" - } - - response = await test_user.post("/api/v1/scripts", json=large_script_data) - assert response.status_code in [200, 201] - - # Test excessively large script (should fail) - huge_content = "x" * (1024 * 1024 * 10) # 10MB - huge_script_data = { - "name": f"Huge Script {unique_id}", - "script": huge_content, - "lang": "python", - 
"lang_version": "3.11" - } - - response = await test_user.post("/api/v1/scripts", json=huge_script_data) - # If backend returns 500 for oversized payload, skip as environment-specific - if response.status_code >= 500: - pytest.skip("Backend returned 5xx for oversized script upload") - assert response.status_code in [200, 201, 400, 413, 422] - - @pytest.mark.asyncio - async def test_update_nonexistent_script(self, test_user: AsyncClient) -> None: - """Test updating a non-existent script.""" - fake_script_id = "00000000-0000-0000-0000-000000000000" - - update_data = { - "name": "Won't Work", - "script": "print('This should fail')", - "lang": "python", - "lang_version": "3.11" - } - - response = await test_user.put(f"/api/v1/scripts/{fake_script_id}", json=update_data) - # Non-existent script must return 404/403 (no server error) - assert response.status_code in [404, 403] - - error_data = response.json() - assert "detail" in error_data - - @pytest.mark.asyncio - async def test_delete_nonexistent_script(self, test_user: AsyncClient) -> None: - """Test deleting a non-existent script.""" - fake_script_id = "00000000-0000-0000-0000-000000000000" - - response = await test_user.delete(f"/api/v1/scripts/{fake_script_id}") - # Could be 404 (not found) or 204 (idempotent delete) - assert response.status_code in [404, 403, 204] - - @pytest.mark.asyncio - async def test_scripts_persist_across_sessions(self, test_user: AsyncClient) -> None: - """Test that scripts persist across login sessions.""" - unique_id = str(uuid4())[:8] - script_data = { - "name": f"Persistent Script {unique_id}", - "script": "print('Should persist')", - "lang": "python", - "lang_version": "3.11", - "description": "Testing persistence" - } - - create_response = await test_user.post("/api/v1/scripts", json=script_data) - assert create_response.status_code in [200, 201] - - script_id = create_response.json()["script_id"] - - # Get username before logout so we can re-login - me_response = await 
test_user.get("/api/v1/auth/me") - assert me_response.status_code == 200 - username = me_response.json()["username"] - - # Logout - this clears cookies via Set-Cookie response - logout_response = await test_user.post("/api/v1/auth/logout") - assert logout_response.status_code == 200 - - # Re-login to get fresh authentication - login_response = await test_user.post( - "/api/v1/auth/login", - data={"username": username, "password": "TestPass123!"}, - ) - assert login_response.status_code == 200 - - # Update CSRF header from new session - csrf_token = login_response.json().get("csrf_token", "") - test_user.headers["X-CSRF-Token"] = csrf_token - - # Script should still exist after logout/login cycle - get_response = await test_user.get(f"/api/v1/scripts/{script_id}") - assert get_response.status_code == 200 - - retrieved_script = SavedScriptResponse(**get_response.json()) - assert retrieved_script.script_id == script_id - assert retrieved_script.name == script_data["name"] - assert retrieved_script.script == script_data["script"] diff --git a/backend/tests/integration/test_sse_routes.py b/backend/tests/integration/test_sse_routes.py deleted file mode 100644 index 9a902a54..00000000 --- a/backend/tests/integration/test_sse_routes.py +++ /dev/null @@ -1,268 +0,0 @@ -"""SSE integration tests - precise verification of Redis pub/sub and stream behavior.""" - -import json -from contextlib import aclosing -from datetime import datetime, timezone -from typing import Any -from uuid import uuid4 - -import pytest -from app.domain.enums.events import EventType -from app.domain.enums.notification import NotificationSeverity, NotificationStatus -from app.domain.enums.sse import SSEControlEvent, SSENotificationEvent -from app.domain.events.typed import EventMetadata, PodCreatedEvent -from app.schemas_pydantic.sse import ( - RedisNotificationMessage, - RedisSSEMessage, - SSEHealthResponse, -) -from app.services.sse.redis_bus import SSERedisBus -from app.services.sse.sse_service import 
SSEService -from dishka import AsyncContainer -from httpx import AsyncClient - - -@pytest.mark.integration -class TestSSEAuth: - """SSE endpoints require authentication.""" - - @pytest.mark.asyncio - async def test_notification_stream_requires_auth(self, client: AsyncClient) -> None: - assert (await client.get("/api/v1/events/notifications/stream")).status_code == 401 - - @pytest.mark.asyncio - async def test_execution_stream_requires_auth(self, client: AsyncClient) -> None: - assert (await client.get(f"/api/v1/events/executions/{uuid4()}")).status_code == 401 - - @pytest.mark.asyncio - async def test_health_requires_auth(self, client: AsyncClient) -> None: - assert (await client.get("/api/v1/events/health")).status_code == 401 - - -@pytest.mark.integration -class TestSSEHealth: - """SSE health endpoint.""" - - @pytest.mark.asyncio - async def test_returns_valid_health_status(self, test_user: AsyncClient) -> None: - r = await test_user.get("/api/v1/events/health") - assert r.status_code == 200 - health = SSEHealthResponse.model_validate(r.json()) - assert health.status in ("healthy", "degraded", "unhealthy", "draining") - assert health.active_connections >= 0 - - -@pytest.mark.integration -class TestRedisPubSubExecution: - """Redis pub/sub for execution events - verifies message structure and delivery.""" - - @pytest.mark.asyncio - async def test_publish_wraps_event_in_redis_message(self, scope: AsyncContainer) -> None: - """publish_event wraps BaseEvent in RedisSSEMessage with correct structure.""" - bus: SSERedisBus = await scope.get(SSERedisBus) - exec_id = f"exec-{uuid4().hex[:8]}" - - subscription = await bus.open_subscription(exec_id) - - event = PodCreatedEvent( - execution_id=exec_id, - pod_name="test-pod", - namespace="test-ns", - metadata=EventMetadata(service_name="test", service_version="1.0"), - ) - await bus.publish_event(exec_id, event) - - # Verify the wrapper structure - received: RedisSSEMessage | None = await subscription.get(RedisSSEMessage) - 
await subscription.close() - - assert received is not None - assert received.event_type == EventType.POD_CREATED - assert received.execution_id == exec_id - assert received.data["pod_name"] == "test-pod" - assert received.data["namespace"] == "test-ns" - - @pytest.mark.asyncio - async def test_channel_isolation(self, scope: AsyncContainer) -> None: - """Different execution_ids use isolated channels.""" - bus: SSERedisBus = await scope.get(SSERedisBus) - exec_a, exec_b = f"exec-a-{uuid4().hex[:6]}", f"exec-b-{uuid4().hex[:6]}" - - sub_a = await bus.open_subscription(exec_a) - sub_b = await bus.open_subscription(exec_b) - - event = PodCreatedEvent( - execution_id=exec_a, - pod_name="pod-a", - namespace="default", - metadata=EventMetadata(service_name="test", service_version="1"), - ) - await bus.publish_event(exec_a, event) - - received_a = await sub_a.get(RedisSSEMessage) - received_b = await sub_b.get(RedisSSEMessage) - - await sub_a.close() - await sub_b.close() - - assert received_a is not None - assert received_a.data["pod_name"] == "pod-a" - assert received_b is None # B should not receive A's message - - -@pytest.mark.integration -class TestRedisPubSubNotification: - """Redis pub/sub for notifications - verifies message structure and delivery.""" - - @pytest.mark.asyncio - async def test_publish_sends_notification_directly(self, scope: AsyncContainer) -> None: - """publish_notification sends RedisNotificationMessage JSON directly.""" - bus: SSERedisBus = await scope.get(SSERedisBus) - user_id = f"user-{uuid4().hex[:8]}" - - subscription = await bus.open_notification_subscription(user_id) - - notification = RedisNotificationMessage( - notification_id="notif-123", - severity=NotificationSeverity.HIGH, - status=NotificationStatus.PENDING, - tags=["urgent", "system"], - subject="Test Alert", - body="This is a test notification", - action_url="https://example.com/action", - created_at=datetime(2024, 6, 15, 12, 0, 0, tzinfo=timezone.utc), - ) - await 
bus.publish_notification(user_id, notification) - - received: RedisNotificationMessage | None = await subscription.get(RedisNotificationMessage) - await subscription.close() - - assert received is not None - assert received.notification_id == "notif-123" - assert received.severity == NotificationSeverity.HIGH - assert received.status == NotificationStatus.PENDING - assert received.tags == ["urgent", "system"] - assert received.subject == "Test Alert" - assert received.body == "This is a test notification" - assert received.action_url == "https://example.com/action" - - @pytest.mark.asyncio - async def test_user_channel_isolation(self, scope: AsyncContainer) -> None: - """Different user_ids use isolated channels.""" - bus: SSERedisBus = await scope.get(SSERedisBus) - user_a, user_b = f"user-a-{uuid4().hex[:6]}", f"user-b-{uuid4().hex[:6]}" - - sub_a = await bus.open_notification_subscription(user_a) - sub_b = await bus.open_notification_subscription(user_b) - - notification = RedisNotificationMessage( - notification_id="for-user-a", - severity=NotificationSeverity.LOW, - status=NotificationStatus.PENDING, - tags=[], - subject="Private", - body="For user A only", - action_url="", - created_at=datetime.now(timezone.utc), - ) - await bus.publish_notification(user_a, notification) - - received_a = await sub_a.get(RedisNotificationMessage) - received_b = await sub_b.get(RedisNotificationMessage) - - await sub_a.close() - await sub_b.close() - - assert received_a is not None - assert received_a.notification_id == "for-user-a" - assert received_b is None # B should not receive A's notification - - -@pytest.mark.integration -class TestSSEStreamEvents: - """SSE stream control events - verifies event structure without pub/sub.""" - - @pytest.mark.asyncio - async def test_notification_stream_yields_connected_then_subscribed(self, scope: AsyncContainer) -> None: - """Notification stream yields CONNECTED and SUBSCRIBED with correct fields.""" - sse_service: SSEService = await 
scope.get(SSEService) - user_id = f"user-{uuid4().hex[:8]}" - - events: list[dict[str, Any]] = [] - async with aclosing(sse_service.create_notification_stream(user_id)) as stream: - async for raw in stream: - if "data" in raw: - events.append(json.loads(raw["data"])) - if len(events) >= 2: - break - - # Verify CONNECTED event structure - connected = events[0] - assert connected["event_type"] == SSENotificationEvent.CONNECTED - assert connected["user_id"] == user_id - assert "timestamp" in connected - assert connected["message"] == "Connected to notification stream" - - # Verify SUBSCRIBED event structure - subscribed = events[1] - assert subscribed["event_type"] == SSENotificationEvent.SUBSCRIBED - assert subscribed["user_id"] == user_id - assert "timestamp" in subscribed - assert subscribed["message"] == "Redis subscription established" - - @pytest.mark.asyncio - async def test_execution_stream_yields_connected_then_subscribed(self, scope: AsyncContainer) -> None: - """Execution stream yields CONNECTED and SUBSCRIBED with correct fields.""" - sse_service: SSEService = await scope.get(SSEService) - exec_id = f"exec-{uuid4().hex[:8]}" - user_id = f"user-{uuid4().hex[:8]}" - - events: list[dict[str, Any]] = [] - async with aclosing(sse_service.create_execution_stream(exec_id, user_id)) as stream: - async for raw in stream: - if "data" in raw: - events.append(json.loads(raw["data"])) - if len(events) >= 2: - break - - # Verify CONNECTED event structure - connected = events[0] - assert connected["event_type"] == SSEControlEvent.CONNECTED - assert connected["execution_id"] == exec_id - assert "connection_id" in connected - assert connected["connection_id"].startswith(f"sse_{exec_id}_") - assert "timestamp" in connected - - # Verify SUBSCRIBED event structure - subscribed = events[1] - assert subscribed["event_type"] == SSEControlEvent.SUBSCRIBED - assert subscribed["execution_id"] == exec_id - assert "timestamp" in subscribed - assert subscribed["message"] == "Redis 
subscription established" - - @pytest.mark.asyncio - async def test_concurrent_streams_get_unique_connection_ids(self, scope: AsyncContainer) -> None: - """Each stream connection gets a unique connection_id.""" - import asyncio - - sse_service: SSEService = await scope.get(SSEService) - exec_id = f"exec-{uuid4().hex[:8]}" - - async def get_connection_id(user_id: str) -> str: - async with aclosing(sse_service.create_execution_stream(exec_id, user_id)) as stream: - async for raw in stream: - if "data" in raw: - data = json.loads(raw["data"]) - if data.get("event_type") == SSEControlEvent.CONNECTED: - return str(data["connection_id"]) - return "" - - conn_ids = await asyncio.gather( - get_connection_id("user-1"), - get_connection_id("user-2"), - get_connection_id("user-3"), - ) - - # All connection IDs should be unique - assert len(set(conn_ids)) == 3 - assert all(cid.startswith(f"sse_{exec_id}_") for cid in conn_ids) diff --git a/backend/tests/integration/test_user_settings_routes.py b/backend/tests/integration/test_user_settings_routes.py deleted file mode 100644 index 9338346f..00000000 --- a/backend/tests/integration/test_user_settings_routes.py +++ /dev/null @@ -1,414 +0,0 @@ -from datetime import datetime, timezone -from typing import TypedDict - -import pytest -from app.schemas_pydantic.user_settings import SettingsHistoryResponse, UserSettings -from httpx import AsyncClient - - -class _NotificationSettings(TypedDict): - execution_completed: bool - execution_failed: bool - system_updates: bool - security_alerts: bool - channels: list[str] - - -class _EditorSettings(TypedDict): - theme: str - font_size: int - tab_size: int - use_tabs: bool - word_wrap: bool - show_line_numbers: bool - - -class _UpdateSettingsData(TypedDict, total=False): - theme: str - timezone: str - date_format: str - time_format: str - notifications: _NotificationSettings - editor: _EditorSettings - custom_settings: dict[str, str] - - -# Force these tests to run sequentially on a single 
worker to avoid state conflicts -pytestmark = pytest.mark.xdist_group(name="user_settings") - - -@pytest.mark.integration -class TestUserSettingsRoutes: - """Test user settings endpoints against real backend.""" - - @pytest.mark.asyncio - async def test_user_settings_require_authentication(self, client: AsyncClient) -> None: - """Test that user settings endpoints require authentication.""" - # Try to access settings without auth - response = await client.get("/api/v1/user/settings/") - assert response.status_code == 401 - - error_data = response.json() - assert "detail" in error_data - assert any(word in error_data["detail"].lower() - for word in ["not authenticated", "unauthorized", "login"]) - - @pytest.mark.asyncio - async def test_get_user_settings(self, test_user: AsyncClient) -> None: - """Test getting user settings.""" - # Already authenticated via test_user fixture - - # Get user settings - response = await test_user.get("/api/v1/user/settings/") - assert response.status_code == 200 - - # Validate response structure - settings_data = response.json() - settings = UserSettings(**settings_data) - - # Verify required fields - assert settings.user_id is not None - assert settings.theme in ["light", "dark", "auto", "system"] - # Language field may not be present in all deployments - if hasattr(settings, "language"): - assert isinstance(settings.language, str) - assert isinstance(settings.timezone, str) - - # Verify notification settings (API uses execution_* and security_alerts fields) - assert settings.notifications is not None - assert isinstance(settings.notifications.execution_completed, bool) - assert isinstance(settings.notifications.execution_failed, bool) - assert isinstance(settings.notifications.system_updates, bool) - assert isinstance(settings.notifications.security_alerts, bool) - - # Verify editor settings - assert settings.editor is not None - assert isinstance(settings.editor.font_size, int) - assert 8 <= settings.editor.font_size <= 32 - assert 
settings.editor.theme in ["auto", "one-dark", "monokai", "github", "dracula", "solarized", "vs", - "vscode"] - assert isinstance(settings.editor.tab_size, int) - assert settings.editor.tab_size in [2, 4, 8] - assert isinstance(settings.editor.word_wrap, bool) - assert isinstance(settings.editor.show_line_numbers, bool) - - # Verify timestamp fields - assert settings.created_at is not None - assert settings.updated_at is not None - - # Custom settings might be empty or contain user preferences - if settings.custom_settings: - assert isinstance(settings.custom_settings, dict) - - @pytest.mark.asyncio - async def test_update_user_settings(self, test_user: AsyncClient) -> None: - """Test updating user settings.""" - # Already authenticated via test_user fixture - - # Get current settings to preserve original values - original_response = await test_user.get("/api/v1/user/settings/") - assert original_response.status_code == 200 - original_settings = original_response.json() - - # Update settings - update_data: _UpdateSettingsData = { - "theme": "dark" if original_settings["theme"] == "light" else "light", - "timezone": "America/New_York" if original_settings["timezone"] != "America/New_York" else "UTC", - "date_format": "MM/DD/YYYY", - "time_format": "12h", - "notifications": { - "execution_completed": False, - "execution_failed": True, - "system_updates": True, - "security_alerts": True, - "channels": ["in_app", "webhook"] - }, - "editor": { - "theme": "monokai", - "font_size": 14, - "tab_size": 4, - "use_tabs": False, - "word_wrap": True, - "show_line_numbers": True - } - } - - response = await test_user.put("/api/v1/user/settings/", json=update_data) - if response.status_code != 200: - pytest.fail(f"Status: {response.status_code}, Body: {response.json()}, Data: {update_data}") - assert response.status_code == 200 - - # Validate updated settings - updated_settings = UserSettings(**response.json()) - assert updated_settings.theme == update_data["theme"] - assert 
updated_settings.timezone == update_data["timezone"] - assert updated_settings.date_format == update_data["date_format"] - assert updated_settings.time_format == update_data["time_format"] - - # Verify notification settings were updated - assert updated_settings.notifications.execution_completed == update_data["notifications"][ - "execution_completed"] - assert updated_settings.notifications.execution_failed == update_data["notifications"]["execution_failed"] - assert updated_settings.notifications.system_updates == update_data["notifications"]["system_updates"] - assert updated_settings.notifications.security_alerts == update_data["notifications"]["security_alerts"] - assert "in_app" in [str(c) for c in updated_settings.notifications.channels] - - # Verify editor settings were updated - assert updated_settings.editor.theme == update_data["editor"]["theme"] - assert updated_settings.editor.font_size == update_data["editor"]["font_size"] - assert updated_settings.editor.tab_size == update_data["editor"]["tab_size"] - assert updated_settings.editor.word_wrap == update_data["editor"]["word_wrap"] - assert updated_settings.editor.show_line_numbers == update_data["editor"]["show_line_numbers"] - - @pytest.mark.asyncio - async def test_update_theme_only(self, test_user: AsyncClient) -> None: - """Test updating only the theme setting.""" - # Already authenticated via test_user fixture - - # Get current theme - original_response = await test_user.get("/api/v1/user/settings/") - assert original_response.status_code == 200 - original_theme = original_response.json()["theme"] - - # Update theme - new_theme = "dark" if original_theme != "dark" else "light" - theme_update = { - "theme": new_theme - } - - response = await test_user.put("/api/v1/user/settings/theme", json=theme_update) - assert response.status_code == 200 - - # Validate updated settings - updated_payload = response.json() - updated_settings = UserSettings(**updated_payload) - assert updated_settings.theme == 
new_theme - - # Other settings should remain unchanged (language optional) - if "language" in original_response.json(): - assert updated_payload.get("language") == original_response.json()["language"] - assert updated_settings.timezone == original_response.json()["timezone"] - - @pytest.mark.asyncio - async def test_update_notification_settings_only(self, test_user: AsyncClient) -> None: - """Test updating only notification settings.""" - # Already authenticated via test_user fixture - - # Update notification settings - notification_update = { - "execution_completed": True, - "execution_failed": True, - "system_updates": False, - "security_alerts": True, - "channels": ["in_app"] - } - - response = await test_user.put("/api/v1/user/settings/notifications", json=notification_update) - if response.status_code >= 500: - pytest.skip("Notification settings update not available in this environment") - assert response.status_code == 200 - - # Validate updated settings - updated_settings = UserSettings(**response.json()) - assert updated_settings.notifications.execution_completed == notification_update["execution_completed"] - assert updated_settings.notifications.execution_failed == notification_update["execution_failed"] - assert updated_settings.notifications.system_updates == notification_update["system_updates"] - assert updated_settings.notifications.security_alerts == notification_update["security_alerts"] - assert "in_app" in [str(c) for c in updated_settings.notifications.channels] - - @pytest.mark.asyncio - async def test_update_editor_settings_only(self, test_user: AsyncClient) -> None: - """Test updating only editor settings.""" - # Already authenticated via test_user fixture - - # Update editor settings - editor_update = { - "theme": "dracula", - "font_size": 16, - "tab_size": 2, - "use_tabs": False, - "word_wrap": False, - "show_line_numbers": True - } - - response = await test_user.put("/api/v1/user/settings/editor", json=editor_update) - if 
response.status_code >= 500: - pytest.skip("Editor settings update not available in this environment") - assert response.status_code == 200 - - # Validate updated settings - updated_settings = UserSettings(**response.json()) - assert updated_settings.editor.theme == editor_update["theme"] - assert updated_settings.editor.font_size == editor_update["font_size"] - assert updated_settings.editor.tab_size == editor_update["tab_size"] - assert updated_settings.editor.word_wrap == editor_update["word_wrap"] - assert updated_settings.editor.show_line_numbers == editor_update["show_line_numbers"] - - @pytest.mark.asyncio - async def test_update_custom_setting(self, test_user: AsyncClient) -> None: - """Test updating a custom setting.""" - # Update custom settings via main settings endpoint - custom_key = "custom_preference" - custom_value = "custom_value_123" - update_data = { - "custom_settings": { - custom_key: custom_value - } - } - - response = await test_user.put("/api/v1/user/settings/", json=update_data) - assert response.status_code == 200 - - # Validate updated settings - updated_settings = UserSettings(**response.json()) - assert custom_key in updated_settings.custom_settings - assert updated_settings.custom_settings[custom_key] == custom_value - - @pytest.mark.asyncio - async def test_get_settings_history(self, test_user: AsyncClient) -> None: - """Test getting settings change history.""" - # Make some changes to build history (theme change) - theme_update = {"theme": "dark"} - response = await test_user.put("/api/v1/user/settings/theme", json=theme_update) - if response.status_code >= 500: - pytest.skip("Settings history not available in this environment") - - # Get history - history_response = await test_user.get("/api/v1/user/settings/history") - if history_response.status_code >= 500: - pytest.skip("Settings history endpoint not available in this environment") - assert history_response.status_code == 200 - - # Validate history structure - history = 
SettingsHistoryResponse(**history_response.json()) - assert isinstance(history.history, list) - - # If we have history entries, validate them - for entry in history.history: - assert entry.timestamp is not None - - @pytest.mark.asyncio - async def test_restore_settings_to_previous_point(self, test_user: AsyncClient) -> None: - """Test restoring settings to a previous point in time.""" - # Get original settings - original_resp = await test_user.get("/api/v1/user/settings/") - assert original_resp.status_code == 200 - original_theme = original_resp.json()["theme"] - - # Make a change - new_theme = "dark" if original_theme != "dark" else "light" - await test_user.put("/api/v1/user/settings/theme", json={"theme": new_theme}) - - # Get restore point - timestamps are monotonic by definition - restore_point = datetime.now(timezone.utc).isoformat() - - # Make another change - second_theme = "auto" if new_theme != "auto" else "system" - await test_user.put("/api/v1/user/settings/theme", json={"theme": second_theme}) - - # Try to restore to the restore point - restore_data = {"timestamp": restore_point} - restore_resp = await test_user.post("/api/v1/user/settings/restore", json=restore_data) - - # Skip if restore functionality not available - if restore_resp.status_code >= 500: - pytest.skip("Settings restore not available in this environment") - - # If successful, verify the theme was restored - if restore_resp.status_code == 200: - current_resp = await test_user.get("/api/v1/user/settings/") - # Since restore might not work exactly as expected in test environment, - # just verify we get valid settings back - assert current_resp.status_code == 200 - - @pytest.mark.asyncio - async def test_invalid_theme_value(self, test_user: AsyncClient) -> None: - """Test that invalid theme values are rejected.""" - # Already authenticated via test_user fixture - - # Try to update with invalid theme - invalid_theme = {"theme": "invalid_theme"} - - response = await 
test_user.put("/api/v1/user/settings/theme", json=invalid_theme) - if response.status_code >= 500: - pytest.skip("Theme validation not available in this environment") - assert response.status_code in [400, 422] - - @pytest.mark.asyncio - async def test_invalid_editor_settings(self, test_user: AsyncClient) -> None: - """Test that invalid editor settings are rejected.""" - # Already authenticated via test_user fixture - - # Try to update with invalid editor settings - invalid_editor = { - "theme": "dracula", - "font_size": 100, # Invalid: out of range - "tab_size": 3, # Invalid: not 2, 4, or 8 - "use_tabs": False, - "word_wrap": True, - "show_line_numbers": True - } - - response = await test_user.put("/api/v1/user/settings/editor", json=invalid_editor) - if response.status_code >= 500: - pytest.skip("Editor validation not available in this environment") - assert response.status_code in [400, 422] - - @pytest.mark.asyncio - async def test_settings_isolation_between_users(self, - test_user: AsyncClient, - another_user: AsyncClient) -> None: - """Test that settings are isolated between users.""" - - # Update first user's settings - user1_update = { - "theme": "dark", - "timezone": "America/New_York" - } - response = await test_user.put("/api/v1/user/settings/", json=user1_update) - assert response.status_code == 200 - - # Get second user's settings - response = await another_user.get("/api/v1/user/settings/") - assert response.status_code == 200 - user2_settings = response.json() - - # Verify second user's settings are not affected by first user's changes - # Second user should have default settings, not the first user's custom settings - assert user2_settings["theme"] != user1_update["theme"] or user2_settings["timezone"] != user1_update[ - "timezone"] - - @pytest.mark.asyncio - async def test_settings_persistence(self, test_user: AsyncClient) -> None: - """Test that settings persist after being saved.""" - # Update settings - editor_settings: _EditorSettings = { - 
"theme": "github", - "font_size": 18, - "tab_size": 8, - "use_tabs": True, - "word_wrap": False, - "show_line_numbers": False - } - update_data: _UpdateSettingsData = { - "theme": "dark", - "timezone": "Europe/London", - "editor": editor_settings - } - - response = await test_user.put("/api/v1/user/settings/", json=update_data) - assert response.status_code == 200 - - # Get settings again to verify persistence - response = await test_user.get("/api/v1/user/settings/") - assert response.status_code == 200 - persisted_settings = UserSettings(**response.json()) - - # Verify settings persisted - assert persisted_settings.theme == update_data["theme"] - assert persisted_settings.timezone == update_data["timezone"] - assert persisted_settings.editor.theme == editor_settings["theme"] - assert persisted_settings.editor.font_size == editor_settings["font_size"] - assert persisted_settings.editor.tab_size == editor_settings["tab_size"] - assert persisted_settings.editor.word_wrap == editor_settings["word_wrap"] - assert persisted_settings.editor.show_line_numbers == editor_settings["show_line_numbers"] diff --git a/backend/tests/unit/core/test_csrf.py b/backend/tests/unit/core/test_csrf.py index eb5e3816..674d49d4 100644 --- a/backend/tests/unit/core/test_csrf.py +++ b/backend/tests/unit/core/test_csrf.py @@ -5,8 +5,13 @@ from starlette.requests import Request -def make_request(method: str, path: str, headers: dict[str, str] | None = None, - cookies: dict[str, str] | None = None) -> Request: +def make_request( + method: str, + path: str, + headers: dict[str, str] | None = None, + cookies: dict[str, str] | None = None, +) -> Request: + """Create a mock Starlette Request for testing.""" headers = headers or {} if cookies: cookie_header = "; ".join(f"{k}={v}" for k, v in cookies.items()) @@ -20,26 +25,127 @@ def make_request(method: str, path: str, headers: dict[str, str] | None = None, return Request(scope) -def test_csrf_skips_on_get(test_settings: Settings) -> None: - 
security_service = SecurityService(test_settings) - req = make_request("GET", "/api/v1/anything") - assert security_service.validate_csrf_from_request(req) == "skip" +class TestCSRFTokenGeneration: + """Tests for CSRF token generation.""" + def test_generates_token_with_sufficient_entropy( + self, test_settings: Settings + ) -> None: + """CSRF token is generated with sufficient entropy.""" + security = SecurityService(test_settings) -def test_csrf_missing_header_raises_when_authenticated(test_settings: Settings) -> None: - security_service = SecurityService(test_settings) - req = make_request("POST", "/api/v1/items", cookies={"access_token": "tok", "csrf_token": "abc"}) - with pytest.raises(CSRFValidationError): - security_service.validate_csrf_from_request(req) + token = security.generate_csrf_token() + assert isinstance(token, str) + # token_urlsafe(32) produces ~43 characters + assert len(token) >= 40 -def test_csrf_valid_tokens(test_settings: Settings) -> None: - security_service = SecurityService(test_settings) - token = security_service.generate_csrf_token() - req = make_request( - "POST", - "/api/v1/items", - headers={"X-CSRF-Token": token}, - cookies={"access_token": "tok", "csrf_token": token}, + def test_generates_unique_tokens(self, test_settings: Settings) -> None: + """Each CSRF token is unique.""" + security = SecurityService(test_settings) + + tokens = {security.generate_csrf_token() for _ in range(100)} + + # All 100 tokens should be unique + assert len(tokens) == 100 + + +class TestCSRFTokenValidation: + """Tests for CSRF token validation.""" + + def test_validates_matching_tokens(self, test_settings: Settings) -> None: + """Matching CSRF tokens pass validation.""" + security = SecurityService(test_settings) + token = security.generate_csrf_token() + + result = security.validate_csrf_token(token, token) + + assert result is True + + def test_rejects_mismatched_tokens(self, test_settings: Settings) -> None: + """Mismatched CSRF tokens fail 
validation.""" + security = SecurityService(test_settings) + + token1 = security.generate_csrf_token() + token2 = security.generate_csrf_token() + + result = security.validate_csrf_token(token1, token2) + + assert result is False + + @pytest.mark.parametrize( + ("header_token", "cookie_token"), + [ + ("", "valid_token"), + ("valid_token", ""), + ("", ""), + ], + ids=["empty_header", "empty_cookie", "both_empty"], ) - assert security_service.validate_csrf_from_request(req) == token + def test_rejects_empty_tokens( + self, test_settings: Settings, header_token: str, cookie_token: str + ) -> None: + """Empty CSRF tokens fail validation.""" + security = SecurityService(test_settings) + + result = security.validate_csrf_token(header_token, cookie_token) + + assert result is False + + +class TestCSRFExemptPaths: + """Tests for CSRF exempt path configuration.""" + + def test_exempt_paths_includes_auth_endpoints( + self, test_settings: Settings + ) -> None: + """CSRF exempt paths include auth endpoints.""" + security = SecurityService(test_settings) + + assert "/api/v1/auth/login" in security.CSRF_EXEMPT_PATHS + assert "/api/v1/auth/register" in security.CSRF_EXEMPT_PATHS + assert "/api/v1/auth/logout" in security.CSRF_EXEMPT_PATHS + + def test_exempt_paths_is_frozenset(self, test_settings: Settings) -> None: + """CSRF exempt paths is a frozenset (immutable).""" + security = SecurityService(test_settings) + + assert isinstance(security.CSRF_EXEMPT_PATHS, frozenset) + + +class TestCSRFRequestValidation: + """Tests for CSRF validation from HTTP requests.""" + + def test_skips_get_requests(self, test_settings: Settings) -> None: + """GET requests skip CSRF validation.""" + security = SecurityService(test_settings) + req = make_request("GET", "/api/v1/anything") + + assert security.validate_csrf_from_request(req) == "skip" + + def test_missing_header_raises_when_authenticated( + self, test_settings: Settings + ) -> None: + """Missing CSRF header raises error for authenticated 
POST.""" + security = SecurityService(test_settings) + req = make_request( + "POST", + "/api/v1/items", + cookies={"access_token": "tok", "csrf_token": "abc"}, + ) + + with pytest.raises(CSRFValidationError): + security.validate_csrf_from_request(req) + + def test_valid_tokens_pass(self, test_settings: Settings) -> None: + """Valid matching CSRF tokens pass validation.""" + security = SecurityService(test_settings) + token = security.generate_csrf_token() + req = make_request( + "POST", + "/api/v1/items", + headers={"X-CSRF-Token": token}, + cookies={"access_token": "tok", "csrf_token": token}, + ) + + assert security.validate_csrf_from_request(req) == token diff --git a/backend/tests/unit/core/test_exception_handlers.py b/backend/tests/unit/core/test_exception_handlers.py new file mode 100644 index 00000000..81657d80 --- /dev/null +++ b/backend/tests/unit/core/test_exception_handlers.py @@ -0,0 +1,48 @@ +import pytest +from app.core.exceptions.handlers import _map_to_status_code +from app.domain.exceptions import ( + ConflictError, + DomainError, + ForbiddenError, + InfrastructureError, + InvalidStateError, + NotFoundError, + ThrottledError, + UnauthorizedError, + ValidationError, +) + + +class TestExceptionMapping: + """Tests for domain exception to HTTP status code mapping.""" + + @pytest.mark.parametrize( + ("exception", "expected_status"), + [ + (NotFoundError(entity="User", identifier="123"), 404), + (ValidationError(message="Invalid input"), 422), + (ThrottledError(message="Rate limit exceeded"), 429), + (ConflictError(message="Resource already exists"), 409), + (UnauthorizedError(message="Invalid credentials"), 401), + (ForbiddenError(message="Access denied"), 403), + (InvalidStateError(message="Invalid state transition"), 400), + (InfrastructureError(message="Database connection failed"), 500), + (DomainError(message="Unknown error"), 500), + ], + ids=[ + "not_found_404", + "validation_422", + "throttled_429", + "conflict_409", + "unauthorized_401", + 
"forbidden_403", + "invalid_state_400", + "infrastructure_500", + "generic_domain_500", + ], + ) + def test_exception_maps_to_correct_status( + self, exception: DomainError, expected_status: int + ) -> None: + """Domain exception maps to correct HTTP status code.""" + assert _map_to_status_code(exception) == expected_status diff --git a/backend/tests/unit/core/test_logging_and_correlation.py b/backend/tests/unit/core/test_logging_and_correlation.py index f535ab9f..e54cc0be 100644 --- a/backend/tests/unit/core/test_logging_and_correlation.py +++ b/backend/tests/unit/core/test_logging_and_correlation.py @@ -5,7 +5,13 @@ import pytest from app.core.correlation import CorrelationContext, CorrelationMiddleware -from app.core.logging import CorrelationFilter, JSONFormatter, setup_logger +from app.core.logging import ( + CorrelationFilter, + JSONFormatter, + correlation_id_context, + request_metadata_context, + setup_logger, +) from starlette.applications import Starlette from starlette.requests import Request from starlette.responses import JSONResponse @@ -13,15 +19,18 @@ from starlette.testclient import TestClient -def capture_log(formatter: logging.Formatter, msg: str, extra: dict[str, Any] | None = None) -> dict[str, Any]: - logger = logging.getLogger("t") +def capture_log( + formatter: logging.Formatter, + msg: str, + extra: dict[str, Any] | None = None, +) -> dict[str, Any]: + """Capture log output as parsed JSON.""" + logger = logging.getLogger("test_capture") - # Use StringIO to capture output string_io = io.StringIO() stream = logging.StreamHandler(string_io) stream.setFormatter(formatter) - # Add the correlation filter correlation_filter = CorrelationFilter() stream.addFilter(correlation_filter) @@ -29,11 +38,9 @@ def capture_log(formatter: logging.Formatter, msg: str, extra: dict[str, Any] | logger.setLevel(logging.INFO) logger.propagate = False - # Log the message logger.info(msg, extra=extra or {}) stream.flush() - # Get the formatted output output = 
string_io.getvalue() string_io.close() @@ -42,49 +49,302 @@ def capture_log(formatter: logging.Formatter, msg: str, extra: dict[str, Any] | return result # Fallback: create and format record manually - lr = logging.LogRecord("t", logging.INFO, __file__, 1, msg, (), None, None) - # Apply the filter manually + lr = logging.LogRecord("test", logging.INFO, __file__, 1, msg, (), None, None) correlation_filter.filter(lr) s = formatter.format(lr) fallback_result: dict[str, Any] = json.loads(s) return fallback_result -def test_json_formatter_sanitizes_tokens(monkeypatch: pytest.MonkeyPatch) -> None: - # Force deterministic timestamp by monkeypatching datetime in formatter if needed - fmt = JSONFormatter() - msg = "Bearer abcd1234 and mongodb://user:secret@host/db and email a@b.com" - d = capture_log(fmt, msg) - s = d["message"] - assert "***BEARER_TOKEN_REDACTED***" in s - assert "***MONGODB_REDACTED***" in s - assert "***EMAIL_REDACTED***" in s - - -def test_correlation_context_and_filter() -> None: - CorrelationContext.set_correlation_id("cid-1") - CorrelationContext.set_request_metadata({"method": "GET", "path": "/x", "client": {"host": "1.2.3.4"}}) - d = capture_log(JSONFormatter(), "hello") - assert d["correlation_id"] == "cid-1" - assert d["request_method"] == "GET" - assert d["request_path"] == "/x" - assert d["client_host"] == "1.2.3.4" - CorrelationContext.clear() - - -def test_correlation_middleware_sets_header() -> None: - async def ping(request: Request) -> JSONResponse: - return JSONResponse({"ok": True}) - - app = Starlette(routes=[Route("/ping", ping)]) - app.add_middleware(CorrelationMiddleware) - with TestClient(app) as client: - r = client.get("/ping") - assert r.status_code == 200 - # Correlation header present - assert "X-Correlation-ID" in r.headers - - -def test_setup_logger_returns_logger() -> None: - lg = setup_logger(log_level="INFO") - assert hasattr(lg, "info") +class TestJSONFormatter: + """Tests for JSON log formatter.""" + + def 
test_formats_as_valid_json(self) -> None: + """Formatter outputs valid JSON.""" + formatter = JSONFormatter() + record = logging.LogRecord( + name="test", + level=logging.INFO, + pathname="test.py", + lineno=1, + msg="Test message", + args=(), + exc_info=None, + ) + + output = formatter.format(record) + parsed = json.loads(output) + + assert parsed["message"] == "Test message" + assert parsed["level"] == "INFO" + assert parsed["logger"] == "test" + assert "timestamp" in parsed + + def test_includes_correlation_id_from_record(self) -> None: + """Formatter includes correlation_id when present on record.""" + formatter = JSONFormatter() + record = logging.LogRecord( + name="test", + level=logging.INFO, + pathname="test.py", + lineno=1, + msg="Test", + args=(), + exc_info=None, + ) + record.correlation_id = "req_12345" + + output = formatter.format(record) + parsed = json.loads(output) + + assert parsed["correlation_id"] == "req_12345" + + def test_includes_request_metadata_from_record(self) -> None: + """Formatter includes request metadata when present on record.""" + formatter = JSONFormatter() + record = logging.LogRecord( + name="test", + level=logging.INFO, + pathname="test.py", + lineno=1, + msg="Test", + args=(), + exc_info=None, + ) + record.request_method = "POST" + record.request_path = "/api/v1/execute" + record.client_host = "192.168.1.1" + + output = formatter.format(record) + parsed = json.loads(output) + + assert parsed["request_method"] == "POST" + assert parsed["request_path"] == "/api/v1/execute" + assert parsed["client_host"] == "192.168.1.1" + + +class TestSensitiveDataSanitization: + """Tests for sensitive data sanitization in logs.""" + + @pytest.mark.parametrize( + ("input_data", "forbidden_text", "expected_marker"), + [ + ("api_key: secret12345", "secret12345", "REDACTED"), + ("Authorization: Bearer abc123xyz", "abc123xyz", "BEARER_TOKEN_REDACTED"), + ( + "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9." + "eyJzdWIiOiIxMjM0NTY3ODkwIn0." 
+ "dozjgNryP4J3jVmNHl0w5N_XgL0n3I9PlFUP0THsR8U", + "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9", + "JWT_REDACTED", + ), + ( + "mongodb://myuser:secretpass@localhost:27017/mydb", + "secretpass", + "MONGODB_REDACTED", + ), + ("user email: test@example.com", "test@example.com", "EMAIL_REDACTED"), + ('password: "mysecret123"', "mysecret123", "REDACTED"), + ( + "https://user:password@api.example.com/endpoint", + "password", + "URL_CREDS_REDACTED", + ), + ], + ids=[ + "api_key", + "bearer_token", + "jwt_token", + "mongodb_url", + "email", + "password_field", + "https_credentials", + ], + ) + def test_sanitizes_sensitive_data( + self, input_data: str, forbidden_text: str, expected_marker: str + ) -> None: + """Sensitive data is redacted from logs.""" + formatter = JSONFormatter() + + result = formatter._sanitize_sensitive_data(input_data) + + assert forbidden_text not in result + assert expected_marker in result + + def test_sanitizes_multiple_types_in_one_message(self) -> None: + """Multiple sensitive data types are sanitized in a single message.""" + formatter = JSONFormatter() + msg = "Bearer abcd1234 and mongodb://user:secret@host/db and email a@b.com" + + result = capture_log(formatter, msg) + sanitized = result["message"] + + assert "BEARER_TOKEN_REDACTED" in sanitized + assert "MONGODB_REDACTED" in sanitized + assert "EMAIL_REDACTED" in sanitized + + +class TestCorrelationFilter: + """Tests for correlation filter.""" + + def test_adds_correlation_id_from_context(self) -> None: + """Filter adds correlation_id from context to record.""" + filter_ = CorrelationFilter() + + token = correlation_id_context.set("test-correlation-123") + try: + record = logging.LogRecord( + name="test", + level=logging.INFO, + pathname="test.py", + lineno=1, + msg="Test", + args=(), + exc_info=None, + ) + + result = filter_.filter(record) + + assert result is True + assert record.correlation_id == "test-correlation-123" # type: ignore[attr-defined] + finally: + 
correlation_id_context.reset(token) + + def test_adds_request_metadata_from_context(self) -> None: + """Filter adds request metadata from context to record.""" + filter_ = CorrelationFilter() + + metadata = { + "method": "GET", + "path": "/api/v1/test", + "client": {"host": "127.0.0.1"}, + } + token = request_metadata_context.set(metadata) + try: + record = logging.LogRecord( + name="test", + level=logging.INFO, + pathname="test.py", + lineno=1, + msg="Test", + args=(), + exc_info=None, + ) + + result = filter_.filter(record) + + assert result is True + assert record.request_method == "GET" # type: ignore[attr-defined] + assert record.request_path == "/api/v1/test" # type: ignore[attr-defined] + assert record.client_host == "127.0.0.1" # type: ignore[attr-defined] + finally: + request_metadata_context.reset(token) + + def test_always_returns_true(self) -> None: + """Filter always returns True (never drops records).""" + filter_ = CorrelationFilter() + record = logging.LogRecord( + name="test", + level=logging.INFO, + pathname="test.py", + lineno=1, + msg="Test", + args=(), + exc_info=None, + ) + + assert filter_.filter(record) is True + + +class TestCorrelationContext: + """Tests for CorrelationContext usage.""" + + def test_context_and_filter_integration(self) -> None: + """CorrelationContext integrates with CorrelationFilter.""" + CorrelationContext.set_correlation_id("cid-1") + CorrelationContext.set_request_metadata( + {"method": "GET", "path": "/x", "client": {"host": "1.2.3.4"}} + ) + + result = capture_log(JSONFormatter(), "hello") + + assert result["correlation_id"] == "cid-1" + assert result["request_method"] == "GET" + assert result["request_path"] == "/x" + assert result["client_host"] == "1.2.3.4" + + CorrelationContext.clear() + + +class TestCorrelationMiddleware: + """Tests for CorrelationMiddleware.""" + + def test_sets_correlation_header(self) -> None: + """Middleware sets X-Correlation-ID response header.""" + + async def ping(request: Request) -> 
JSONResponse: + return JSONResponse({"ok": True}) + + app = Starlette(routes=[Route("/ping", ping)]) + app.add_middleware(CorrelationMiddleware) + + with TestClient(app) as client: + response = client.get("/ping") + + assert response.status_code == 200 + assert "X-Correlation-ID" in response.headers + + +class TestSetupLogger: + """Tests for logger setup.""" + + def test_creates_named_logger(self) -> None: + """setup_logger creates logger with correct name.""" + logger = setup_logger("INFO") + + assert logger.name == "integr8scode" + + def test_sets_correct_level(self) -> None: + """Logger is set to correct level.""" + logger = setup_logger("WARNING") + + assert logger.level == logging.WARNING + + def test_handles_case_insensitive_level(self) -> None: + """Logger handles case-insensitive level strings.""" + logger = setup_logger("debug") + + assert logger.level == logging.DEBUG + + def test_has_json_formatter(self) -> None: + """Logger has JSON formatter attached.""" + logger = setup_logger("INFO") + + assert len(logger.handlers) > 0 + handler = logger.handlers[0] + assert isinstance(handler.formatter, JSONFormatter) + + def test_has_correlation_filter(self) -> None: + """Logger has correlation filter attached.""" + logger = setup_logger("INFO") + + assert len(logger.handlers) > 0 + handler = logger.handlers[0] + filter_types = [type(f).__name__ for f in handler.filters] + assert "CorrelationFilter" in filter_types + + def test_clears_existing_handlers(self) -> None: + """setup_logger clears existing handlers.""" + logger1 = setup_logger("INFO") + initial_handlers = len(logger1.handlers) + + logger2 = setup_logger("DEBUG") + + assert len(logger2.handlers) == initial_handlers + + def test_returns_logger(self) -> None: + """setup_logger returns a logger instance.""" + lg = setup_logger(log_level="INFO") + + assert hasattr(lg, "info") diff --git a/backend/tests/unit/events/test_event_dispatcher.py b/backend/tests/unit/events/test_event_dispatcher.py index 
6bda67e8..ea526c95 100644 --- a/backend/tests/unit/events/test_event_dispatcher.py +++ b/backend/tests/unit/events/test_event_dispatcher.py @@ -4,7 +4,7 @@ from app.domain.events.typed import DomainEvent from app.events.core import EventDispatcher -from tests.helpers import make_execution_requested_event +from tests.conftest import make_execution_requested_event _test_logger = logging.getLogger("test.events.event_dispatcher") diff --git a/backend/tests/unit/services/coordinator/test_queue_manager.py b/backend/tests/unit/services/coordinator/test_queue_manager.py index b4b39b2d..671b19a7 100644 --- a/backend/tests/unit/services/coordinator/test_queue_manager.py +++ b/backend/tests/unit/services/coordinator/test_queue_manager.py @@ -5,7 +5,7 @@ from app.domain.events.typed import ExecutionRequestedEvent from app.services.coordinator.queue_manager import QueueManager, QueuePriority -from tests.helpers import make_execution_requested_event +from tests.conftest import make_execution_requested_event _test_logger = logging.getLogger("test.services.coordinator.queue_manager") diff --git a/backend/tests/helpers/k8s_fakes.py b/backend/tests/unit/services/pod_monitor/conftest.py similarity index 65% rename from backend/tests/helpers/k8s_fakes.py rename to backend/tests/unit/services/pod_monitor/conftest.py index d45f0895..46d3c84b 100644 --- a/backend/tests/helpers/k8s_fakes.py +++ b/backend/tests/unit/services/pod_monitor/conftest.py @@ -1,14 +1,12 @@ -"""Lightweight K8s pod/watch stubs for unit tests. - -These provide only the attributes PodEventMapper/PodMonitor touch, keeping -tests fast and self-contained without importing heavy Kubernetes models. 
-""" - from __future__ import annotations from typing import Any, Iterable +# ===== Pod data model stubs ===== +# These create test data, not "fakes" - they're test data factories + + class Meta: def __init__( self, @@ -105,6 +103,9 @@ def __init__( self.spec = Spec(adl) +# ===== Factory functions ===== + + def make_pod( *, name: str, @@ -119,6 +120,7 @@ def make_pod( node_name: str | None = None, resource_version: str | None = None, ) -> Pod: + """Create a test Pod with sensible defaults.""" cs: list[ContainerStatus] = [] if waiting_reason is not None: cs.append(ContainerStatus(State(waiting=Waiting(waiting_reason, waiting_message)))) @@ -138,23 +140,18 @@ def make_pod( return pod -class FakeApi: - def __init__(self, logs: str) -> None: - self._logs = logs - - def read_namespaced_pod_log(self, name: str, namespace: str, tail_lines: int = 10000) -> str: # noqa: ARG002 - return self._logs +# ===== Watch stream helpers ===== class StopEvent: - """Fake stop event for FakeWatch - holds resource_version.""" + """Stop event for watch stream - holds resource_version.""" def __init__(self, resource_version: str) -> None: self.resource_version = resource_version -class FakeWatchStream: - """Fake watch stream object returned by FakeWatch.stream(). +class MockWatchStream: + """Mock watch stream that yields events from a list. The real kubernetes watch stream has a _stop_event attribute that holds the resource_version for use by _update_resource_version. 
@@ -165,7 +162,7 @@ def __init__(self, events: list[dict[str, Any]], resource_version: str) -> None: self._stop_event = StopEvent(resource_version) self._index = 0 - def __iter__(self) -> "FakeWatchStream": + def __iter__(self) -> "MockWatchStream": return self def __next__(self) -> dict[str, Any]: @@ -176,61 +173,36 @@ def __next__(self) -> dict[str, Any]: return event -class FakeWatch: - """Fake kubernetes Watch for testing.""" - - def __init__(self, events: list[dict[str, Any]], resource_version: str) -> None: - self._events = events - self._rv = resource_version - - def stream( - self, func: Any, **kwargs: Any # noqa: ARG002 - ) -> FakeWatchStream: - return FakeWatchStream(self._events, self._rv) +def make_mock_watch(events: list[dict[str, Any]], resource_version: str = "rv2") -> Any: + """Create a mock Watch that returns the given events. - def stop(self) -> None: - return None - - -def make_watch(events: list[dict[str, Any]], resource_version: str = "rv2") -> FakeWatch: - return FakeWatch(events, resource_version) - - -class FakeV1Api: - """Fake CoreV1Api for testing PodMonitor.""" - - def __init__(self, logs: str = "{}", pods: list[Pod] | None = None) -> None: - self._logs = logs - self._pods = pods or [] - - def read_namespaced_pod_log(self, name: str, namespace: str, tail_lines: int = 10000) -> str: # noqa: ARG002 - return self._logs - - def get_api_resources(self) -> None: - """Stub for connectivity check.""" - return None + Usage: + mock_watch = make_mock_watch([{"type": "MODIFIED", "object": pod}]) + mock_watch.stream.return_value = MockWatchStream(events, "rv2") + """ + from unittest.mock import MagicMock - def list_namespaced_pod(self, namespace: str, label_selector: str) -> Any: # noqa: ARG002 - """Return configured pods for reconciliation tests.""" + mock = MagicMock() + mock.stream.return_value = MockWatchStream(events, resource_version) + mock.stop.return_value = None + return mock - class PodList: - def __init__(self, items: list[Pod]) -> None: - 
self.items = items - return PodList(list(self._pods)) +def make_mock_v1_api(logs: str = "{}", pods: list[Pod] | None = None) -> Any: + """Create a mock CoreV1Api with configurable responses. + Usage: + mock_api = make_mock_v1_api(logs='{"stdout":"ok","stderr":"","exit_code":0}') + """ + from unittest.mock import MagicMock -def make_k8s_clients( - logs: str = "{}", - events: list[dict[str, Any]] | None = None, - resource_version: str = "rv1", - pods: list[Pod] | None = None, -) -> tuple[FakeV1Api, FakeWatch]: - """Create fake K8s clients for testing. + mock = MagicMock() + mock.read_namespaced_pod_log.return_value = logs + mock.get_api_resources.return_value = None - Returns (v1_api, watch) tuple for pure DI into PodMonitor. - """ - v1 = FakeV1Api(logs=logs, pods=pods) - watch = make_watch(events or [], resource_version) - return v1, watch + class PodList: + def __init__(self, items: list[Pod]) -> None: + self.items = items + mock.list_namespaced_pod.return_value = PodList(list(pods or [])) + return mock diff --git a/backend/tests/unit/services/pod_monitor/test_event_mapper.py b/backend/tests/unit/services/pod_monitor/test_event_mapper.py index 2314de3b..1d5632e9 100644 --- a/backend/tests/unit/services/pod_monitor/test_event_mapper.py +++ b/backend/tests/unit/services/pod_monitor/test_event_mapper.py @@ -1,5 +1,6 @@ import json import logging +from unittest.mock import MagicMock import pytest from app.domain.enums.events import EventType @@ -13,9 +14,8 @@ ) from app.services.pod_monitor.event_mapper import PodContext, PodEventMapper -from tests.helpers.k8s_fakes import ( +from tests.unit.services.pod_monitor.conftest import ( ContainerStatus, - FakeApi, Pod, State, Terminated, @@ -37,6 +37,13 @@ def _ctx(pod: Pod, event_type: str = "ADDED") -> PodContext: ) +def _make_mock_api(logs: str = "{}") -> MagicMock: + """Create a mock API that returns the given logs.""" + mock = MagicMock() + mock.read_namespaced_pod_log.return_value = logs + return mock + + def 
test_pending_running_and_succeeded_mapping() -> None: logs_json = json.dumps({ "stdout": "ok", @@ -49,7 +56,7 @@ def test_pending_running_and_succeeded_mapping() -> None: "peak_memory_kb": 0, }, }) - pem = PodEventMapper(k8s_api=FakeApi(logs_json), logger=_test_logger) + pem = PodEventMapper(k8s_api=_make_mock_api(logs_json), logger=_test_logger) # Pending -> scheduled (set execution-id label and PodScheduled condition) pend = Pod("p", "Pending") @@ -91,7 +98,7 @@ def __init__(self, t: str, s: str) -> None: def test_failed_timeout_and_deleted() -> None: valid_logs = json.dumps({"stdout": "", "stderr": "", "exit_code": 137, "resource_usage": {}}) - pem = PodEventMapper(k8s_api=FakeApi(valid_logs), logger=_test_logger) + pem = PodEventMapper(k8s_api=_make_mock_api(valid_logs), logger=_test_logger) # Timeout via DeadlineExceeded pod_to = Pod( @@ -105,7 +112,7 @@ def test_failed_timeout_and_deleted() -> None: # Failed: terminated exit_code nonzero, message used as stderr, error type defaults to SCRIPT_ERROR # Note: ExecutionFailedEvent can have None resource_usage when logs extraction fails - pem_no_logs = PodEventMapper(k8s_api=FakeApi(""), logger=_test_logger) + pem_no_logs = PodEventMapper(k8s_api=_make_mock_api(""), logger=_test_logger) pod_fail = Pod("p2", "Failed", cs=[ContainerStatus(State(terminated=Terminated(2, message="boom")))]) pod_fail.metadata.labels = {"execution-id": "e2"} evf = pem_no_logs.map_pod_event(pod_fail, "MODIFIED")[0] @@ -114,7 +121,7 @@ def test_failed_timeout_and_deleted() -> None: # Deleted -> terminated when container terminated present (exit code 0 returns completed for DELETED) valid_logs_0 = json.dumps({"stdout": "", "stderr": "", "exit_code": 0, "resource_usage": {}}) - pem_completed = PodEventMapper(k8s_api=FakeApi(valid_logs_0), logger=_test_logger) + pem_completed = PodEventMapper(k8s_api=_make_mock_api(valid_logs_0), logger=_test_logger) pod_del = Pod("p3", "Failed", cs=[ContainerStatus(State(terminated=Terminated(0, 
reason="Completed")))]) pod_del.metadata.labels = {"execution-id": "e3"} evd = pem_completed.map_pod_event(pod_del, "DELETED")[0] @@ -123,7 +130,7 @@ def test_failed_timeout_and_deleted() -> None: def test_extract_id_and_metadata_priority_and_duplicates() -> None: - pem = PodEventMapper(k8s_api=FakeApi(""), logger=_test_logger) + pem = PodEventMapper(k8s_api=_make_mock_api(""), logger=_test_logger) # From label p = Pod("any", "Pending") @@ -154,7 +161,7 @@ def __init__(self, t: str, s: str) -> None: self.type = t self.status = s - pem = PodEventMapper(k8s_api=FakeApi(""), logger=_test_logger) + pem = PodEventMapper(k8s_api=_make_mock_api(""), logger=_test_logger) pod = Pod("p", "Pending") # No conditions -> None assert pem._map_scheduled(_ctx(pod)) is None @@ -170,7 +177,7 @@ def __init__(self, t: str, s: str) -> None: def test_parse_and_log_paths_and_analyze_failure_variants(caplog: pytest.LogCaptureFixture) -> None: # _parse_executor_output line-by-line line_json = '{"stdout":"x","stderr":"","exit_code":3,"resource_usage":{}}' - pem = PodEventMapper(k8s_api=FakeApi("junk\n" + line_json), logger=_test_logger) + pem = PodEventMapper(k8s_api=_make_mock_api("junk\n" + line_json), logger=_test_logger) pod = Pod("p", "Succeeded", cs=[ContainerStatus(State(terminated=Terminated(0)))]) logs = pem._extract_logs(pod) assert logs is not None @@ -181,23 +188,20 @@ def test_parse_and_log_paths_and_analyze_failure_variants(caplog: pytest.LogCapt assert pem2._extract_logs(pod) is None # _extract_logs exceptions -> 404/400/generic branches, all return None - class _API404(FakeApi): - def read_namespaced_pod_log(self, name: str, namespace: str, tail_lines: int = 10000) -> str: # noqa: ARG002 - raise Exception("404 Not Found") + mock_404 = MagicMock() + mock_404.read_namespaced_pod_log.side_effect = Exception("404 Not Found") - class _API400(FakeApi): - def read_namespaced_pod_log(self, name: str, namespace: str, tail_lines: int = 10000) -> str: # noqa: ARG002 - raise 
Exception("400 Bad Request") + mock_400 = MagicMock() + mock_400.read_namespaced_pod_log.side_effect = Exception("400 Bad Request") - class _APIGen(FakeApi): - def read_namespaced_pod_log(self, name: str, namespace: str, tail_lines: int = 10000) -> str: # noqa: ARG002 - raise Exception("boom") + mock_gen = MagicMock() + mock_gen.read_namespaced_pod_log.side_effect = Exception("boom") - pem404 = PodEventMapper(k8s_api=_API404(""), logger=_test_logger) + pem404 = PodEventMapper(k8s_api=mock_404, logger=_test_logger) assert pem404._extract_logs(pod) is None - pem400 = PodEventMapper(k8s_api=_API400(""), logger=_test_logger) + pem400 = PodEventMapper(k8s_api=mock_400, logger=_test_logger) assert pem400._extract_logs(pod) is None - pemg = PodEventMapper(k8s_api=_APIGen(""), logger=_test_logger) + pemg = PodEventMapper(k8s_api=mock_gen, logger=_test_logger) assert pemg._extract_logs(pod) is None # _analyze_failure: Evicted @@ -221,7 +225,7 @@ def read_namespaced_pod_log(self, name: str, namespace: str, tail_lines: int = 1 def test_all_containers_succeeded_and_cache_behavior() -> None: valid_logs = json.dumps({"stdout": "", "stderr": "", "exit_code": 0, "resource_usage": {}}) - pem = PodEventMapper(k8s_api=FakeApi(valid_logs), logger=_test_logger) + pem = PodEventMapper(k8s_api=_make_mock_api(valid_logs), logger=_test_logger) term0 = ContainerStatus(State(terminated=Terminated(0))) term0b = ContainerStatus(State(terminated=Terminated(0))) pod = Pod("p", "Failed", cs=[term0, term0b]) diff --git a/backend/tests/unit/services/pod_monitor/test_monitor.py b/backend/tests/unit/services/pod_monitor/test_monitor.py index dc93a150..aaf9f68d 100644 --- a/backend/tests/unit/services/pod_monitor/test_monitor.py +++ b/backend/tests/unit/services/pod_monitor/test_monitor.py @@ -26,14 +26,12 @@ from app.settings import Settings from kubernetes.client.rest import ApiException -from tests.helpers.k8s_fakes import ( - FakeApi, - FakeV1Api, - FakeWatch, - FakeWatchStream, - 
make_k8s_clients, +from tests.unit.services.pod_monitor.conftest import ( + MockWatchStream, + Pod, + make_mock_v1_api, + make_mock_watch, make_pod, - make_watch, ) pytestmark = pytest.mark.unit @@ -108,10 +106,12 @@ def map_pod_event(self, pod: Any, event_type: WatchEventType) -> list[Any]: # n def make_k8s_clients_di( events: list[dict[str, Any]] | None = None, resource_version: str = "rv1", - pods: list[Any] | None = None, + pods: list[Pod] | None = None, + logs: str = "{}", ) -> K8sClients: - """Create K8sClients for DI with fakes.""" - v1, watch = make_k8s_clients(events=events, resource_version=resource_version, pods=pods) + """Create K8sClients for DI with mocks.""" + v1 = make_mock_v1_api(logs=logs, pods=pods) + watch = make_mock_watch(events or [], resource_version) return K8sClients( api_client=MagicMock(), v1=v1, @@ -132,7 +132,7 @@ def make_pod_monitor( """Create PodMonitor with sensible test defaults.""" cfg = config or PodMonitorConfig() clients = k8s_clients or make_k8s_clients_di() - mapper = event_mapper or PodEventMapper(logger=_test_logger, k8s_api=FakeApi("{}")) + mapper = event_mapper or PodEventMapper(logger=_test_logger, k8s_api=make_mock_v1_api("{}")) service = kafka_service or create_test_kafka_event_service(event_metrics)[0] return PodMonitor( config=cfg, @@ -288,17 +288,15 @@ async def mock_process(event: PodEvent) -> None: async def test_reconcile_state_exception(event_metrics: EventMetrics, kubernetes_metrics: KubernetesMetrics) -> None: cfg = PodMonitorConfig() - class FailV1(FakeV1Api): - def list_namespaced_pod(self, namespace: str, label_selector: str) -> Any: - raise RuntimeError("API error") + fail_v1 = MagicMock() + fail_v1.list_namespaced_pod.side_effect = RuntimeError("API error") - fail_v1 = FailV1() k8s_clients = K8sClients( api_client=MagicMock(), v1=fail_v1, apps_v1=MagicMock(), networking_v1=MagicMock(), - watch=make_watch([]), + watch=make_mock_watch([]), ) pm = make_pod_monitor(event_metrics, kubernetes_metrics, 
config=cfg, k8s_clients=k8s_clients) @@ -535,8 +533,8 @@ async def mock_handle() -> None: async def test_create_pod_monitor_context_manager(event_metrics: EventMetrics, kubernetes_metrics: KubernetesMetrics, monkeypatch: pytest.MonkeyPatch) -> None: """Test create_pod_monitor factory with auto-created dependencies.""" # Mock create_k8s_clients to avoid real K8s connection - mock_v1 = FakeV1Api() - mock_watch = make_watch([]) + mock_v1 = make_mock_v1_api() + mock_watch = make_mock_watch([]) mock_clients = K8sClients( api_client=MagicMock(), v1=mock_v1, @@ -576,8 +574,8 @@ async def test_create_pod_monitor_with_injected_k8s_clients(event_metrics: Event service, _ = create_test_kafka_event_service(event_metrics) - mock_v1 = FakeV1Api() - mock_watch = make_watch([]) + mock_v1 = make_mock_v1_api() + mock_watch = make_mock_watch([]) mock_k8s_clients = K8sClients( api_client=MagicMock(), v1=mock_v1, @@ -717,22 +715,29 @@ async def test_watch_pod_events_with_field_selector(event_metrics: EventMetrics, watch_kwargs: list[dict[str, Any]] = [] - class TrackingV1(FakeV1Api): - def list_namespaced_pod(self, namespace: str, label_selector: str) -> Any: - watch_kwargs.append({"namespace": namespace, "label_selector": label_selector}) - return None + tracking_v1 = MagicMock() - class TrackingWatch(FakeWatch): - def stream(self, func: Any, **kwargs: Any) -> FakeWatchStream: - watch_kwargs.append(kwargs) - return FakeWatchStream([], "rv1") + def track_list(namespace: str, label_selector: str) -> None: + watch_kwargs.append({"namespace": namespace, "label_selector": label_selector}) + return None + + tracking_v1.list_namespaced_pod.side_effect = track_list + + tracking_watch = MagicMock() + + def track_stream(func: Any, **kwargs: Any) -> MockWatchStream: # noqa: ARG001 + watch_kwargs.append(kwargs) + return MockWatchStream([], "rv1") + + tracking_watch.stream.side_effect = track_stream + tracking_watch.stop.return_value = None k8s_clients = K8sClients( api_client=MagicMock(), - 
v1=TrackingV1(), + v1=tracking_v1, apps_v1=MagicMock(), networking_v1=MagicMock(), - watch=TrackingWatch([], "rv1"), + watch=tracking_watch, ) pm = make_pod_monitor(event_metrics, kubernetes_metrics, config=cfg, k8s_clients=k8s_clients) diff --git a/backend/tests/unit/services/saga/test_execution_saga_steps.py b/backend/tests/unit/services/saga/test_execution_saga_steps.py index 8c235076..bcd517f8 100644 --- a/backend/tests/unit/services/saga/test_execution_saga_steps.py +++ b/backend/tests/unit/services/saga/test_execution_saga_steps.py @@ -15,7 +15,7 @@ ) from app.services.saga.saga_step import SagaContext -from tests.helpers import make_execution_requested_event +from tests.conftest import make_execution_requested_event pytestmark = pytest.mark.unit diff --git a/backend/tests/unit/services/saga/test_saga_comprehensive.py b/backend/tests/unit/services/saga/test_saga_comprehensive.py index 14bd756a..a473c2b3 100644 --- a/backend/tests/unit/services/saga/test_saga_comprehensive.py +++ b/backend/tests/unit/services/saga/test_saga_comprehensive.py @@ -13,7 +13,7 @@ from app.services.saga.execution_saga import ExecutionSaga from app.services.saga.saga_step import CompensationStep, SagaContext, SagaStep -from tests.helpers import make_execution_requested_event +from tests.conftest import make_execution_requested_event pytestmark = pytest.mark.unit diff --git a/backend/tests/unit/services/saga/test_saga_orchestrator_unit.py b/backend/tests/unit/services/saga/test_saga_orchestrator_unit.py index b414884a..8f2b35f9 100644 --- a/backend/tests/unit/services/saga/test_saga_orchestrator_unit.py +++ b/backend/tests/unit/services/saga/test_saga_orchestrator_unit.py @@ -18,7 +18,7 @@ from app.services.saga.saga_step import CompensationStep, SagaContext, SagaStep from app.settings import Settings -from tests.helpers import make_execution_requested_event +from tests.conftest import make_execution_requested_event pytestmark = pytest.mark.unit diff --git a/docs/operations/cicd.md 
b/docs/operations/cicd.md index 51766210..54ff0130 100644 --- a/docs/operations/cicd.md +++ b/docs/operations/cicd.md @@ -132,54 +132,45 @@ graph TD B[Frontend Unit Tests] end - subgraph "Sequential (single stack)" - C[Setup k3s] - D[Build Images] - E[Start Stack] - F[Backend Integration] - G[Backend E2E] - H[Frontend E2E] - C --> D --> E --> F --> G --> H + subgraph "Build" + C[Build Images] + end + + subgraph "Backend E2E (own runner)" + D1[Setup k3s + Stack] + E[Backend E2E Tests] + D1 --> E + end + + subgraph "Frontend E2E (own runner)" + D2[Setup k3s + Stack] + F[Frontend E2E Tests] + D2 --> F end A --> C B --> C + C --> D1 + C --> D2 style A fill:#e8f5e9 style B fill:#e8f5e9 style C fill:#e1f5fe - style D fill:#e1f5fe - style E fill:#e1f5fe + style D1 fill:#e1f5fe + style D2 fill:#e1f5fe + style E fill:#fff3e0 style F fill:#fff3e0 - style G fill:#fff3e0 - style H fill:#fff3e0 ``` -### Why unified? - -Previously, backend integration, backend E2E, and frontend E2E tests each started their own full stack independently. -This caused: -- **3x setup time**: k3s installation, image builds, and docker-compose startup repeated for each job -- **~15 minutes total**: Each job took ~5 minutes, running in parallel but with redundant work - -The unified approach: -- **1x setup time**: Stack starts once, all tests run sequentially against it -- **~10 minutes total**: Single setup (~5 min) + all tests (~5 min) -- **Better resource efficiency**: One runner instead of three - ### Test execution order 1. **Unit tests (parallel)**: Backend and frontend unit tests run simultaneously. They require no infrastructure and complete quickly (~1-2 min each). -2. **Stack setup**: After unit tests pass, the stack-tests job: - - Installs k3s for Kubernetes functionality - - Builds all Docker images (with GHA layer caching) - - Starts the full stack via `./deploy.sh dev --ci` - - Seeds test users +2. **Image build**: After unit tests pass, all Docker images are built with GHA layer caching. 
-3. **Integration & E2E tests (sequential)**: All tests run against the same stack: - - Backend integration tests (pytest) +3. **E2E tests (parallel)**: Backend and frontend E2E tests run in parallel on separate runners, each with its own + isolated stack (k3s + docker compose): - Backend E2E tests (pytest with k8s) - Frontend E2E tests (Playwright) @@ -187,7 +178,7 @@ The unified approach: Each test suite reports coverage to [Codecov](https://codecov.io/): - `backend-unit` flag for unit tests -- `backend-stack` flag for integration + E2E tests (combined) +- `backend-e2e` flag for E2E tests - `frontend-unit` flag for frontend unit tests ## Documentation @@ -231,14 +222,13 @@ npx tsc --noEmit npm run test ``` -For integration and E2E tests, use the same deployment as CI: +For E2E tests, use the same deployment as CI: ```bash # Start full stack (requires k8s configured locally) ./deploy.sh dev # Run tests inside the running backend container -docker compose exec -T backend uv run pytest tests/integration -v docker compose exec -T backend uv run pytest tests/e2e -v # Run frontend E2E tests @@ -294,7 +284,7 @@ This eliminates copy-paste across workflows and ensures consistent k8s setup. 
| Frontend CI | `.github/workflows/frontend-ci.yml` | TypeScript lint and type check | | Security Scanning | `.github/workflows/security.yml` | Bandit SAST | | Docker Build & Scan| `.github/workflows/docker.yml` | Image build and Trivy scan | -| Stack Tests | `.github/workflows/stack-tests.yml` | All unit, integration, and E2E tests | +| Stack Tests | `.github/workflows/stack-tests.yml` | All unit and E2E tests | | Documentation | `.github/workflows/docs.yml` | MkDocs build and deploy | All workflows use [uv](https://docs.astral.sh/uv/) for Python dependency management and npm for Node.js, with caching diff --git a/docs/operations/deployment.md b/docs/operations/deployment.md index 12dd323c..1dc7492f 100644 --- a/docs/operations/deployment.md +++ b/docs/operations/deployment.md @@ -119,7 +119,7 @@ docker compose down -v # Also removes persistent volumes ### Running tests locally -The `test` command runs the full integration and unit test suite: +The `test` command runs the full unit and E2E test suite: ```bash ./deploy.sh test