diff --git a/backend/.gitignore b/backend/.gitignore new file mode 100644 index 00000000..89d5d882 --- /dev/null +++ b/backend/.gitignore @@ -0,0 +1,75 @@ +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +firebase-debug.log* +firebase-debug.*.log* + +# Firebase cache +.firebase/ + +# Firebase config + +# Uncomment this if you'd like others to create their own Firebase project. +# For a team working on the same Firebase project(s), it is recommended to leave +# it commented so all members can deploy to the same project(s) in .firebaserc. +# .firebaserc + +# Runtime data +pids +*.pid +*.seed +*.pid.lock + +# Directory for instrumented libs generated by jscoverage/JSCover +lib-cov + +# Coverage directory used by tools like istanbul +coverage + +# nyc test coverage +.nyc_output + +# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) +.grunt + +# Bower dependency directory (https://bower.io/) +bower_components + +# node-waf configuration +.lock-wscript + +# Compiled binary addons (http://nodejs.org/api/addons.html) +build/Release + +# Dependency directories +node_modules/ + +# Optional npm cache directory +.npm + +# Optional eslint cache +.eslintcache + +# Optional REPL history +.node_repl_history + +# Output of 'npm pack' +*.tgz + +# Yarn Integrity file +.yarn-integrity + +# dotenv environment variables file +.env + +# dataconnect generated files +.dataconnect + +# firebase-config files +app/services/firebase + +# quarantine files +app/services/quarantine/ \ No newline at end of file diff --git a/backend/app/config.py b/backend/app/config.py index 3ee6f809..e0ed318c 100644 --- a/backend/app/config.py +++ b/backend/app/config.py @@ -4,10 +4,15 @@ from logging.config import dictConfig from typing import Optional +from dotenv import load_dotenv +from PIL import Image, ImageFile +from pydantic import Field from pydantic_settings import BaseSettings from starlette.middleware.base import BaseHTTPMiddleware from 
starlette.requests import Request +load_dotenv() + class Settings(BaseSettings): # Database @@ -20,8 +25,15 @@ class Settings(BaseSettings): access_token_expire_minutes: int = 15 refresh_token_expire_days: int = 30 # Firebase - firebase_project_id: Optional[str] = None - firebase_service_account_path: str = "./firebase-service-account.json" + use_firebase_emulator: bool = Field( + default=False, env="USE_FIREBASE_EMULATOR" + ) # type:ignore + firebase_project_id: Optional[str] = Field( + default=None, env="FIREBASE_PROJECT_ID" + ) # type:ignore + firebase_service_account_path: str = Field( + default="./firebase-service-account.json", env="FIREBASE_SERVICE_ACCOUNT_PATH" + ) # type:ignore # Firebase service account credentials as environment variables firebase_type: Optional[str] = None firebase_private_key_id: Optional[str] = None @@ -32,6 +44,14 @@ class Settings(BaseSettings): firebase_token_uri: Optional[str] = None firebase_auth_provider_x509_cert_url: Optional[str] = None firebase_client_x509_cert_url: Optional[str] = None + # Image validation configs + LOAD_TRUNCATED_IMAGES: bool = False + MAX_IMAGE_PIXELS: int = 50_00_000 + MAX_FILE_SIZE: int = 5 * 1024 * 1024 + SIGNED_URL_EXPIRY_SECONDS: int = Field( + default=3600, env="SIGNED_URL_EXPIRY_SECONDS" + ) # type:ignore + CLAMAV_ENABLED: bool = False # App debug: bool = False diff --git a/backend/app/groups/routes.py b/backend/app/groups/routes.py index 4c36707b..ed1b2efe 100644 --- a/backend/app/groups/routes.py +++ b/backend/app/groups/routes.py @@ -15,7 +15,17 @@ RemoveMemberResponse, ) from app.groups.service import group_service -from fastapi import APIRouter, Depends, HTTPException, status +from app.services.schemas import ImageUploadResponse +from app.services.storage import storage_service +from fastapi import ( + APIRouter, + BackgroundTasks, + Depends, + File, + HTTPException, + UploadFile, + status, +) router = APIRouter(prefix="/groups", tags=["Groups"]) @@ -145,3 +155,64 @@ async def 
@router.post("/{group_id}/image", response_model=ImageUploadResponse)
async def upload_group_image(
    group_id: str,
    background_tasks: BackgroundTasks,
    file: UploadFile = File(...),
    current_user: dict = Depends(get_current_user),
):
    """Validate, process and upload a group image.

    The processed "full" variant URL is persisted to the group document in a
    background task after the response is sent.

    Raises:
        HTTPException 400: invalid file type/size (ValueError from the workflow).
        HTTPException 403: caller is not a member of the group (from service).
        HTTPException 500: any other upload failure.
    """
    # Membership check; raises HTTPException(403) for non-members.
    await group_service.ensure_user_in_group(group_id, current_user["_id"])

    try:
        urls = await storage_service.upload_image_workflow(
            file=file, folder="groups", entity_id=group_id
        )
    except ValueError as ve:
        raise HTTPException(status_code=400, detail=str(ve))
    except Exception:
        raise HTTPException(status_code=500, detail="Group image upload failed")

    # Declared without a default so FastAPI injects the real per-request
    # BackgroundTasks object (a `= BackgroundTasks()` default would create a
    # detached instance whose tasks never run).
    background_tasks.add_task(
        group_service.update_group_image_url, group_id, urls.get("full")
    )

    return ImageUploadResponse(
        success=True, urls=urls, message="Group image uploaded successfully."
    )


@router.delete("/{group_id}/image", response_model=DeleteGroupResponse)
async def delete_group_avatar(
    group_id: str,
    background_tasks: BackgroundTasks,
    current_user: dict = Depends(get_current_user),
):
    """Delete every stored variant of a group's image and clear its URL.

    Raises:
        HTTPException 404: group or avatar not found.
        HTTPException 403: caller is not a member of the group (from service).
        HTTPException 500: storage deletion failed.
    """
    group = await group_service.get_group_by_id(group_id, current_user["_id"])
    if not group:
        raise HTTPException(status_code=404, detail="Group not found")

    await group_service.ensure_user_in_group(group_id, current_user["_id"])

    image_url = group.get("imageUrl")
    if not image_url:
        raise HTTPException(status_code=404, detail="Group avatar not found")

    try:
        file_path = storage_service.extract_path_from_url(image_url)
        deleted = await storage_service.delete_image(file_path)
    except Exception:
        raise HTTPException(status_code=500, detail="Failed to delete group avatar")

    if not deleted:
        raise HTTPException(status_code=500, detail="Failed to delete group avatar")

    # Clear the stored URL after the response has been sent.
    background_tasks.add_task(group_service.update_group_image_url, group_id, None)

    return DeleteGroupResponse(
        success=True, message="Group image deleted successfully."
    )
Raises HTTPException if not.""" + db = self.get_db() + + try: + obj_id = ObjectId(group_id) + + except errors.InvalidId: + logger.warning(f"Invalid group_id: {group_id}") + raise HTTPException(status_code=400, detail="Invalid group ID format") + except Exception as e: + logger.error(f"Unexpected error converting group_id to ObjectId: {e}") + raise HTTPException(status_code=500, detail="Internal server error") + + group = await db.groups.find_one( + {"_id": obj_id, "members": {"$elemMatch": {"userId": user_id}}} + ) + + if not group: + raise HTTPException( + status_code=403, detail="You are not a member of this group" + ) + + return group # Optional return if route needs to read group data + + async def update_group_image_url(self, group_id: str, image_url: str) -> bool: + """Update the group's image URL in the database.""" + db = self.get_db() + + try: + obj_id = ObjectId(group_id) + except errors.InvalidId: + logger.warning(f"Invalid group_id: {group_id}") + return False + + result = await db.groups.update_one( + {"_id": obj_id}, {"$set": {"imageUrl": image_url}} + ) + return result.modified_count == 1 + group_service = GroupService() diff --git a/backend/app/services/__init__.py b/backend/app/services/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/backend/app/services/image_processor.py b/backend/app/services/image_processor.py new file mode 100644 index 00000000..280de5d7 --- /dev/null +++ b/backend/app/services/image_processor.py @@ -0,0 +1,122 @@ +import imghdr +from io import BytesIO +from typing import Dict, Tuple + +from app.config import logger +from PIL import Image, ImageFile, UnidentifiedImageError + +# Optional watermark path +WATERMARK_PATH = None # Example: "app/assets/watermark.png" + +# Resize targets (square thumbnail + larger sizes) +RESIZE_CONFIG = { + "thumbnail": (150, 150), + "medium": (300, 300), + "full": (800, 800), +} + +# Defining image file restrictions +ImageFile.LOAD_TRUNCATED_IMAGES = False 
from io import BytesIO
from typing import Dict, Tuple

from app.config import logger
from PIL import Image, ImageFile, UnidentifiedImageError

# Optional watermark path
WATERMARK_PATH = None  # Example: "app/assets/watermark.png"

# Resize targets (square thumbnail + larger sizes)
RESIZE_CONFIG = {
    "thumbnail": (150, 150),
    "medium": (300, 300),
    "full": (800, 800),
}

# Defining image file restrictions: refuse truncated files and
# decompression bombs (Pillow raises beyond MAX_IMAGE_PIXELS).
ImageFile.LOAD_TRUNCATED_IMAGES = False
Image.MAX_IMAGE_PIXELS = 5_000_000  # ~5 megapixels cap (not "50MB")

# Magic-byte signatures for accepted formats, checked by hand because the
# stdlib `imghdr` module is deprecated and removed in Python 3.13 (PEP 594).
_PNG_MAGIC = b"\x89PNG\r\n\x1a\n"
_JPEG_MAGIC = b"\xff\xd8\xff"


def strip_exif(image: Image.Image) -> Image.Image:
    """
    Return a copy of the image with EXIF (and all other) metadata stripped.

    The image is rebuilt from raw pixel data, which drops every metadata
    block.  Palette images are converted to RGBA first: Image.new("P", ...)
    does not inherit the source palette, so rebuilding in "P" mode would
    silently corrupt the colors.
    """
    if image.mode == "P":
        image = image.convert("RGBA")
    clean_image = Image.new(image.mode, image.size)
    clean_image.putdata(list(image.getdata()))
    return clean_image


def validate_magic_bytes(file_content: bytes):
    """
    Validate the actual file type of the image from its leading bytes.

    Accepts JPEG, PNG and WebP; raises ValueError otherwise.  Unlike the
    deprecated `imghdr` check this also accepts valid JPEGs that lack a
    JFIF/Exif marker, which Pillow decodes fine.
    """
    if file_content.startswith(_PNG_MAGIC):
        return
    if file_content.startswith(_JPEG_MAGIC):
        return
    # WebP: RIFF container with the "WEBP" fourcc at offset 8.
    if file_content[:4] == b"RIFF" and file_content[8:12] == b"WEBP":
        return
    raise ValueError("Invalid or unsupported image type.")


def add_watermark(image: Image.Image, watermark: Image.Image) -> Image.Image:
    """
    Add a watermark in the bottom-right corner.  Both images are converted
    to RGBA; the watermark is scaled down to at most 30% of the image width.
    """
    image = image.convert("RGBA")
    watermark = watermark.convert("RGBA")

    # Resize watermark if larger than 30% of the image width
    wm_width = min(watermark.width, int(image.width * 0.3))
    wm_height = int(watermark.height * (wm_width / watermark.width))
    watermark = watermark.resize((wm_width, wm_height), Image.Resampling.LANCZOS)

    # Paste watermark at bottom-right with a 10px margin
    position = (image.width - wm_width - 10, image.height - wm_height - 10)
    image.alpha_composite(watermark, dest=position)
    return image


def resize_image(image: Image.Image, size: Tuple[int, int]) -> Image.Image:
    """
    Resize while maintaining aspect ratio; pad to a white square when the
    target is square (thumbnails).
    """
    image.thumbnail(size, Image.Resampling.LANCZOS)

    # Pad to square if needed (for thumbnails)
    if size[0] == size[1]:
        padded = Image.new("RGB", size, (255, 255, 255))
        offset = ((size[0] - image.width) // 2, (size[1] - image.height) // 2)
        padded.paste(image, offset)
        return padded

    return image


async def process_image(file_content: bytes) -> Dict[str, bytes]:
    """
    Validate, strip metadata, resize and re-encode an upload as WebP,
    optionally watermarking it.

    Returns:
        Dict mapping "thumbnail"/"medium"/"full" to WebP-encoded bytes.

    Raises:
        ValueError: the bytes are not a valid image (UnidentifiedImageError).
        RuntimeError: any other processing failure (including unsupported
            formats — the inner ValueError is converted by the generic
            handler, which the test-suite relies on).
    """
    try:
        validate_magic_bytes(file_content)

        img = Image.open(BytesIO(file_content))
        # Guard against a None format (would raise and be mapped below).
        img_format = (img.format or "").upper()

        # Defense in depth: magic bytes were checked, but trust Pillow's parse.
        if img_format not in ["JPEG", "PNG", "WEBP"]:
            raise ValueError(f"Unsupported image format: {img_format}")

        img = strip_exif(img)

        watermark = Image.open(WATERMARK_PATH) if WATERMARK_PATH else None

        results = {}

        for label, size in RESIZE_CONFIG.items():
            resized = resize_image(img.copy(), size)

            if watermark:
                resized = add_watermark(resized, watermark)

            # Save to memory in WebP format: high quality, max compression effort.
            buffer = BytesIO()
            resized.save(buffer, format="WEBP", quality=85, method=6)
            buffer.seek(0)

            results[label] = buffer.read()

        return results

    except UnidentifiedImageError:
        logger.exception("Uploaded file is not a valid image.")
        raise ValueError("Invalid image content.")
    except Exception as e:
        logger.exception(f"Image processing error: {e}")
        raise RuntimeError("Image processing failed.")
load_dotenv()

# --- Firebase Admin SDK bootstrap (runs once at import time) ---
# Fails fast with an exception if configuration is missing, because every
# method of FirebaseStorageService below depends on an initialized app.
try:
    use_emulator = settings.use_firebase_emulator
    service_account_path = settings.firebase_service_account_path
    project_id = settings.firebase_project_id

    if use_emulator:
        # NOTE(review): emulator hosts are normally "host:port" without a
        # scheme, and the Storage emulator's default port is 9199 (4000 is
        # the Emulator UI) — confirm this value against firebase.json.
        os.environ["FIREBASE_STORAGE_EMULATOR_HOST"] = "http://127.0.0.1:4000"
        logger.info("Using Firebase Storage Emulator")

    if not project_id:
        raise ValueError("FIREBASE_PROJECT_ID environment variable is not set.")

    # A service-account file is only required outside the emulator.
    if not use_emulator and (
        not service_account_path or not os.path.exists(service_account_path)
    ):
        raise FileNotFoundError("Service account path is missing or invalid.")

    # Prevent multiple initializations: get_app() raises ValueError when no
    # default app exists yet, which is the only case where we initialize.
    try:
        firebase_admin.get_app()
        logger.info("Firebase already initialized.")
    except ValueError:
        if use_emulator:
            # No credentials, no bucket
            firebase_admin.initialize_app(options={"projectId": project_id})
            logger.info("Firebase initialized with emulator (no credentials).")
        else:
            cred = credentials.Certificate(service_account_path)
            firebase_admin.initialize_app(
                cred, {"storageBucket": f"{project_id}.firebasestorage.app"}
            )
            logger.info("Firebase initialized with service account file.")

except Exception as e:
    # Re-raise: importing this module without Firebase is never usable.
    logger.exception(f"Firebase initialization failed: {e}")
    raise
"""
This Storage Class implements the following functions:
1. upload_image_workflow
2. validate_file
3. process_image
4. upload_to_firebase
5. delete_image
"""
PIL_FORMAT_TO_MIME = {
    "JPEG": "image/jpeg",
    "PNG": "image/png",
    "WEBP": "image/webp",
}


# Tunables come from settings so each environment can override them.
MAX_IMAGE_PIXELS = settings.MAX_IMAGE_PIXELS
LOAD_TRUNCATED_IMAGES = settings.LOAD_TRUNCATED_IMAGES
SIGNED_URL_EXPIRY_SECONDS = settings.SIGNED_URL_EXPIRY_SECONDS
CLAMAV_ENABLED = settings.CLAMAV_ENABLED  # Default False
# Raw uploads are staged here until they are scanned/processed.
BASE_QUARANTINE_DIR = Path(__file__).resolve().parent / "quarantine"


class FirebaseStorageService:
    """Image storage on Firebase: validation, quarantine staging, processing
    into WebP size variants, signed-URL upload and deletion."""

    def __init__(self):
        pass

    def get_bucket(self) -> str:
        """Return the bucket name derived from the configured project id."""
        return f"{project_id}.firebasestorage.app"

    def generate_secure_file_path(
        self, entity_id: str, folder: str, filename: str
    ) -> str:
        """
        Generate a unique file path for Firebase Storage, e.g.
        users/<entity_id>/<uuid>.webp or groups/<entity_id>/<uuid>.webp.
        Only the original extension is kept; the basename is random, so a
        hostile filename cannot influence the stored path.
        """
        unique_id = uuid4().hex
        filename = filename or ""
        ext = os.path.splitext(filename)[-1].lower() or ".webp"
        safe_filename = f"{unique_id}{ext}"
        return f"{folder}/{entity_id}/{safe_filename}"

    def extract_path_from_url(self, url: str) -> str:
        """
        Extract the Firebase Storage blob path from a full public URL,
        dropping the bucket prefix and any _<size>.webp variant suffix.
        """
        parsed_url = urlparse(url)
        path = parsed_url.path.lstrip("/")

        # Remove the bucket name if included in path
        if path.startswith(f"{project_id}.firebasestorage.app/"):
            path = path.replace(f"{project_id}.firebasestorage.app/", "", 1)

        # Strip the _<size>.webp suffix (e.g., _thumbnail.webp, _medium.webp)
        path = re.sub(r"_(thumbnail|medium|full)\.webp$", "", path)

        return path

    # Kept for the planned ClamAV integration (CLAMAV_ENABLED); re-enable
    # together with the quoted-out call in move_quarantine_to_public.
    '''async def _scan_with_clamav_async(self, content: bytes) -> bool:
        """
        Optional inline ClamAV scan.
        Returns:
        - True => scanned and clean
        - False => infected or scanning failed
        - None => scanning not attempted (CLAMAV_ENABLED False)
        """
        if not CLAMAV_ENABLED:
            logger.debug("ClamAV inline scanning disabled.")
            return None

        try:
            import tempfile

            def run_scan(tmp_path: str):
                completed = subprocess.run(
                    ["clamscan", "--no-summary", tmp_path],
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE,
                    text=True,
                    check=False,
                )
                return completed

            with tempfile.NamedTemporaryFile(delete=True) as tmp:
                tmp.write(content)
                tmp.flush()
                completed = await asyncio.to_thread(run_scan, tmp.name)

            stdout = completed.stdout or ""
            # clamscan returns non-zero when infected, but we check text for robustness
            if "FOUND" in stdout:
                logger.warning("ClamAV detected malware in uploaded file.")
                return False
            if completed.returncode not in (0, 1):
                logger.exception(f"ClamAV returned unexpected code {completed.returncode}. stdout: {stdout}")
                return False

            logger.debug("ClamAV inline scan passed.")
            return True

        except Exception as e:
            logger.exception(f"ClamAV scan failed: {e}")
            return False'''

    async def validate_file(self, file: UploadFile) -> bool:
        """
        Validate file type (JPEG, PNG, WebP), max size and pixel count
        using Pillow.  Returns False (never raises) on any rejection; the
        stream is rewound so later readers see the full content.
        """
        try:
            content = await file.read()
            await file.seek(0)

            if len(content) > settings.MAX_FILE_SIZE:
                logger.warning("File size larger than 5MB.")
                return False

            try:
                img = Image.open(io.BytesIO(content))
                img.load()  # force a full decode so corrupt files fail here
            except UnidentifiedImageError:
                logger.warning("Failed to identify image format with Pillow.")
                return False
            except Exception:
                logger.warning(
                    "Pillow failed to fully load the image (possibly corrupt or too large)."
                )
                return False

            img_format = img.format
            if img_format is None or PIL_FORMAT_TO_MIME.get(img_format) not in [
                "image/jpeg",
                "image/png",
                "image/webp",
            ]:
                logger.warning(f"Rejected image due to invalid format: {img_format}")
                return False

            # Decompression-bomb guard independent of the byte-size check.
            if img.width * img.height > MAX_IMAGE_PIXELS:
                logger.warning("Image pixel count exceeds allowed threshold.")
                return False

            return True

        except Exception as e:
            logger.exception(f"File validation failed: {e}")
            return False

    async def save_to_quarantine(
        self,
        file: Union[UploadFile, bytes],
        folder: str,
        entity_id: str,
        original_filename: str,
    ) -> str:
        """
        Save a file to the local quarantine directory for scanning/processing
        later, using the same secure naming convention as
        generate_secure_file_path.

        Returns:
            str: Full path to the saved file.

        Raises:
            TypeError: *file* is neither UploadFile nor bytes.
            RuntimeError: the file could not be written to disk.
        """
        # Generate secure name (independent of the public path's UUID).
        secure_relative_path = self.generate_secure_file_path(
            entity_id=entity_id, folder=folder, filename=original_filename
        )

        # Convert storage path to local quarantine path
        target_path = BASE_QUARANTINE_DIR / secure_relative_path
        target_path.parent.mkdir(parents=True, exist_ok=True)

        # Handle UploadFile or raw bytes
        try:
            if isinstance(file, UploadFile):
                content = await file.read()
            elif isinstance(file, bytes):
                content = file
            else:
                raise TypeError(
                    f"Invalid file type: {type(file)}. Must be UploadFile or bytes."
                )
        except Exception as e:
            logger.exception(f"Failed to read file content: {e}")
            raise

        # Save locally
        try:
            with open(target_path, "wb") as f:
                f.write(content)
        except Exception as e:
            logger.exception(f"Failed to write file to quarantine: {e}")
            raise RuntimeError("Failed to save file to quarantine.") from e

        return str(target_path)

    async def generate_signed_url(
        self, blob, expires_seconds: int = SIGNED_URL_EXPIRY_SECONDS
    ) -> str:
        """
        Generate a signed GET URL for a google-cloud-storage Blob.  The
        blocking SDK call runs in a worker thread via asyncio.to_thread.
        """
        try:
            expiry = datetime.timedelta(seconds=expires_seconds)

            def gen():
                # google-cloud-storage Blob.generate_signed_url
                return blob.generate_signed_url(expiration=expiry, method="GET")

            url = await asyncio.to_thread(gen)
            return url
        except Exception as e:
            logger.exception("Failed to generate signed URL.")
            raise RuntimeError("Failed to generate signed URL.") from e

    async def upload_to_firebase(
        self, processed_images: Dict[str, bytes], file_path: str
    ) -> Dict[str, str]:
        """
        Upload processed size variants as <file_path>_<size>.webp and return
        per-size URLs (emulator URL or signed URL; blobs are never made
        public via make_public()).
        """
        urls = {}
        try:
            bucket_name = self.get_bucket()
            bucket = storage.bucket(bucket_name)
            for size, content in processed_images.items():
                # Ensure file names end with .webp
                blob_path = f"{file_path}_{size}.webp"
                blob = bucket.blob(blob_path)
                blob.upload_from_string(content, content_type="image/webp")

                if use_emulator:
                    url = f"http://127.0.0.1:4000/storage/{bucket_name}/o/{blob_path.replace('/', '%2F')}?alt=media"
                else:
                    url = await self.generate_signed_url(
                        blob, expires_seconds=SIGNED_URL_EXPIRY_SECONDS
                    )

                urls[size] = url
                logger.info(f"Uploaded image (signed): {blob_path}")
            return urls

        except FirebaseError:
            logger.exception("Firebase error during image upload.")
            raise
        except Exception as e:
            logger.exception("Image upload to Firebase failed.")
            raise RuntimeError("Failed to upload image to Firebase.") from e

    async def move_quarantine_to_public(
        self, quarantine_path: str, public_base_path: str
    ) -> Dict[str, str]:
        """
        Worker-friendly method:
        - loads the quarantine file from local disk
        - (ClamAV scan currently disabled; see quoted-out block)
        - processes the image (re-encode + sizes)
        - uploads processed images to the public path
        - deletes the quarantine file
        Returns per-size URLs for the uploaded images.
        """
        try:
            if not os.path.exists(quarantine_path):
                logger.error(f"Quarantine file not found: {quarantine_path}")
                raise ValueError("Quarantine file not found.")

            # Read file bytes
            with open(quarantine_path, "rb") as f:
                content = f.read()

            """# Inline scan if enabled else skipped
            scan_result = await self._scan_with_clamav_async(content)
            if scan_result is False:
                try:
                    os.remove(quarantine_path)
                except Exception:
                    logger.warning("Failed to delete infected quarantine file.")
                logger.warning("Quarantine file marked as infected and removed.")
                raise ValueError("File failed virus scan.")"""

            # Process the image
            processed = await self.process_image(content)

            # Upload processed images to Firebase public path
            urls = await self.upload_to_firebase(processed, public_base_path)

            # Cleanup local quarantine file (best effort)
            try:
                os.remove(quarantine_path)
            except Exception:
                logger.warning("Failed to delete quarantine file after processing.")

            return urls

        except Exception:
            logger.exception("Failed to move file from quarantine to public.")
            raise

    async def process_image(self, file_content: bytes) -> Dict[str, bytes]:
        """
        Generate the resized, optimized WebP variants (thumbnail, medium,
        full) by delegating to image_processor.process_image.
        """
        try:
            return await process_image(file_content)
        except Exception as e:
            logger.exception("Image processing failed.")
            raise RuntimeError("Failed to process image.") from e

    async def delete_image(self, file_path: str) -> bool:
        """
        Delete all size variants (thumbnail, medium, full) from Firebase
        Storage.  Returns False on any failure.

        NOTE(review): blob.delete() raises on a missing variant, which aborts
        the loop and leaves the remaining variants undeleted — confirm whether
        best-effort per-variant deletion is wanted instead.
        """
        try:
            bucket_name = self.get_bucket()
            bucket = storage.bucket(bucket_name)
            for size in ["thumbnail", "medium", "full"]:
                blob_path = f"{file_path}_{size}.webp"
                blob = bucket.blob(blob_path)
                blob.delete()
                logger.info(f"Deleted image: {blob_path}")
            return True

        except Exception:
            logger.exception(f"Failed to delete images at {file_path}")
            return False

    async def upload_image_workflow(
        self, file: UploadFile, folder: str, entity_id: str
    ) -> Dict[str, str]:
        """
        Workflow:
        - Validate (magic bytes + Pillow checks)
        - Save raw upload to quarantine
        - Process + upload (ClamAV scanning is re-added inside
          move_quarantine_to_public once enabled)

        Raises:
            ValueError: invalid file type or size.
            RuntimeError: any other failure in the pipeline.
        """
        try:
            if not await self.validate_file(file):
                logger.error("Invalid file provided to upload_image_workflow.")
                raise ValueError("Invalid file type or size.")

            content = await file.read()
            # Public path; quarantine gets its own independent random name.
            file_path = self.generate_secure_file_path(entity_id, folder, file.filename)
            logger.info(f"Generated secure path: {file_path}")

            quarantine_path = await self.save_to_quarantine(
                content, folder, entity_id, file.filename
            )

            # Both the ClamAV-enabled and -disabled branches performed the
            # identical call, so the duplication is collapsed here.
            if not CLAMAV_ENABLED:
                logger.info("ClamAV disabled: processing image directly.")
            urls = await self.move_quarantine_to_public(quarantine_path, file_path)
            logger.info(f"Upload workflow successful for {folder} entity {entity_id}")
            return urls

        except Exception as e:
            logger.exception("Upload image workflow failed.")
            raise RuntimeError("Image upload failed.") from e


storage_service = FirebaseStorageService()
@router.post("/me/avatar", response_model=ImageUploadResponse)
async def upload_user_avatar(
    background_tasks: BackgroundTasks,
    file: UploadFile = File(...),
    current_user: dict = Depends(get_current_user),
):
    """Validate, process and upload the current user's avatar.

    On success the "full" variant URL is persisted in a background task.

    Raises:
        HTTPException 400: invalid file type/size.
        HTTPException 404: storage location not found.
        HTTPException 500: any other upload failure.
    """
    user_id = str(current_user["_id"])

    try:
        # Validate, process, upload
        urls = await storage_service.upload_image_workflow(
            file=file, folder="users", entity_id=user_id
        )
    except ValueError as ve:
        raise HTTPException(status_code=400, detail=str(ve))
    except FileNotFoundError:
        raise HTTPException(status_code=404, detail="Storage location not found.")
    except Exception:
        logger.exception(f"Unexpected error during avatar upload for user {user_id}")
        raise HTTPException(
            status_code=500, detail="Image upload failed due to an internal error."
        )

    # Update DB in background.  background_tasks is declared without a default
    # so FastAPI injects the real per-request object (a `= BackgroundTasks()`
    # default would create a detached instance whose tasks never run).
    background_tasks.add_task(
        user_service.update_user_avatar_url, user_id, urls.get("full")
    )

    return ImageUploadResponse(
        success=True, urls=urls, message="Avatar uploaded successfully."
    )


@router.delete("/me/avatar", response_model=DeleteUserResponse)
async def delete_user_avatar(
    background_tasks: BackgroundTasks,
    current_user: dict = Depends(get_current_user),
):
    """Delete every stored variant of the current user's avatar and clear
    the stored URL.

    Raises:
        HTTPException 404: user or avatar not found.
        HTTPException 500: storage deletion failed.
    """
    user_id = str(current_user["_id"])

    user = await user_service.get_user_by_id(user_id)
    if not user:
        raise HTTPException(status_code=404, detail="User not found")

    image_url = user.get("imageUrl")
    if not image_url:
        raise HTTPException(status_code=404, detail="User avatar not found")

    file_path = storage_service.extract_path_from_url(image_url)
    if not await storage_service.delete_image(file_path):
        raise HTTPException(status_code=500, detail="Failed to delete image")

    # Clear the stored URL after the response has been sent.
    background_tasks.add_task(user_service.update_user_avatar_url, user_id, None)

    return DeleteUserResponse(success=True, message="User avatar deleted successfully.")
from io import BytesIO

import pytest
from app.services import image_processor as ip
from PIL import Image


def make_test_image(fmt="PNG", size=(200, 100), color=(255, 0, 0)):
    """Utility to generate an in-memory test image."""
    img = Image.new("RGB", size, color)
    buf = BytesIO()
    img.save(buf, format=fmt)
    return buf.getvalue()


def test_strip_exif_returns_new_image():
    """strip_exif should return a new image with identical pixels but no metadata."""
    img = Image.new("RGB", (10, 10), (123, 222, 111))
    clean = ip.strip_exif(img)
    assert clean.size == img.size
    assert list(clean.getdata()) == list(img.getdata())
    assert clean is not img


@pytest.mark.parametrize("fmt", ["jpeg", "png", "webp"])
def test_validate_magic_bytes_valid(fmt):
    """validate_magic_bytes should pass for JPEG, PNG, and WebP formats."""
    content = make_test_image(fmt.upper())
    ip.validate_magic_bytes(content)  # should not raise


@pytest.mark.parametrize("content", [b"notanimage", make_test_image("GIF")])
def test_validate_magic_bytes_invalid(content):
    """validate_magic_bytes should raise ValueError for unsupported formats or random bytes."""
    with pytest.raises(ValueError, match="Invalid or unsupported image type"):
        ip.validate_magic_bytes(content)


def test_resize_image_square_padding():
    """resize_image should pad non-square images to square when requested."""
    img = Image.new("RGB", (100, 50), (0, 0, 255))
    resized = ip.resize_image(img, (150, 150))
    assert resized.size == (150, 150)
    center_pixel = resized.getpixel((75, 75))
    assert center_pixel == (0, 0, 255)


def test_add_watermark_places_watermark():
    """add_watermark should overlay watermark in bottom-right of image."""
    base = Image.new("RGBA", (200, 200), (255, 255, 255, 255))
    watermark = Image.new("RGBA", (50, 50), (0, 255, 0, 128))
    out = ip.add_watermark(base, watermark)
    px = out.getpixel((190, 190))
    assert px[1] > 200  # green applied


@pytest.mark.asyncio
async def test_process_image_happy_path():
    """process_image should return resized WebP images for valid input."""
    content = make_test_image("PNG", size=(400, 400))
    results = await ip.process_image(content)
    assert set(results.keys()) == {"thumbnail", "medium", "full"}
    assert all(len(data) > 0 for data in results.values())


@pytest.mark.asyncio
async def test_process_image_invalid_format():
    """process_image should raise RuntimeError for unsupported image formats (e.g. GIF)."""
    bad_content = make_test_image("GIF")
    with pytest.raises(RuntimeError, match="Image processing failed"):
        await ip.process_image(bad_content)


@pytest.mark.asyncio
async def test_process_image_corrupted_bytes():
    """process_image should raise RuntimeError when given corrupted bytes."""
    with pytest.raises(RuntimeError, match="Image processing failed."):
        await ip.process_image(b"notanimage")


@pytest.mark.asyncio
async def test_process_image_with_watermark(monkeypatch, tmp_path):
    """process_image should apply watermark if WATERMARK_PATH is set."""
    watermark_file = tmp_path / "wm.png"
    Image.new("RGBA", (30, 30), (0, 255, 0, 255)).save(watermark_file, "PNG")

    # monkeypatch restores the global automatically at teardown, so no
    # manual reset is needed at the end of the test.
    monkeypatch.setattr(ip, "WATERMARK_PATH", str(watermark_file))

    content = make_test_image("PNG", size=(300, 300), color=(255, 255, 255))
    results = await ip.process_image(content)

    img = Image.open(BytesIO(results["thumbnail"]))
    px = img.getpixel((140, 140))  # bottom-right area
    assert px != (255, 255, 255)  # watermark altered pixel
import io
from pathlib import Path
from unittest.mock import AsyncMock, MagicMock, mock_open, patch
from uuid import UUID

import app.services.storage as storage_module
import pytest
from app.services.storage import FirebaseStorageService
from fastapi import UploadFile
from firebase_admin.exceptions import FirebaseError
from PIL import Image

# Module-level instance shared by every test.  Assumes constructing
# FirebaseStorageService performs no network I/O -- TODO confirm.
# (A long commented-out `patch_firebase` autouse fixture was removed here.)
service = FirebaseStorageService()


@pytest.fixture
def fixed_uuid(monkeypatch):
    """Pin uuid4 in the storage module so generated paths are deterministic."""
    monkeypatch.setattr(
        "app.services.storage.uuid4", lambda: UUID("12345678123456781234567812345678")
    )
    yield


def test_generate_secure_file_path_with_extension(fixed_uuid):
    """Keeps the original extension while replacing the name with a UUID."""
    result = service.generate_secure_file_path("user123", "users", "image.png")
    assert result.startswith("users/user123/12345678123456781234567812345678")
    assert result.endswith(".png")


def test_generate_secure_file_path_without_extension(fixed_uuid):
    """Falls back to .webp when the filename has no extension."""
    result = service.generate_secure_file_path("user123", "users", "image")
    assert result.endswith(".webp")


def test_generate_secure_file_path_empty_entity(fixed_uuid):
    """An empty entity id still yields a path (with an empty segment)."""
    result = service.generate_secure_file_path("", "users", "image.png")
    assert result.startswith("users//12345678123456781234567812345678")
    assert result.endswith(".png")


def test_generate_secure_file_path_empty_folder(fixed_uuid):
    """An empty folder still yields a path (with an empty leading segment)."""
    result = service.generate_secure_file_path("user123", "", "image.png")
    assert result.startswith("/user123/12345678123456781234567812345678")
    assert result.endswith(".png")


def test_generate_secure_file_path_empty_filename(fixed_uuid):
    """An empty filename defaults to the .webp extension."""
    result = service.generate_secure_file_path("user123", "users", "")
    assert result.endswith(".webp")


def test_generate_secure_file_path_none_filename(fixed_uuid):
    """A None filename defaults to the .webp extension."""
    result = service.generate_secure_file_path("user123", "users", None)
    assert result.endswith(".webp")
    assert "12345678123456781234567812345678.webp" in result


def test_extract_path_from_url_normal():
    """Extracts path from a normal Firebase URL without suffix."""
    url = "https://dummy-project.firebasestorage.app/o/users%2Fuser123%2Ffile.webp?alt=media"
    result = service.extract_path_from_url(url)
    assert result == "o/users%2Fuser123%2Ffile.webp"


def test_extract_path_from_url_with_suffixes():
    """Removes _thumbnail.webp, _medium.webp, and _full.webp suffixes from URL."""
    for suffix in ["_thumbnail.webp", "_medium.webp", "_full.webp"]:
        url = f"https://dummy-project.firebasestorage.app/o/users%2Fuser123%2Ffile{suffix}?alt=media"
        result = service.extract_path_from_url(url)
        assert result == "o/users%2Fuser123%2Ffile"


def test_extract_path_from_url_already_clean():
    """Returns path unchanged if no bucket name or suffix is present."""
    url = "https://example.com/path/to/file.webp"
    result = service.extract_path_from_url(url)
    assert result == "path/to/file.webp"


def test_extract_path_from_url_malformed():
    """Handles malformed URLs gracefully by returning whatever path can be parsed."""
    url = "not a valid url"
    result = service.extract_path_from_url(url)
    # urlparse still returns a path string, even if malformed
    assert result == "not a valid url"


def test_extract_path_from_url_unexpected_suffix():
    """Leaves URL unchanged if suffix is not thumbnail/medium/full."""
    url = "https://dummy-project.firebasestorage.app/o/users%2Fuser123%2Ffile_large.webp?alt=media"
    result = service.extract_path_from_url(url)
    assert result == "o/users%2Fuser123%2Ffile_large.webp"


@pytest.mark.asyncio
async def test_validate_file_valid_png(monkeypatch):
    """Returns True for a valid PNG file under max size and pixel limit."""
    # Create a small in-memory PNG
    img_bytes = io.BytesIO()
    img = Image.new("RGB", (10, 10))
    img.save(img_bytes, format="PNG")
    img_bytes = img_bytes.getvalue()

    # Mock UploadFile
    file = AsyncMock(spec=UploadFile)
    file.read.return_value = img_bytes
    file.seek.return_value = None

    # Patch limits with a single stub.  (The original patched `settings`
    # twice; the first stub was immediately overwritten by the second.)
    monkeypatch.setattr(
        "app.services.storage.settings",
        type("obj", (), {"MAX_FILE_SIZE": 5 * 1024 * 1024, "MAX_IMAGE_PIXELS": 10000}),
    )

    result = await service.validate_file(file)
    assert result is True


@pytest.mark.asyncio
async def test_validate_file_oversized(monkeypatch):
    """Returns False for files exceeding MAX_FILE_SIZE."""
    # Exactly 5 MB, i.e. equal to the patched MAX_FILE_SIZE.  Assumes
    # validate_file rejects at the limit (>= check) -- TODO confirm; if the
    # check is strictly `>`, this payload should be one byte larger.
    img_bytes = b"x" * (5 * 1024 * 1024)

    file = AsyncMock(spec=UploadFile)
    file.read.return_value = img_bytes
    file.seek.return_value = None

    monkeypatch.setattr(
        "app.services.storage.settings",
        type("obj", (), {"MAX_FILE_SIZE": 5 * 1024 * 1024}),
    )

    result = await service.validate_file(file)
    assert result is False


@pytest.mark.asyncio
async def test_validate_file_corrupt_image():
    """Returns False if Pillow cannot identify the image (corrupt)."""
    file = AsyncMock(spec=UploadFile)
    file.read.return_value = b"not an image"
    file.seek.return_value = None

    result = await service.validate_file(file)
    assert result is False


@pytest.mark.asyncio
async def test_validate_file_unsupported_format(monkeypatch):
    """Returns False if image format is not JPEG/PNG/WebP."""
    # Create an image with BMP format (unsupported)
    img_bytes = io.BytesIO()
    img = Image.new("RGB", (10, 10))
    img.save(img_bytes, format="BMP")
    img_bytes = img_bytes.getvalue()

    file = AsyncMock(spec=UploadFile)
    file.read.return_value = img_bytes
    file.seek.return_value = None

    monkeypatch.setattr("app.services.storage.PIL_FORMAT_TO_MIME", {"BMP": "image/bmp"})

    result = await service.validate_file(file)
    assert result is False


@pytest.mark.asyncio
async def test_validate_file_too_many_pixels(monkeypatch):
    """Returns False if image width*height exceeds MAX_IMAGE_PIXELS."""
    img_bytes = io.BytesIO()
    img = Image.new("RGB", (200, 200))  # 40,000 pixels
    img.save(img_bytes, format="PNG")
    img_bytes = img_bytes.getvalue()

    file = AsyncMock(spec=UploadFile)
    file.read.return_value = img_bytes
    file.seek.return_value = None

    # NOTE(review): patched at module level here, but via `settings` in the
    # valid-PNG test above -- confirm which attribute validate_file reads.
    monkeypatch.setattr("app.services.storage.MAX_IMAGE_PIXELS", 1000)

    result = await service.validate_file(file)
    assert result is False


@pytest.mark.asyncio
async def test_save_to_quarantine_uploadfile(monkeypatch):
    """Saves an UploadFile to the quarantine directory using a secure path."""
    file_content = b"dummy content"
    file = AsyncMock(spec=UploadFile)
    file.read.return_value = file_content

    # Patch generate_secure_file_path to return a fixed filename
    monkeypatch.setattr(
        service,
        "generate_secure_file_path",
        lambda *args, **kwargs: "folder/entity/file.webp",
    )

    # Patch BASE_QUARANTINE_DIR at the module level
    mock_path = MagicMock(spec=Path)
    mock_path.__truediv__.return_value = mock_path  # for /
    monkeypatch.setattr("app.services.storage.BASE_QUARANTINE_DIR", mock_path)
    mock_path.parent.mkdir.return_value = None

    m_open = mock_open()
    with patch("builtins.open", m_open):
        result = await service.save_to_quarantine(file, "folder", "entity", "file.webp")

    assert result == str(mock_path)
    m_open.assert_called_once_with(mock_path, "wb")
    m_open().write.assert_called_once_with(file_content)


@pytest.mark.asyncio
async def test_save_to_quarantine_bytes(monkeypatch):
    """Saves raw bytes to the quarantine directory using a secure path."""
    file_content = b"raw bytes content"

    monkeypatch.setattr(
        service,
        "generate_secure_file_path",
        lambda *args, **kwargs: "folder/entity/file.webp",
    )

    # Patch BASE_QUARANTINE_DIR at the module level
    mock_path = MagicMock(spec=Path)
    mock_path.__truediv__.return_value = mock_path
    monkeypatch.setattr("app.services.storage.BASE_QUARANTINE_DIR", mock_path)
    mock_path.parent.mkdir.return_value = None

    m_open = mock_open()
    with patch("builtins.open", m_open):
        result = await service.save_to_quarantine(
            file_content, "folder", "entity", "file.webp"
        )

    assert result == str(mock_path)
    m_open.assert_called_once_with(mock_path, "wb")
    m_open().write.assert_called_once_with(file_content)


@pytest.mark.asyncio
async def test_save_to_quarantine_invalid_type(monkeypatch):
    """Raises TypeError if file is neither UploadFile nor bytes."""
    monkeypatch.setattr(
        service, "generate_secure_file_path", lambda *a, **kw: "folder/entity/file.webp"
    )
    with pytest.raises(TypeError, match="Invalid file type"):
        await service.save_to_quarantine(12345, "folder", "entity", "file.webp")


@pytest.mark.asyncio
async def test_save_to_quarantine_read_failure(monkeypatch):
    """Raises exception if UploadFile.read() fails."""
    file = AsyncMock(spec=UploadFile)
    file.read.side_effect = Exception("Read failed")

    monkeypatch.setattr(
        service, "generate_secure_file_path", lambda *a, **kw: "folder/entity/file.webp"
    )
    monkeypatch.setattr(
        "app.services.storage.BASE_QUARANTINE_DIR", MagicMock(spec=Path)
    )

    with pytest.raises(Exception, match="Read failed"):
        await service.save_to_quarantine(file, "folder", "entity", "file.webp")


@pytest.mark.asyncio
async def test_save_to_quarantine_write_failure(monkeypatch):
    """Raises RuntimeError if writing file fails."""
    file_content = b"dummy content"
    file = AsyncMock(spec=UploadFile)
    file.read.return_value = file_content

    monkeypatch.setattr(
        service, "generate_secure_file_path", lambda *a, **kw: "folder/entity/file.webp"
    )
    mock_path = MagicMock(spec=Path)
    monkeypatch.setattr("app.services.storage.BASE_QUARANTINE_DIR", mock_path)
    mock_path.__truediv__.return_value = mock_path
    mock_path.parent.mkdir.return_value = None

    # Patch open to raise IOError
    m_open = mock_open()
    m_open.side_effect = IOError("Write failed")
    with patch("builtins.open", m_open):
        with pytest.raises(RuntimeError, match="Failed to save file to quarantine"):
            await service.save_to_quarantine(file, "folder", "entity", "file.webp")


@pytest.mark.asyncio
async def test_generate_signed_url_happy():
    """Returns the signed URL when blob.generate_signed_url succeeds."""
    blob = MagicMock()
    blob.generate_signed_url.return_value = "http://signed-url.com/file.webp"

    url = await service.generate_signed_url(blob, expires_seconds=60)
    assert url == "http://signed-url.com/file.webp"
    blob.generate_signed_url.assert_called_once()


@pytest.mark.asyncio
async def test_generate_signed_url_unhappy():
    """Raises RuntimeError if blob.generate_signed_url fails."""
    blob = MagicMock()
    blob.generate_signed_url.side_effect = Exception("Firebase error")

    with pytest.raises(RuntimeError, match="Failed to generate signed URL"):
        await service.generate_signed_url(blob, expires_seconds=60)
    blob.generate_signed_url.assert_called_once()


@pytest.mark.asyncio
async def test_upload_to_firebase_emulator(monkeypatch):
    """Returns emulator URLs when use_emulator=True."""
    monkeypatch.setattr(storage_module, "use_emulator", True)

    processed_images = {"thumbnail": b"thumb", "full": b"full"}
    mock_blob = MagicMock()
    mock_bucket = MagicMock()
    mock_bucket.blob.return_value = mock_blob

    monkeypatch.setattr(storage_module.storage, "bucket", lambda name: mock_bucket)
    monkeypatch.setattr(service, "get_bucket", lambda: "bucket_name")

    urls = await service.upload_to_firebase(processed_images, "file/path")

    for size in processed_images:
        assert size in urls
        assert urls[size].startswith("http://127.0.0.1:4000/storage/")

    assert mock_blob.upload_from_string.call_count == len(processed_images)


@pytest.mark.asyncio
async def test_upload_to_firebase_signed_url(monkeypatch):
    """Returns signed URLs when use_emulator=False."""
    monkeypatch.setattr(storage_module, "use_emulator", False)

    processed_images = {"thumbnail": b"thumb"}
    mock_blob = MagicMock()
    mock_bucket = MagicMock()
    mock_bucket.blob.return_value = mock_blob

    monkeypatch.setattr(storage_module.storage, "bucket", lambda name: mock_bucket)
    monkeypatch.setattr(service, "get_bucket", lambda: "bucket_name")
    monkeypatch.setattr(
        service, "generate_signed_url", AsyncMock(return_value="http://signed-url.com")
    )

    urls = await service.upload_to_firebase(processed_images, "file/path")

    assert urls["thumbnail"] == "http://signed-url.com"
    mock_blob.upload_from_string.assert_called_once_with(
        b"thumb", content_type="image/webp"
    )
    service.generate_signed_url.assert_awaited_once_with(
        mock_blob, expires_seconds=storage_module.SIGNED_URL_EXPIRY_SECONDS
    )


@pytest.mark.asyncio
async def test_upload_to_firebase_generate_signed_url_failure(monkeypatch):
    """Raises RuntimeError if generate_signed_url fails."""
    monkeypatch.setattr(storage_module, "use_emulator", False)

    processed_images = {"thumbnail": b"thumb"}
    mock_blob = MagicMock()
    mock_bucket = MagicMock()
    mock_bucket.blob.return_value = mock_blob

    monkeypatch.setattr(storage_module.storage, "bucket", lambda name: mock_bucket)
    monkeypatch.setattr(service, "get_bucket", lambda: "bucket_name")
    monkeypatch.setattr(
        service,
        "generate_signed_url",
        AsyncMock(side_effect=Exception("signed_url fail")),
    )

    with pytest.raises(RuntimeError, match="Failed to upload image to Firebase"):
        await service.upload_to_firebase(processed_images, "file/path")


@pytest.mark.asyncio
async def test_upload_to_firebase_upload_failure(monkeypatch):
    """Raises RuntimeError if blob.upload_from_string fails."""
    monkeypatch.setattr(storage_module, "use_emulator", True)

    processed_images = {"thumbnail": b"thumb"}
    mock_blob = MagicMock()
    mock_blob.upload_from_string.side_effect = Exception("upload fail")
    mock_bucket = MagicMock()
    mock_bucket.blob.return_value = mock_blob

    monkeypatch.setattr(storage_module.storage, "bucket", lambda name: mock_bucket)
    monkeypatch.setattr(service, "get_bucket", lambda: "bucket_name")

    with pytest.raises(RuntimeError, match="Failed to upload image to Firebase"):
        await service.upload_to_firebase(processed_images, "file/path")


@pytest.mark.asyncio
async def test_upload_to_firebase_firebase_error(monkeypatch):
    """Raises FirebaseError if storage.bucket fails with FirebaseError."""
    monkeypatch.setattr(storage_module, "use_emulator", True)

    processed_images = {"thumbnail": b"thumb"}

    def raise_firebase_error(name):
        raise FirebaseError("unknown", "Firebase error")

    monkeypatch.setattr(storage_module.storage, "bucket", raise_firebase_error)
    monkeypatch.setattr(service, "get_bucket", lambda: "bucket_name")

    with pytest.raises(FirebaseError):
        await service.upload_to_firebase(processed_images, "file/path")


@pytest.mark.asyncio
async def test_move_quarantine_to_public_success(monkeypatch, tmp_path):
    """Successfully moves a file from quarantine to public: processes image, uploads, and deletes local file."""
    # Create a dummy quarantine file
    quarantine_file = tmp_path / "file.webp"
    quarantine_file.write_bytes(b"dummy content")

    public_path = "public/folder/path"

    # Patch process_image and upload_to_firebase
    monkeypatch.setattr(
        service,
        "process_image",
        AsyncMock(return_value={"thumbnail": b"thumb", "full": b"full"}),
    )
    monkeypatch.setattr(
        service,
        "upload_to_firebase",
        AsyncMock(return_value={"thumbnail": "url_thumb", "full": "url_full"}),
    )

    urls = await service.move_quarantine_to_public(str(quarantine_file), public_path)

    # Assertions
    assert urls == {"thumbnail": "url_thumb", "full": "url_full"}
    assert not quarantine_file.exists()  # file should be deleted


@pytest.mark.asyncio
async def test_move_quarantine_to_public_file_missing(monkeypatch):
    """Raises ValueError if the quarantine file does not exist."""
    with pytest.raises(ValueError, match="Quarantine file not found"):
        await service.move_quarantine_to_public(
            "nonexistent_file.webp", "public/folder/path"
        )


@pytest.mark.asyncio
async def test_move_quarantine_to_public_process_failure(monkeypatch, tmp_path):
    """Raises RuntimeError if image processing fails; quarantine file remains on disk."""
    quarantine_file = tmp_path / "file.webp"
    quarantine_file.write_bytes(b"dummy content")

    # Patch process_image to raise an exception
    monkeypatch.setattr(
        service,
        "process_image",
        AsyncMock(side_effect=RuntimeError("Processing failed")),
    )

    with pytest.raises(RuntimeError, match="Processing failed"):
        await service.move_quarantine_to_public(
            str(quarantine_file), "public/folder/path"
        )

    # Quarantine file should still exist if processing fails
    assert quarantine_file.exists()


@pytest.mark.asyncio
async def test_move_quarantine_to_public_upload_failure(monkeypatch, tmp_path):
    """Raises RuntimeError if upload_to_firebase fails; quarantine file remains on disk."""
    quarantine_file = tmp_path / "file.webp"
    quarantine_file.write_bytes(b"dummy content")

    monkeypatch.setattr(
        service, "process_image", AsyncMock(return_value={"thumbnail": b"thumb"})
    )
    monkeypatch.setattr(
        service,
        "upload_to_firebase",
        AsyncMock(side_effect=RuntimeError("Upload failed")),
    )

    with pytest.raises(RuntimeError, match="Upload failed"):
        await service.move_quarantine_to_public(
            str(quarantine_file), "public/folder/path"
        )

    # Quarantine file should still exist if upload fails
    assert quarantine_file.exists()


@pytest.mark.asyncio
async def test_process_image_success(monkeypatch):
    """Returns processed image dictionary when image processing succeeds."""
    dummy_content = b"dummy image bytes"

    # Patch the actual process_image in image_processor.py
    monkeypatch.setattr(
        "app.services.storage.process_image",
        AsyncMock(
            return_value={"thumbnail": b"thumb", "medium": b"medium", "full": b"full"}
        ),
    )

    result = await service.process_image(dummy_content)

    assert result == {"thumbnail": b"thumb", "medium": b"medium", "full": b"full"}


@pytest.mark.asyncio
async def test_process_image_failure(monkeypatch):
    """Raises RuntimeError if the underlying image processing function fails."""
    dummy_content = b"dummy image bytes"

    # Patch the actual process_image to raise an exception
    monkeypatch.setattr(
        "app.services.storage.process_image",
        AsyncMock(side_effect=Exception("Some error")),
    )

    with pytest.raises(RuntimeError, match="Failed to process image."):
        await service.process_image(dummy_content)


@pytest.mark.asyncio
async def test_delete_image_success(monkeypatch):
    """Deletes all image size variants successfully and returns True."""
    # Mock bucket and blob
    mock_blob = MagicMock()
    mock_bucket = MagicMock()
    mock_bucket.blob.return_value = mock_blob

    monkeypatch.setattr(service, "get_bucket", lambda: "bucket_name")
    monkeypatch.setattr(storage_module.storage, "bucket", lambda name: mock_bucket)

    result = await service.delete_image("folder/file")

    assert result is True
    assert mock_blob.delete.call_count == 3
    expected_calls = [
        f"folder/file_{size}.webp" for size in ["thumbnail", "medium", "full"]
    ]
    actual_calls = [call[0][0] for call in mock_bucket.blob.call_args_list]
    assert actual_calls == expected_calls


@pytest.mark.asyncio
async def test_delete_image_failure(monkeypatch):
    """Returns False if deleting images fails due to an exception."""
    monkeypatch.setattr(service, "get_bucket", lambda: "bucket_name")

    def raise_error(name):
        raise Exception("Firebase failure")

    monkeypatch.setattr(storage_module.storage, "bucket", raise_error)

    result = await service.delete_image("folder/file")

    assert result is False


@pytest.mark.asyncio
async def test_upload_image_workflow_success(monkeypatch, tmp_path):
    """Successfully validates, saves to quarantine, processes, and uploads an image."""
    # NOTE(review): the handle opened here is never explicitly closed; it is
    # released only when the UploadFile is garbage collected.
    dummy_file = UploadFile(
        filename="image.png", file=tmp_path.joinpath("dummy").open("wb+")
    )
    dummy_file.file.write(b"dummy content")
    dummy_file.file.seek(0)

    # Patch validate_file, save_to_quarantine, move_quarantine_to_public
    monkeypatch.setattr(service, "validate_file", AsyncMock(return_value=True))
    monkeypatch.setattr(
        service,
        "save_to_quarantine",
        AsyncMock(return_value=str(tmp_path / "quarantine_file.webp")),
    )
    monkeypatch.setattr(
        service,
        "move_quarantine_to_public",
        AsyncMock(return_value={"thumbnail": "url_thumb"}),
    )
    monkeypatch.setattr(
        service,
        "generate_secure_file_path",
        lambda entity_id, folder, filename: f"{folder}/{entity_id}/secure_id.png",
    )

    urls = await service.upload_image_workflow(dummy_file, "users", "user123")

    assert urls == {"thumbnail": "url_thumb"}
    service.validate_file.assert_awaited_once_with(dummy_file)
    service.save_to_quarantine.assert_awaited_once()
    service.move_quarantine_to_public.assert_awaited_once()


@pytest.mark.asyncio
async def test_upload_image_workflow_invalid_file(monkeypatch):
    """Raises RuntimeError when the file is invalid."""
    dummy_file = AsyncMock()
    dummy_file.read = AsyncMock(return_value=b"dummy content")

    monkeypatch.setattr(service, "validate_file", AsyncMock(return_value=False))

    with pytest.raises(RuntimeError, match="Image upload failed."):
        await service.upload_image_workflow(dummy_file, "users", "user123")


@pytest.mark.asyncio
async def test_upload_image_workflow_processing_failure(monkeypatch, tmp_path):
    """Raises RuntimeError if moving file from quarantine to public fails."""
    dummy_file = UploadFile(
        filename="image.png", file=tmp_path.joinpath("dummy").open("wb+")
    )
    dummy_file.file.write(b"dummy content")
    dummy_file.file.seek(0)

    monkeypatch.setattr(service, "validate_file", AsyncMock(return_value=True))
    monkeypatch.setattr(
        service,
        "save_to_quarantine",
        AsyncMock(return_value=str(tmp_path / "quarantine_file.webp")),
    )
    monkeypatch.setattr(
        service,
        "move_quarantine_to_public",
        AsyncMock(side_effect=RuntimeError("Upload failed")),
    )
    monkeypatch.setattr(
        service,
        "generate_secure_file_path",
        lambda entity_id, folder, filename: f"{folder}/{entity_id}/secure_id.png",
    )

    with pytest.raises(RuntimeError, match="Image upload failed."):
        await service.upload_image_workflow(dummy_file, "users", "user123")
# --- Tests for update_user_avatar_url ---


def _update_result(modified_count):
    """Build a fake async update result exposing the given modified_count."""
    fake_result = AsyncMock()
    fake_result.modified_count = modified_count
    return fake_result


@pytest.mark.asyncio
async def test_update_user_avatar_url_success(mock_db_client, mock_get_database):
    """Test successful user avatar URL update"""
    user_id = "642f1e4a9b3c2d1f6a1b2c3d"
    image_url = "https://example.com/avatar.jpg"

    # One matching document is reported as modified.
    mock_db_client.users.update_one.return_value = _update_result(1)

    outcome = await user_service.update_user_avatar_url(user_id, image_url)

    assert outcome is True
    mock_db_client.users.update_one.assert_called_once_with(
        {"_id": ObjectId(user_id)}, {"$set": {"imageUrl": image_url}}
    )


@pytest.mark.asyncio
async def test_update_user_avatar_url_no_document_modified(
    mock_db_client, mock_get_database
):
    """Test when no document is modified (user not found)"""
    user_id = "642f1e4a9b3c2d1f6a1b2c3d"
    image_url = "https://example.com/avatar.jpg"

    # Zero modified documents means the user did not exist.
    mock_db_client.users.update_one.return_value = _update_result(0)

    outcome = await user_service.update_user_avatar_url(user_id, image_url)

    assert outcome is False


@pytest.mark.asyncio
async def test_update_user_avatar_url_invalid_object_id(
    mock_db_client, mock_get_database
):
    """Test with invalid ObjectId format"""
    bad_user_id = "invalid_object_id"  # Not a 24-char hex string
    image_url = "https://example.com/avatar.jpg"

    # ObjectId construction rejects the malformed id before any DB access.
    with pytest.raises(Exception):
        await user_service.update_user_avatar_url(bad_user_id, image_url)

    mock_db_client.users.update_one.assert_not_called()


@pytest.mark.asyncio
async def test_update_user_avatar_url_empty_image_url(
    mock_db_client, mock_get_database
):
    """Test updating with empty image URL"""
    user_id = "642f1e4a9b3c2d1f6a1b2c3d"
    image_url = ""

    # An empty URL is still written verbatim.
    mock_db_client.users.update_one.return_value = _update_result(1)

    outcome = await user_service.update_user_avatar_url(user_id, image_url)

    assert outcome is True
    mock_db_client.users.update_one.assert_called_once_with(
        {"_id": ObjectId(user_id)}, {"$set": {"imageUrl": image_url}}
    )


@pytest.mark.asyncio
async def test_update_user_avatar_url_none_image_url(mock_db_client, mock_get_database):
    """Test updating with None image URL"""
    user_id = "642f1e4a9b3c2d1f6a1b2c3d"
    image_url = None

    # None is accepted and stored as-is (clears the avatar).
    mock_db_client.users.update_one.return_value = _update_result(1)

    outcome = await user_service.update_user_avatar_url(user_id, image_url)

    assert outcome is True
    mock_db_client.users.update_one.assert_called_once_with(
        {"_id": ObjectId(user_id)}, {"$set": {"imageUrl": image_url}}
    )