Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,8 @@
/.next/
/out/

/api/.env

# production
/build

Expand Down
Empty file added api/__init__.py
Empty file.
165 changes: 165 additions & 0 deletions api/index.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,165 @@
from fastapi import FastAPI, HTTPException, UploadFile, File, Form, Depends
from fastapi.middleware.cors import CORSMiddleware
import os
import tempfile
import base64
from google import genai
from google.genai import types
from dotenv import load_dotenv
from typing import Optional
import shutil
import json
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from sqlalchemy import desc

# Import database models
from .logic.database import get_db, VideoAnalysis, init_db

# Import Pydantic models
from .logic.models import MealPlan

# Load environment variables from api/.env BEFORE reading any of them.
# (Bug fix: GEMINI_API_KEY was previously read with os.getenv before
# load_dotenv() ran, so a key defined only in .env was never picked up.)
load_dotenv()

# Gemini API key; when missing, /analyze-video responds with HTTP 500.
GEMINI_API_KEY = os.getenv("GEMINI_API_KEY", None)

app = FastAPI(title="Vercel FastAPI")

# Add CORS middleware so the frontend can call the API from any origin.
# NOTE(review): browsers reject allow_origins=["*"] combined with
# allow_credentials=True for credentialed requests; restrict origins if
# cookies or auth headers are ever needed.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Initialize database tables on startup
@app.on_event("startup")
async def startup_db_client():
    """Create database tables before the app starts serving requests.

    NOTE(review): ``@app.on_event`` is deprecated in recent FastAPI
    releases in favor of the lifespan context manager — confirm the
    pinned FastAPI version before migrating.
    """
    await init_db()

@app.get("/")
async def root():
    """Landing endpoint confirming the API is up."""
    greeting = {"message": "Welcome to FastAPI on Vercel!"}
    return greeting


@app.get("/hello")
async def hello():
    """Simple hello-world endpoint."""
    payload = {"message": "Hello World"}
    return payload


@app.post("/analyze-video", response_model=MealPlan)
async def analyze_video(
    video: UploadFile = File(...),
    db: AsyncSession = Depends(get_db),
):
    """Analyze an uploaded food video with Gemini and store the meal plan.

    Only the most recent analysis is retained: any previously stored row
    is deleted before the new result is saved.

    Raises:
        HTTPException: 500 if the Gemini API key is not configured,
            400 if the upload is not a video.
    """
    if not GEMINI_API_KEY:
        raise HTTPException(status_code=500, detail="Gemini API key not configured")

    if not video.content_type or not video.content_type.startswith("video/"):
        raise HTTPException(status_code=400, detail="File must be a video")

    # Save uploaded file temporarily so its raw bytes can be re-read below.
    with tempfile.NamedTemporaryFile(
        delete=False, suffix=os.path.splitext(video.filename)[1]
    ) as temp_video:
        shutil.copyfileobj(video.file, temp_video)
        temp_video_path = temp_video.name

    # Prompt describing the meal plan we want back as structured JSON.
    prompt = """
Analyze this video showing food items and create a meal plan.

Based on the food items visible in the video, generate:
1. Recipes with detailed ingredients and instructions for the meal plans. Meal plans should be for the whole week. Monday to Sunday.
2. A complete shopping list with all required ingredients for the meal plans if you don't have the ingredients in your kitchen. Which are not in the video.

"""

    # Ensure the temp file is removed even when the Gemini call or the
    # database operations fail (previously it leaked on any exception).
    try:
        # Initialize Gemini client
        client = genai.Client(api_key=GEMINI_API_KEY)

        # Read the video file; the SDK takes raw bytes, so the previous
        # unused base64 encoding step has been dropped.
        with open(temp_video_path, "rb") as f:
            video_bytes = f.read()

        # Create content with the prompt plus inline video data.
        contents = [
            types.Content(
                role="user",
                parts=[
                    types.Part.from_text(text=prompt),
                    types.Part(
                        inline_data=types.Blob(
                            mime_type=video.content_type,
                            data=video_bytes
                        )
                    ),
                ],
            ),
        ]

        # Deterministic (temperature=0) JSON output validated against MealPlan.
        response = client.models.generate_content(
            model="gemini-2.0-flash",
            contents=contents,
            config=types.GenerateContentConfig(
                response_mime_type="application/json",
                temperature=0,
                response_schema=MealPlan
            )
        )

        # Keep only one analysis: delete the previous newest row, if any.
        result = await db.execute(
            select(VideoAnalysis).order_by(desc(VideoAnalysis.created_at)).limit(1)
        )
        existing_analysis = result.scalars().first()
        if existing_analysis:
            await db.delete(existing_analysis)
            await db.commit()

        # Persist the new analysis as serialized JSON.
        analysis = VideoAnalysis(
            filename=video.filename,
            content_type=video.content_type,
            prompt=prompt,
            analysis_text=response.parsed.model_dump_json(),
        )

        db.add(analysis)
        await db.commit()
        await db.refresh(analysis)

        return response.parsed
    finally:
        # Always clean up the temporary upload.
        if os.path.exists(temp_video_path):
            os.unlink(temp_video_path)





@app.get("/analysis", response_model=MealPlan)
async def get_latest_analysis(db: AsyncSession = Depends(get_db)):
    """Return the most recent stored video analysis as a MealPlan.

    Raises:
        HTTPException: 404 when no analysis has been stored yet.
    """
    result = await db.execute(
        select(VideoAnalysis).order_by(desc(VideoAnalysis.created_at)).limit(1)
    )
    analysis = result.scalars().first()

    if not analysis:
        raise HTTPException(status_code=404, detail="No analysis found")

    # analysis_text was stored as model_dump_json() output (a JSON string).
    # Bug fix: returning the raw string fails response_model=MealPlan
    # validation — parse it back into a MealPlan first. Handle both the
    # string form and an already-deserialized dict (JSON column round-trip).
    data = analysis.analysis_text
    if isinstance(data, str):
        return MealPlan.model_validate_json(data)
    return MealPlan.model_validate(data)
Empty file added api/logic/__init__.py
Empty file.
Binary file added api/logic/__pycache__/__init__.cpython-312.pyc
Binary file not shown.
Binary file added api/logic/__pycache__/database.cpython-312.pyc
Binary file not shown.
Binary file added api/logic/__pycache__/models.cpython-312.pyc
Binary file not shown.
11 changes: 11 additions & 0 deletions api/logic/create_tables.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
import asyncio
from api.logic.database import init_db

async def create_tables() -> None:
    """Initialize the database tables.

    Thin CLI wrapper around init_db() so tables can be created by
    running this module directly.
    """
    print("Creating database tables...")
    await init_db()
    print("Database tables created successfully!")

# Allow running as a script, e.g.: python -m api.logic.create_tables
if __name__ == "__main__":
    asyncio.run(create_tables())
47 changes: 47 additions & 0 deletions api/logic/database.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
from sqlalchemy.orm import sessionmaker, declarative_base
from sqlalchemy import Column, Integer, String, Text, DateTime, JSON
import os
from datetime import datetime
from dotenv import load_dotenv

# Read configuration from a .env file when present.
load_dotenv()

# Prefer the configured DATABASE_URL; fall back to a local SQLite file so
# the app runs without any setup during development.
DATABASE_URL = os.getenv("DATABASE_URL") or "sqlite+aiosqlite:///./test.db"

# Async engine; echo=True logs every SQL statement (development-friendly).
engine = create_async_engine(DATABASE_URL, echo=True)

# Session factory producing AsyncSession objects whose attributes remain
# usable after commit.
async_session = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False)

Base = declarative_base()


class VideoAnalysis(Base):
    """ORM row storing one video analysis result."""

    __tablename__ = "video_analyses"

    id = Column(Integer, primary_key=True, index=True)
    # Original filename of the uploaded video.
    filename = Column(String(255), nullable=False)
    # MIME type of the upload (e.g. "video/mp4").
    content_type = Column(String(100), nullable=False)
    # Prompt text sent to the model for this analysis.
    prompt = Column(Text, nullable=True)
    # Serialized analysis payload (stored as model_dump_json output).
    analysis_text = Column(JSON, nullable=False)
    # NOTE(review): datetime.utcnow is naive and deprecated since Python
    # 3.12; consider default=lambda: datetime.now(timezone.utc).
    created_at = Column(DateTime, default=datetime.utcnow)

    def __repr__(self):
        return f"<VideoAnalysis {self.id}: {self.filename}>"


async def get_db():
    """FastAPI dependency yielding an async DB session.

    The ``async with`` block already closes the session on exit (including
    when an exception propagates through the yield), so the previous
    redundant try/finally ``session.close()`` has been removed.
    """
    async with async_session() as session:
        yield session


async def init_db():
    """Create all tables registered on Base.metadata (no-op if they exist)."""
    async with engine.begin() as conn:
        # create_all is synchronous, so it must run via run_sync on the
        # async connection.
        await conn.run_sync(Base.metadata.create_all)
68 changes: 68 additions & 0 deletions api/logic/models.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,68 @@
from pydantic import BaseModel
from datetime import datetime
from typing import Optional, List


class VideoAnalysisBase(BaseModel):
    """Base model for video analysis data"""

    # Original filename of the uploaded video.
    filename: str
    # MIME type of the upload (e.g. "video/mp4").
    content_type: str
    # Prompt sent to the model; optional.
    prompt: Optional[str] = None


class VideoAnalysisCreate(VideoAnalysisBase):
    """Model for creating a video analysis"""

    # Serialized analysis payload to persist.
    analysis_text: str


class VideoAnalysisResponse(VideoAnalysisBase):
    """Model for returning a video analysis"""

    id: int
    # Serialized analysis payload as stored in the database.
    analysis_text: str
    created_at: datetime

    class Config:
        # Allows building this model directly from SQLAlchemy row objects.
        from_attributes = (
            True  # Enables ORM mode (renamed from orm_mode in Pydantic v2)
        )


class VideoAnalysisList(BaseModel):
    """Model for returning a list of video analyses"""

    items: list[VideoAnalysisResponse]
    # Total number of analyses in the list.
    count: int


# Meal planning models
class Ingredient(BaseModel):
    """A single ingredient with an amount, used in recipes and shopping lists."""

    name: str
    quantity: float
    # Measurement unit for quantity (unit semantics come from the model
    # output; not constrained here).
    unit: str


class Recipe(BaseModel):
    """A named recipe: its ingredients plus step-by-step instructions."""

    name: str
    ingredients: List[Ingredient]
    # Ordered preparation steps.
    instructions: List[str]


class ShoppingList(BaseModel):
    """Ingredients still needed for the meal plan (not seen in the video)."""

    items: List[Ingredient]


class DayMeal(BaseModel):
    """Meals planned for a single day of the week."""

    day: str  # Monday, Tuesday, etc.
    breakfast: Optional[str] = None
    lunch: Optional[str] = None
    dinner: Optional[str] = None
    # Pydantic copies field defaults per instance, so the mutable []
    # default is safe here (unlike a plain function default).
    recipe_refs: List[str] = []  # References to recipes by name


class MealPlan(BaseModel):
    """Top-level structured output: shopping list, recipes, weekly plan."""

    shopping_list: ShoppingList
    recipes: List[Recipe]
    days: List[DayMeal] = []  # Weekly meal plan organized by days
10 changes: 10 additions & 0 deletions docker-compose.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@

services:
  postgres:
    # Local development database; DATABASE_URL should point here.
    image: postgres:15
    environment:
      # NOTE(review): placeholder credentials for local development only —
      # do not reuse in production.
      POSTGRES_USER: user
      POSTGRES_PASSWORD: password
      POSTGRES_DB: dbname
    ports:
      - "5432:5432"
25 changes: 25 additions & 0 deletions next.config.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,31 @@ const nextConfig = {
parallelServerBuildTraces: true,
parallelServerCompiles: true,
},
// Proxy backend routes to FastAPI: in development the Python server runs
// separately on 127.0.0.1:8000; in production the requests are handled
// under /api.
rewrites: async () => {
  return [
    {
      source: "/api/:path*",
      // NOTE(review): the production destination drops ":path*" — confirm
      // the deployment routes every /api/* request to the single backend
      // function; otherwise this should likely be "/api/:path*".
      destination:
        process.env.NODE_ENV === "development"
          ? "http://127.0.0.1:8000/:path*"
          : "/api/",
    },
    {
      // Serve FastAPI's interactive docs at /docs.
      source: "/docs",
      destination:
        process.env.NODE_ENV === "development"
          ? "http://127.0.0.1:8000/docs"
          : "/api/docs",
    },
    {
      // Expose the OpenAPI schema used by the docs UI.
      source: "/openapi.json",
      destination:
        process.env.NODE_ENV === "development"
          ? "http://127.0.0.1:8000/openapi.json"
          : "/api/openapi.json",
    },
  ];
},
}

if (userConfig) {
Expand Down
12 changes: 12 additions & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
fastapi==0.104.1
uvicorn==0.23.2
mangum==0.17.0
pydantic==2.4.2
python-multipart==0.0.6
google-generativeai==0.3.1  # legacy SDK; api/index.py imports the newer google-genai package (listed below) — consider removing
python-dotenv==1.0.0
sqlalchemy==2.0.23
psycopg2-binary==2.9.9
asyncpg==0.28.0
aiosqlite==0.19.0
google-genai  # SDK actually used by api/index.py (from google import genai); consider pinning a version for reproducible builds