-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathDockerfile
More file actions
69 lines (57 loc) · 2.04 KB
/
Dockerfile
File metadata and controls
69 lines (57 loc) · 2.04 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
# syntax=docker/dockerfile:1

# ✅ Base image with Python 3.10 and CUDA runtime libs for optional GPU support
FROM nvidia/cuda:12.1.0-runtime-ubuntu22.04

# Build-time only: suppress interactive apt prompts. Declared as ARG (not ENV)
# so it does not leak into the runtime environment of the final image.
ARG DEBIAN_FRONTEND=noninteractive

# Stream Python stdout/stderr immediately so container logs are not buffered.
ENV PYTHONUNBUFFERED=1

# Install Python + basic tools. apt lists are removed in the same layer so the
# package index cache never persists into the image.
RUN apt-get update && \
    apt-get install -y --no-install-recommends \
        build-essential \
        ffmpeg \
        git \
        libsndfile1 \
        python3 \
        python3-dev \
        python3-pip \
        python3-venv && \
    ln -sf /usr/bin/python3 /usr/bin/python && \
    rm -rf /var/lib/apt/lists/*

# Upgrade pip and install core build tooling.
# Dependencies are installed BEFORE the source tree is copied so that editing
# application code does not invalidate these slow, large layers.
RUN pip install --no-cache-dir --upgrade pip setuptools wheel

# Install all application dependencies (except torch — handled separately below).
# NOTE(review): versions are unpinned; a pinned requirements.txt would make
# builds reproducible — confirm with the project owners.
RUN pip install --no-cache-dir \
    "fastapi[all]" \
    uvicorn \
    ollama \
    langchain \
    langchain-ollama \
    langchain-community \
    langchain-huggingface \
    langchain-chroma \
    chromadb \
    sentence-transformers \
    python-dotenv \
    "marker-pdf[full]" \
    speechrecognition \
    unstructured \
    markdown \
    pydub \
    sounddevice

# ✅ Install CPU-only PyTorch first (small & compatible)
RUN pip install --no-cache-dir torch torchvision --index-url https://download.pytorch.org/whl/cpu

# Copy project files (after dependency layers, for cache friendliness).
WORKDIR /app
COPY . .

# Move to backend folder
WORKDIR /app/Backend-new

# ✅ Optional GPU upgrade — runs at container startup, not during build.
# This ensures CUDA wheels are only downloaded when a GPU is actually present.
# Written via a BuildKit heredoc (requires the syntax directive above) instead
# of a fragile multi-argument `echo '…\n'` chain.
# The upgrade is deliberately best-effort: a failed pip install still falls
# through to `exec "$@"` so the service starts in CPU mode.
COPY --chmod=755 <<'EOF' /entrypoint.sh
#!/bin/bash
if command -v nvidia-smi &>/dev/null; then
    echo "🔋 GPU detected — upgrading to CUDA-enabled PyTorch..."
    # NOTE(review): cu128 wheels target CUDA 12.8 while the base image ships
    # the 12.1 runtime — confirm driver/toolkit compatibility on target hosts.
    pip install --no-cache-dir torch torchvision --index-url https://download.pytorch.org/whl/cu128
else
    echo "⚙️ Running in CPU mode (no GPU detected)."
fi
# Replace the shell so the server becomes PID 1 and receives SIGTERM directly.
exec "$@"
EOF

# Documentation for operators/tooling: the FastAPI port (EXPOSE does not publish).
EXPOSE 8000

# Use entrypoint to auto-upgrade torch if GPU exists
ENTRYPOINT ["/entrypoint.sh"]

# Start the backend
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"]