Skip to content

Commit 491f4a0

Browse files
committed
smaller dockerfile
1 parent 846ccd2 commit 491f4a0

File tree

1 file changed

+29
-19
lines changed

1 file changed

+29
-19
lines changed

dockerfile.prod

Lines changed: 29 additions & 19 deletions
Original file line numberDiff line numberDiff line change
# syntax=docker/dockerfile:1

# ---- Build stage ----
# Heavy stage with compilers/headers for building wheels; discarded entirely,
# so nothing installed here bloats the final image.
FROM python:3.12-slim-bullseye AS builder

WORKDIR /app

# Build-time dependencies only. --no-install-recommends keeps the layer small;
# apt lists are removed in the same layer so they never persist in the image.
RUN apt-get update && apt-get install -y --no-install-recommends \
        build-essential \
        curl \
    && rm -rf /var/lib/apt/lists/*

# Install requirements, then swap the default torch wheel for the CPU-only
# build (avoids the multi-GB CUDA runtime — this container has no GPU).
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt \
    && pip uninstall -y torch \
    && pip install --no-cache-dir torch --index-url https://download.pytorch.org/whl/cpu

# ---- Final stage ----
FROM python:3.12-slim-bullseye

WORKDIR /app

# pipefail makes the curl | sh below fail the build if the download breaks
# mid-pipe instead of silently running a truncated script.
SHELL ["/bin/bash", "-o", "pipefail", "-c"]

# Runtime dependencies: curl fetches the Ollama installer; ca-certificates is
# an explicit dep because --no-install-recommends no longer pulls it in for
# HTTPS. -fsSL: fail on HTTP errors, follow redirects, stay quiet.
RUN apt-get update && apt-get install -y --no-install-recommends \
        ca-certificates \
        curl \
    && rm -rf /var/lib/apt/lists/* \
    && curl -fsSL https://ollama.ai/install.sh | sh

# Copy the installed Python packages AND their console-script entry points.
# The /usr/local/bin copy is required: the `streamlit` executable lives there,
# and without it start.sh would fail with "streamlit: command not found".
COPY --from=builder /usr/local/lib/python3.12/site-packages/ /usr/local/lib/python3.12/site-packages/
COPY --from=builder /usr/local/bin/ /usr/local/bin/

# Copy only the application files needed at runtime (not the whole context).
COPY app.py .
COPY dogs_cleaned.csv .

# Pull the model at build time so containers start without a large download;
# clean caches in the same layer so the deletion actually shrinks the image.
RUN ollama serve & \
    sleep 10 && \
    ollama pull mistral && \
    rm -rf /root/.cache

# Streamlit listens here (documentation only; publish with -p at run time).
EXPOSE 8080

# Launcher script: background Ollama, then `exec` Streamlit so it becomes
# PID 1 and receives SIGTERM from `docker stop`. printf is used instead of
# echo because echo's handling of \n escapes varies between shells.
RUN printf '%s\n' \
        '#!/bin/bash' \
        'ollama serve &' \
        'sleep 5' \
        'exec streamlit run app.py --server.address=0.0.0.0 --server.port=8080' \
        > start.sh \
    && chmod +x start.sh

CMD ["./start.sh"]

0 commit comments

Comments
 (0)