# Use the official Python image with a specific version
FROM python:3.12.5-slim

RUN useradd -m -u 1000 user
USER user
ENV PATH="/home/user/.local/bin:$PATH"

# Set the working directory in the container
WORKDIR /app

# Install system dependencies as root
USER root
RUN apt-get update && apt-get install -y --no-install-recommends \
        gcc \
        g++ \
        cmake \
        git \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*

# Switch back to the non-root user
USER user

# Copy the requirements file into the container
COPY --chown=user ./requirements.txt requirements.txt

# Install dependencies
RUN pip install llama-cpp-python
RUN pip install --no-cache-dir -r requirements.txt

# Copy the application code into the container
COPY --chown=user ./llm.py /app/llm.py
COPY --chown=user ./llama-3.2-1b-instruct-q4_k_m.gguf /app/llama-3.2-1b-instruct-q4_k_m.gguf

# Expose the application port
EXPOSE 7860

# Define the command to run the application
CMD ["uvicorn", "llm:app", "--host", "0.0.0.0", "--port", "7860"]
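
# For reference only: a minimal sketch of what llm.py could look like, assuming
# llm:app is a FastAPI application (the real llm.py is copied in above and may differ):
#
#   from fastapi import FastAPI
#   from llama_cpp import Llama
#
#   app = FastAPI()
#   llm = Llama(model_path="/app/llama-3.2-1b-instruct-q4_k_m.gguf")
#
#   @app.get("/generate")
#   def generate(prompt: str):
#       out = llm(prompt, max_tokens=128)
#       return {"text": out["choices"][0]["text"]}
#
# A local build-and-run sketch (the image tag "llm-app" is just an example name):
#
#   docker build -t llm-app .
#   docker run -p 7860:7860 llm-app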