# syntax=docker/dockerfile:1
# Alternative base images kept for reference; the slim Python image is active.
#FROM ubuntu
# Pinned minor version of the official slim Python image (Debian bullseye).
FROM python:3.11-slim-bullseye
# CUDA devel variant — swap in instead of the line above when GPU builds are needed.
#FROM nvidia/cuda:12.3.0-devel-ubuntu22.04
# Build toolchain needed to compile native wheels (llama-cpp-python below).
# python3/python3-pip are retained even though the base image ships Python,
# to preserve the original package set.  Packages are sorted alphabetically
# and the apt lists are removed in the same layer to keep the image small
# (hadolint DL3009).  The blanket `apt-get upgrade` was dropped (DL3005) —
# pick up security fixes by bumping the base image tag instead.
RUN apt-get update && apt-get install -y --no-install-recommends \
    build-essential \
    cmake \
    curl \
    gnupg2 \
    musl-dev \
    ninja-build \
    pkg-config \
    python3 \
    python3-pip \
    wget \
 && rm -rf /var/lib/apt/lists/*
# Some prebuilt binaries link against musl libc; expose Debian's musl under
# the loader path (/lib/libc.musl-x86_64.so.1) those binaries expect.
RUN ln -s /usr/lib/x86_64-linux-musl/libc.so /lib/libc.musl-x86_64.so.1
WORKDIR /code
# NOTE(review): world-writable build dir — presumably required so the hosting
# runtime (looks like a Hugging Face Space) can write here; confirm and
# tighten to targeted permissions if not.
RUN chmod 777 .
# Copy only the dependency manifest first so the pip layers below stay
# cached while application source changes.
COPY ./requirements.txt /code/requirements.txt
# Build llama-cpp-python without -march=native so the wheel is portable,
# preferring the prebuilt CPU wheels from the extra index.
# --no-cache-dir added (hadolint DL3042) so pip's download cache is not
# baked into the image layer.
RUN CMAKE_ARGS="-DLLAMA_NATIVE=off" pip install --no-cache-dir llama-cpp-python==0.2.78 \
    --force-reinstall --extra-index-url https://abetlen.github.io/llama-cpp-python/whl/cpu
RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
# Run as an unprivileged user; UID 1000 is the convention on HF Spaces.
RUN useradd -m -u 1000 user
USER user
# Runtime configuration; ~/.local/bin is on PATH for user-level pip installs.
ENV HOME=/home/user \
    PATH=/home/user/.local/bin:$PATH \
    TF_ENABLE_ONEDNN_OPTS=0 \
    HOST=0.0.0.0 \
    PORT=7860 \
    ORIGINS=*
WORKDIR $HOME/app
# --chown makes the whole app tree owned by `user`, which the server process
# runs as — so the original follow-up `chmod 777 .` (world-writable) was
# redundant and has been removed (least privilege).
COPY --chown=user . $HOME/app
# Documentation only — does not publish the port (see PORT above).
EXPOSE 7860
# Exec form: python runs as PID 1 and receives SIGTERM from `docker stop`.
# (Stray trailing `|` artifact removed — it made the CMD invalid.)
CMD ["python", "-m", "main"]