# syntax=docker/dockerfile:1
#FROM ubuntu
FROM python:3.11-slim-bullseye
#FROM nvidia/cuda:12.3.0-devel-ubuntu22.04

# Build toolchain for compiling native wheels (llama-cpp-python needs
# cmake/ninja and a C/C++ compiler). Debian's python3/python3-pip are NOT
# installed: the base image already ships Python 3.11 + pip under
# /usr/local, and the Debian packages would pull in a second, older
# interpreter. The apt lists are removed in the same layer to keep the
# image small, and the blanket `apt-get upgrade` is dropped (prefer
# bumping the base-image tag to pick up security fixes).
RUN apt-get update && apt-get install -y --no-install-recommends \
    build-essential \
    cmake \
    curl \
    gnupg2 \
    musl-dev \
    ninja-build \
    pkg-config \
    wget \
    && rm -rf /var/lib/apt/lists/*

# Expose musl's libc (from musl-dev) under the loader path that
# musl-linked binaries expect (/lib/libc.musl-x86_64.so.1).
# NOTE(review): presumably required by a prebuilt musl-linked dependency —
# confirm which package actually loads this; also note plain `ln -s`
# fails if the link already exists (`ln -sf` would be idempotent).
RUN ln -s /usr/lib/x86_64-linux-musl/libc.so /lib/libc.musl-x86_64.so.1

# Build-time staging area for the dependency manifest. All steps below
# (until USER) run as root, so the previous blanket `RUN chmod 777 .`
# (world-writable anti-pattern) was unnecessary and has been removed.
WORKDIR /code

# Copy only the manifest first so the dependency-install layers stay
# cached when application source changes.
COPY ./requirements.txt /code/requirements.txt

# Install a pinned, CPU-only llama-cpp-python wheel. LLAMA_NATIVE=off
# disables -march=native so the compiled library runs on any x86-64
# host, not just the build machine. --no-cache-dir keeps pip's download
# cache out of the image layer (DL3042).
RUN CMAKE_ARGS="-DLLAMA_NATIVE=off" pip install --no-cache-dir llama-cpp-python==0.2.78 \
    --force-reinstall --extra-index-url https://abetlen.github.io/llama-cpp-python/whl/cpu

# Remaining application dependencies from the manifest copied above.
RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt

# Run the application as an unprivileged user. A fixed numeric UID
# (1000) lets runtimes such as Kubernetes verify runAsNonRoot.
RUN useradd -m -u 1000 user

USER user

# Runtime configuration (normalized to spaces; the old block mixed a
# tab into the continuation). HOST/PORT/ORIGINS are presumably read by
# the app at startup; PORT matches the EXPOSE below.
ENV HOME=/home/user \
    PATH=/home/user/.local/bin:$PATH \
    TF_ENABLE_ONEDNN_OPTS=0 \
    HOST=0.0.0.0 \
    PORT=7860 \
    ORIGINS=*

WORKDIR $HOME/app

# --chown gives the runtime user ownership of the app tree in the same
# layer, so the former `RUN chmod 777 .` (world-writable, and it
# duplicated the copied files into an extra layer) has been removed.
COPY --chown=user . $HOME/app

# Documentation only (does not publish the port): app listens on 7860.
EXPOSE 7860

# Exec form so python runs as PID 1 and receives SIGTERM from
# `docker stop` directly.
CMD ["python", "-m", "main"]