FROM python:3.12

RUN useradd -m -u 1000 app
WORKDIR /home/app

# Build tools needed to compile llama.cpp
RUN apt update
RUN apt install -y wget make cmake clang git g++

# Download the quantized GGUF model from Hugging Face
RUN wget "https://huggingface.co/neph1/llama-3.1-instruct-bellman-8b-swedish/resolve/main/gguf/llama-3.1-instruct-bellman-8b-swedish-q5_k_m.gguf?download=true" -O model.gguf

# Fetch llama.cpp and move its contents into the working directory
RUN git clone https://github.com/ggerganov/llama.cpp
RUN mv llama.cpp llama_temp
RUN mv llama_temp/* .

# Build llama.cpp (produces the llama-server binary)
RUN make

RUN apt install socat -y

EXPOSE 7860

# Serve the model on port 7860 with an 8192-token context window
CMD ["sh", "-c", "./llama-server -m /home/app/model.gguf -c 8192 --host 0.0.0.0 --port 7860"]
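
Once the image is built and the container is running with port 7860 reachable from the host, the server can be queried over HTTP. Below is a minimal client sketch, assuming llama-server's OpenAI-compatible `/v1/chat/completions` endpoint and a `localhost:7860` mapping; the URL, prompt, and generation parameters are illustrative placeholders and should be adjusted to wherever the container actually runs (for example a Hugging Face Space URL).

```python
# query_server.py - minimal client sketch for the llama-server started above.
# Assumes the container's port 7860 is reachable on localhost; adjust URL otherwise.
import json
import urllib.request

URL = "http://localhost:7860/v1/chat/completions"  # placeholder host/port

payload = {
    "messages": [
        {"role": "system", "content": "You are a helpful assistant that answers in Swedish."},
        {"role": "user", "content": "Vad är huvudstaden i Sverige?"},
    ],
    "max_tokens": 128,
    "temperature": 0.7,
}

request = urllib.request.Request(
    URL,
    data=json.dumps(payload).encode("utf-8"),
    headers={"Content-Type": "application/json"},
    method="POST",
)

with urllib.request.urlopen(request) as response:
    body = json.load(response)

# The response follows the OpenAI chat completion schema.
print(body["choices"][0]["message"]["content"])
```

Using the standard library's `urllib` keeps the example dependency-free; any OpenAI-compatible client library pointed at the same base URL should work just as well.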