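# Build an Ollama image that ships a quantized Llama GGUF model, ready to serve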
FROM ollama/ollama:latest
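# curl is used below to download the model weights and to poll the server during the build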
RUN apt-get update && apt-get install -y curl
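# Create an unprivileged user (UID 1000) and run everything that follows as that user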
RUN useradd -m -u 1000 user
USER user
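# OLLAMA_HOST=0.0.0.0 makes the server listen on all interfaces rather than only on localhost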
ENV HOME=/home/user \
    PATH=/home/user/.local/bin:$PATH \
    OLLAMA_HOST=0.0.0.0
WORKDIR $HOME/app
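# The Modelfile describes how to build an Ollama model from the downloaded GGUF weights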
COPY --chown=user:user Modelfile $HOME/app/
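# Fetch the quantized GGUF weights that the Modelfile references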
RUN curl -fsSL -o llama.gguf "https://huggingface.co/gingdev/llama7b-ictu-v2/resolve/main/llama7b_q4_k_m.gguf?download=true"
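# Start a temporary Ollama server, wait until its API answers, then register the model;
# the created model lands under ~/.ollama and is baked into the image layer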
RUN ollama serve & \
    timeout 30 sh -c 'until curl -s http://127.0.0.1:11434/ > /dev/null; do sleep 1; done' && \
    ollama create llama -f Modelfile
EXPOSE 11434
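# No CMD/ENTRYPOINT needed: the ollama/ollama base image already defaults to `ollama serve`.
# Example usage (the image tag is a placeholder):
#   docker build -t ollama-gguf .
#   docker run -p 11434:11434 ollama-gguf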