Spaces:
Paused
Paused
# syntax=docker/dockerfile:1
# GPU-enabled FastAPI/llama.cpp service image.
FROM nvidia/cuda:11.6.0-base-ubuntu20.04

# `RUN export PATH=...` only affects that single RUN's shell and is lost when
# the layer ends; ENV makes the CUDA toolchain visible to every later build
# step and to the running container.
ENV PATH="/usr/local/cuda/bin:${PATH}"

# Fail the wget|tar pipeline below on the *download* step, not just on tar —
# the default /bin/sh -c only reports the last command's exit status.
SHELL ["/bin/bash", "-o", "pipefail", "-c"]

# apt-get (not apt — its CLI is not stable for scripts), update+install in one
# layer so the package index is never stale, and list cleanup in the same
# layer so it does not persist in the image.
RUN apt-get update && \
    apt-get install -y --no-install-recommends \
        build-essential \
        curl \
        git \
        python3 \
        python3-pip \
        wget && \
    rm -rf /var/lib/apt/lists/*

WORKDIR /app

# CMake is needed to build llama-cpp-python from source; unpack straight into
# /usr/local so no archive file is left behind in the layer.
RUN wget -qO- "https://cmake.org/files/v3.17/cmake-3.17.0-Linux-x86_64.tar.gz" \
    | tar --strip-components=1 -xz -C /usr/local

# Build llama-cpp-python with cuBLAS GPU acceleration enabled.
RUN CMAKE_ARGS="-DLLAMA_CUBLAS=on" FORCE_CMAKE=1 \
    pip install --no-cache-dir git+https://github.com/abetlen/llama-cpp-python

# Copy only the dependency manifest first so this layer stays cached when
# application source changes; --no-cache-dir keeps pip's wheel cache out of
# the image.
COPY requirements.txt ./
RUN pip install --no-cache-dir --upgrade pip && \
    pip install --no-cache-dir -r requirements.txt

# Model download kept for reference — the model is currently expected to come
# from the build context or a runtime mount instead.
#RUN curl -s https://packagecloud.io/install/repositories/github/git-lfs/script.deb.sh | bash
#RUN apt-get install git-lfs -y
#RUN git clone https://huggingface.co/TheBloke/robin-13B-v2-GGML
#RUN mv robin-13B-v2-GGML/robin-13b.ggmlv3.q3_K_M.bin .
#RUN rm -rf robin-13B-v2-GGML/

COPY . .

# Don't run the service as root; 8000 is an unprivileged port so the bind
# still succeeds for a non-root user.
RUN useradd --system --home-dir /app appuser && chown -R appuser /app
USER appuser

# Documentation only (does not publish the port): the app listens on 8000.
EXPOSE 8000

CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"]