Update Dockerfile
# 1. Start from the same great NVIDIA base image
FROM nvcr.io/nvidia/pytorch:24.04-py3

# 2. Set the working directory
WORKDIR /repository

# 3. Add Hugging Face specific environment variables
ENV PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python
ENV PORT=80

# 4. Copy and install requirements AND the HF toolkit
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt huggingface_inference_toolkit

# 5. Copy the rest of your application code
COPY . .

# 6. THE CRUCIAL PART: Set the command to run the inference server
CMD ["python", "-m", "huggingface_inference_toolkit.main", "--model_dir", "."]