Create Dockerfile
Browse files- Dockerfile +21 -0
Dockerfile
ADDED
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# syntax=docker/dockerfile:1

# Official NVIDIA base image with PyTorch, CUDA, and cuDNN pre-installed.
# Tag 24.04-py3 is pinned explicitly (ships Python 3.11, matching the
# previous environment) so builds are reproducible.
FROM nvcr.io/nvidia/pytorch:24.04-py3

# App home inside the image; all subsequent relative paths resolve here.
WORKDIR /repository

# Copy only the dependency manifest first so the pip layer below stays
# cached until requirements.txt itself changes (source edits won't bust it).
COPY requirements.txt .

# --no-install cache: --no-cache-dir keeps pip's download cache out of the
# image layer, shrinking the final image.
# NOTE(review): pin versions (==) inside requirements.txt for reproducible
# images — confirm the manifest does so.
RUN pip install --no-cache-dir -r requirements.txt

# Copy the rest of the application code (e.g., handler.py).
# Keep a .dockerignore (.git, __pycache__, .env, model checkpoints) so this
# does not bloat the build context or bake secrets into the layer.
COPY . .

# (Optional) Hugging Face cache location, if the platform needs it.
# TRANSFORMERS_CACHE is deprecated in recent transformers releases in favor
# of HF_HOME — prefer the latter if enabling this.
# ENV HF_HOME="/repository/cache"

# NOTE(review): image currently runs as root; add a USER directive if the
# target inference platform supports a non-root runtime — confirm first.

# The inference platform is expected to supply the CMD or ENTRYPOINT.