# Base image with PyTorch, CUDA, and transformers preinstalled.
FROM huggingface/transformers-pytorch-gpu:latest

# Upgrade pip and install the serving stack (transformers and torch already ship
# with the base image; reinstalling simply pulls the latest releases).
RUN pip install --upgrade pip
RUN pip install transformers torch fastapi uvicorn

# Cache downloaded models in a writable location and configure which model to serve.
ENV TRANSFORMERS_CACHE=/tmp/huggingface_cache
ENV MODEL_NAME="your-username/your-finetuned-model"
ENV USE_FP16=True

# Copy the FastAPI app and launch it with uvicorn on port 8080.
COPY app.py /app/app.py
WORKDIR /app
EXPOSE 8080
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "8080"]