# Use an official Python runtime as a base image
FROM python:3.9-slim
# Set the working directory in the container to /app
WORKDIR /app
# Copy the current directory contents into the container at /app
COPY . /app
# Install system build dependencies and the Rust toolchain (needed to build some Python packages)
RUN apt-get update && \
apt-get install -y aria2 curl gcc python3-dev && \
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
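# Make the Rust toolchain (cargo) available on PATH for later build steps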
ENV PATH="/root/.cargo/bin:${PATH}"
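# Install the Python dependencies listed in requirements.txt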
RUN pip install --upgrade pip && \
pip install --no-cache-dir -r requirements.txt
# Install accelerate, and bitsandbytes from the Test PyPI index
RUN pip install --no-cache-dir accelerate
RUN pip install --no-cache-dir -i https://test.pypi.org/simple/ bitsandbytes
# Create the model directory and download the medllama2_7b weights, tokenizer, and config files from Hugging Face
RUN mkdir -p /app/medllama2_7b && \
chmod -R 777 /app/medllama2_7b && \
aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://huggingface.co/4bit/medllama2_7b_s/resolve/main/model-00001-of-00002.safetensors -d /app/medllama2_7b -o model-00001-of-00002.safetensors && \
aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://huggingface.co/4bit/medllama2_7b_s/resolve/main/model-00002-of-00002.safetensors -d /app/medllama2_7b -o model-00002-of-00002.safetensors && \
aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://huggingface.co/4bit/medllama2_7b_s/raw/main/model.safetensors.index.json -d /app/medllama2_7b -o model.safetensors.index.json && \
aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://huggingface.co/4bit/medllama2_7b_s/raw/main/special_tokens_map.json -d /app/medllama2_7b -o special_tokens_map.json && \
aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://huggingface.co/4bit/medllama2_7b_s/raw/main/tokenizer.json -d /app/medllama2_7b -o tokenizer.json && \
aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://huggingface.co/4bit/medllama2_7b_s/raw/main/tokenizer_config.json -d /app/medllama2_7b -o tokenizer_config.json && \
aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://huggingface.co/4bit/medllama2_7b_s/raw/main/config.json -d /app/medllama2_7b -o config.json && \
aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://huggingface.co/4bit/medllama2_7b_s/raw/main/generation_config.json -d /app/medllama2_7b -o generation_config.json && \
aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://huggingface.co/4bit/medllama2_7b_s/resolve/main/tokenizer.model -d /app/medllama2_7b -o tokenizer.model
# Document that the app listens on port 80 (EXPOSE does not publish the port by itself)
EXPOSE 80
# Run app.py when the container launches
CMD ["python", "app.py"]