Commit 2bd30ac
AurelioAguirre committed
Parent(s): 54fe7a5
Added gitignore and fixed Dockerfile.
Files changed:
- .gitignore  +44 -0
- Dockerfile  +30 -27
.gitignore
ADDED
@@ -0,0 +1,44 @@
+# Environment files
+.env
+.env.*
+
+# Virtual environment
+myenv/
+venv/
+ENV/
+
+# Model checkpoints
+checkpoints/
+
+# Python
+__pycache__/
+*.py[cod]
+*$py.class
+*.so
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# IDE
+.idea/
+.vscode/
+*.swp
+*.swo
+.DS_Store
+
+# Logs
+*.log
+logs/
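A quick way to confirm locally that these patterns behave as intended (not part of the commit; the example paths are hypothetical) is to ask git which ignore rule matches a given file:

git check-ignore -v .env checkpoints/model.bin
git status --ignored --short
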
Dockerfile
CHANGED
@@ -1,43 +1,46 @@
-# Use
-FROM
 
-# Set working directory
-WORKDIR /
 
-# Install system dependencies
-RUN apt-get update &&
-
-
-
-    && rm -rf /var/lib/apt/lists/*
 
-#
 COPY requirements.txt .
-RUN pip3 install --no-cache-dir -r requirements.txt
 
-# Install
-RUN
-    einops \
-    xformers \
-    bitsandbytes \
-    accelerate \
-    sentencepiece
 
-# Copy the application
 COPY . .
 
-# Create
-RUN mkdir -p /
-
-
 
 # Set environment variables
-ENV PYTHONPATH=/code
 ENV LLM_ENGINE_HOST=0.0.0.0
-ENV LLM_ENGINE_PORT=
 
 # Expose the port the app runs on
 EXPOSE 8001
 
 # Command to run the application
-CMD ["
+# Use Python 3.10 as base image for better compatibility with ML libraries
+FROM python:3.10-slim
 
+# Set working directory
+WORKDIR /app
 
+# Install git and required system dependencies
+RUN apt-get update && \
+    apt-get install -y git && \
+    apt-get clean && \
+    rm -rf /var/lib/apt/lists/*
 
+# Copy requirements first to leverage Docker cache
 COPY requirements.txt .
 
+# Install Python dependencies
+RUN pip install --no-cache-dir -r requirements.txt
 
+# Copy the rest of the application
 COPY . .
 
+# Create checkpoints directory
+RUN mkdir -p /app/checkpoints
+
+# The token will be passed during build time
+ARG HF_TOKEN
+ENV HF_TOKEN=${HF_TOKEN}
+
+# Download the Llama 2 model using litgpt
+# Only proceed if HF_TOKEN is provided
+RUN if [ -n "$HF_TOKEN" ]; then \
+    python -c "from huggingface_hub import login; from litgpt.cli import download; login('${HF_TOKEN}'); download('meta-llama/Llama-2-3b-chat-hf', '/app/checkpoints')"; \
+    else \
+    echo "No Hugging Face token provided. Model will need to be downloaded separately."; \
+    fi
 
 # Set environment variables
 ENV LLM_ENGINE_HOST=0.0.0.0
+ENV LLM_ENGINE_PORT=8001
+ENV MODEL_PATH=/app/checkpoints/meta-llama/Llama-2-3b-chat-hf
 
 # Expose the port the app runs on
 EXPOSE 8001
 
 # Command to run the application
+CMD ["python", "main/main.py"]
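
For reference, building and running an image from a Dockerfile like this typically looks like the sketch below; the image tag, token value, and host port are placeholders, not part of the commit:

# Pass the Hugging Face token as a build argument so the model is fetched at build time
docker build --build-arg HF_TOKEN=hf_xxxxxxxx -t llm-engine .

# Publish the port the app listens on (EXPOSE 8001, LLM_ENGINE_PORT=8001)
docker run -p 8001:8001 llm-engine

If no HF_TOKEN is supplied at build time, the build only prints the fallback message, and the checkpoint has to be fetched separately and made available at the MODEL_PATH location. Assuming litgpt's download command accepts the repo id and a checkpoint directory (an assumption about its CLI, not shown in this commit), that could look like:

# Hypothetical separate download, then a volume mount into /app/checkpoints
litgpt download meta-llama/Llama-2-3b-chat-hf --checkpoint_dir ./checkpoints
docker run -p 8001:8001 -v "$(pwd)/checkpoints:/app/checkpoints" llm-engine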