Sidharthan committed
Commit • 6acb880
1 Parent(s): e65401c

Resolving the files loading issues

Files changed:
- Dockerfile +1 -1
- app.py +4 -0
Dockerfile CHANGED

@@ -19,7 +19,7 @@ RUN pip install --no-cache-dir --upgrade pip && \
     pip install --no-cache-dir -r requirements.txt
 
 # Set environment variable for Hugging Face cache
-ENV
+ENV HF_HOME=/app/cache
 
 # Create the cache directory with proper permissions
 RUN mkdir -p /app/cache && chmod 777 /app/cache
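For context, HF_HOME controls where huggingface_hub and transformers store downloaded files; a Spaces container typically cannot write to the default ~/.cache/huggingface, which is why the Dockerfile points the cache at the writable /app/cache created below it. A minimal Python sketch of the same idea (the model id is a placeholder, not the one used by this Space):

    # Sketch only: point the Hugging Face cache at a writable directory.
    # /app/cache matches the Dockerfile above; the model id is a placeholder.
    import os

    # Set HF_HOME before transformers/huggingface_hub are imported, since the
    # default cache path is resolved at import time.
    os.environ["HF_HOME"] = "/app/cache"

    from transformers import AutoTokenizer

    # Files are downloaded under /app/cache instead of ~/.cache/huggingface.
    tokenizer = AutoTokenizer.from_pretrained(
        "google/gemma-2b",        # placeholder model id
        cache_dir="/app/cache",   # explicit per-call override, same effect as HF_HOME
    )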
app.py CHANGED

@@ -5,6 +5,9 @@ from transformers import AutoTokenizer
 from peft import AutoPeftModelForCausalLM
 import torch
 from typing import Optional
+import os
+
+os.environ['HF_HOME'] = '/app/cache'
 
 app = FastAPI(title="Gemma Script Generator API")
 
@@ -20,6 +23,7 @@ try:
         MODEL_NAME,
         device_map="auto", # Will use CPU if GPU not available
         trust_remote_code=True,
+        cache_dir = '/app/cache'
         #load_in_4bit=True
     )
 except Exception as e:
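Put together, the model-loading block in app.py roughly reads as below after this commit. Only the lines visible in the diff come from the Space; the fastapi import, the value of MODEL_NAME, the variable name model, and the except body are assumptions added here just to make the sketch self-contained and runnable:

    import os

    # Redirect the Hugging Face cache before any model files are fetched.
    os.environ['HF_HOME'] = '/app/cache'

    from fastapi import FastAPI
    from peft import AutoPeftModelForCausalLM

    app = FastAPI(title="Gemma Script Generator API")

    MODEL_NAME = "your-username/gemma-script-generator"  # placeholder model id

    try:
        model = AutoPeftModelForCausalLM.from_pretrained(
            MODEL_NAME,
            device_map="auto",       # will use CPU if GPU not available
            trust_remote_code=True,
            cache_dir='/app/cache',  # keep downloads in the writable cache directory
            # load_in_4bit=True
        )
    except Exception as e:
        # Placeholder error handling; the Space's actual except body is not in the diff.
        raise RuntimeError(f"Model loading failed: {e}")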