Spaces:
Build error
Build error
heikowagner
committed on
Commit
•
21e8045
1
Parent(s):
76fc69c
mem
Browse files
- app/app.py +6 -1
app/app.py
CHANGED
@@ -4,6 +4,7 @@ import utils as ut
|
|
4 |
import elements as el
|
5 |
import os
|
6 |
import torch
|
|
|
7 |
|
8 |
persist_directory = load_model.persist_directory
|
9 |
st.title('myRetrievalGPT')
|
@@ -19,6 +20,7 @@ else:
|
|
19 |
import torch
|
20 |
torch.cuda.empty_cache()
|
21 |
|
|
|
22 |
model_type = st.selectbox(
|
23 |
'Select the Documents to be used to answer your question',
|
24 |
('OpenAI', 'decapoda-research/llama-7b-hf (gpu+cpu)', 'llama-7b 4bit (cpu only)',) )
|
@@ -34,7 +36,10 @@ else:
|
|
34 |
llm= load_model.load_openai_model()
|
35 |
elif model_type=='decapoda-research/llama-7b-hf (gpu+cpu)':
|
36 |
# Add more models here
|
37 |
-
|
|
|
|
|
|
|
38 |
else:
|
39 |
llm = load_model.load_cpu_model()
|
40 |
|
|
|
4 |
import elements as el
|
5 |
import os
|
6 |
import torch
|
7 |
+
import psutil
|
8 |
|
9 |
persist_directory = load_model.persist_directory
|
10 |
st.title('myRetrievalGPT')
|
|
|
20 |
import torch
|
21 |
torch.cuda.empty_cache()
|
22 |
|
23 |
+
st.write(str( torch.cuda.is_available()) + str(psutil.virtual_memory()))
|
24 |
model_type = st.selectbox(
|
25 |
'Select the Documents to be used to answer your question',
|
26 |
('OpenAI', 'decapoda-research/llama-7b-hf (gpu+cpu)', 'llama-7b 4bit (cpu only)',) )
|
|
|
36 |
llm= load_model.load_openai_model()
|
37 |
elif model_type=='decapoda-research/llama-7b-hf (gpu+cpu)':
|
38 |
# Add more models here
|
39 |
+
if not torch.cuda.is_available() and psutil.virtual_memory().available< 17254768640:
|
40 |
+
st.write('You do not have enough memory to use this model:' + str(psutil.virtual_memory().available))
|
41 |
+
else:
|
42 |
+
llm = load_model.load_gpu_model("decapoda-research/llama-7b-hf")
|
43 |
else:
|
44 |
llm = load_model.load_cpu_model()
|
45 |
|