Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -9,8 +9,29 @@ from streaming import stream_to_gradio
 from huggingface_hub import login
 from gradio.data_classes import FileData
 
+
+
+import torch
+
+# Initialize ZeroGPU
+if torch.cuda.is_available():
+    torch.backends.cudnn.benchmark = True
+    torch.backends.cudnn.enabled = True
+    torch.backends.cudnn.allow_tf32 = True
+
+import os
+os.environ['WANDB_DISABLED'] = 'true'
+os.environ['TOKENIZERS_PARALLELIZM'] = 'false'
+os.environ['TRANSFORMERS_PARALLELIZM'] = 'false'
+os.environ['HF_REPO_REWRITE'] = 'true'
+
+from zero_gpu import ZeroGpu
+ZeroGpu(use_cpu=False)
+
+
 login(os.getenv("HUGGINGFACEHUB_API_TOKEN"), add_to_git_credential=True)
 
+
 llm_engine = HfEngine("meta-llama/Meta-Llama-3.1-70B-Instruct")
 
 agent = ReactCodeAgent(
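For reference, the pattern Hugging Face documents for ZeroGPU ("Zero") Spaces is the `spaces` package with its `@spaces.GPU` decorator rather than a separate init object: GPU-bound work goes inside a decorated function, which is granted a GPU only while it runs. The sketch below is illustrative; `run_on_gpu` and its body are assumptions, not part of this commit.

import spaces  # preinstalled on ZeroGPU Spaces; the decorator is a pass-through elsewhere
import torch

@spaces.GPU  # a GPU is allocated only for the duration of this call
def run_on_gpu(prompt: str) -> str:
    # Any CUDA work (model inference, agent runs, etc.) belongs inside
    # the decorated function so it executes while the GPU is attached.
    device = "cuda" if torch.cuda.is_available() else "cpu"
    return f"ran on {device}: {prompt}"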