Update app.py
app.py CHANGED
@@ -1,7 +1,7 @@
 import gradio as gr
 from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig
 import torch
-model_name = "ruslanmv/Medical-Llama3-8B"
+model_name = "ruslanmv/Medical-Llama3-8B""
 device_map = 'auto'
 # Check if GPU is available
 device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
@@ -21,11 +21,11 @@ if device.type == "cuda":
         device_map=device_map
     )
 else:
-    model = AutoModelForCausalLM.from_pretrained(
-
-
-
-
+    model = AutoModelForCausalLM.from_pretrained(model_name)
+
+# Load model directly
+from transformers import AutoTokenizer, AutoModelForCausalLM
+
 tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
 tokenizer.pad_token = tokenizer.eos_token
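In short, the commit replaces the multi-line CPU-branch from_pretrained(...) call with a single-argument call and pastes a "Load model directly" snippet (a redundant import, since both classes are already imported on line 2) above the tokenizer setup. Note that the new line 4 ends with a doubled closing quote, which would raise a SyntaxError as soon as app.py is imported and crash the Space at startup. Below is a minimal sketch of what the corrected loading logic could look like; the quantization settings in the CUDA branch are assumptions, since the hunk only shows that branch's closing arguments, while model_name, device_map, the CPU fallback, and the tokenizer lines come from the diff itself.

# Minimal sketch of the intended loading logic (not the exact original code).
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig

model_name = "ruslanmv/Medical-Llama3-8B"  # single closing quote fixes the SyntaxError
device_map = "auto"

# Check if a GPU is available
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

if device.type == "cuda":
    # Assumed 4-bit quantized load; the original config values are not visible in the hunk.
    bnb_config = BitsAndBytesConfig(
        load_in_4bit=True,
        bnb_4bit_compute_dtype=torch.float16,
    )
    model = AutoModelForCausalLM.from_pretrained(
        model_name,
        quantization_config=bnb_config,
        trust_remote_code=True,
        device_map=device_map,
    )
else:
    # CPU fallback: plain full-precision load, as in the new line 24 of the diff.
    model = AutoModelForCausalLM.from_pretrained(model_name)

tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
tokenizer.pad_token = tokenizer.eos_token

Keeping a single from_pretrained call per branch (quantized on GPU, plain on CPU) and dropping the pasted duplicate import keeps app.py importable on both hardware configurations without changing the model that is loaded.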