Commit 71fb85e · Parent(s): 7b971c7
Add application file
app.py
CHANGED
@@ -6,8 +6,8 @@ from threading import Thread
 #tokenizer = AutoTokenizer.from_pretrained("togethercomputer/RedPajama-INCITE-Chat-3B-v1")
 #model = AutoModelForCausalLM.from_pretrained("togethercomputer/RedPajama-INCITE-Chat-3B-v1", torch_dtype=torch.float16)
 
-tokenizer = AutoTokenizer.from_pretrained("
-model = AutoModelForCausalLM.from_pretrained("
+tokenizer = AutoTokenizer.from_pretrained("bartowski/OpenBioLLM-Llama3-8B-GGUF")
+model = AutoModelForCausalLM.from_pretrained("bartowski/OpenBioLLM-Llama3-8B-Q8_0.gguf", torch_dtype=torch.float16)
 
 class StopOnTokens(StoppingCriteria):
     def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor, **kwargs) -> bool:
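For context, a minimal standalone sketch of what the new model loading could look like. Recent transformers releases load .gguf checkpoints through the gguf_file argument rather than by passing the filename in place of a repo id; the repo id is taken from the diff above, while the exact GGUF filename is an assumption inferred from the "bartowski/OpenBioLLM-Llama3-8B-Q8_0.gguf" string and is not part of the committed code.

    import torch
    from transformers import AutoTokenizer, AutoModelForCausalLM

    # Repo id from the diff above; the filename is an assumption inferred
    # from the string "bartowski/OpenBioLLM-Llama3-8B-Q8_0.gguf".
    repo_id = "bartowski/OpenBioLLM-Llama3-8B-GGUF"
    gguf_file = "OpenBioLLM-Llama3-8B-Q8_0.gguf"

    # Transformers versions with GGUF support dequantize the checkpoint
    # when given gguf_file; the result is an ordinary torch model.
    tokenizer = AutoTokenizer.from_pretrained(repo_id, gguf_file=gguf_file)
    model = AutoModelForCausalLM.from_pretrained(
        repo_id,
        gguf_file=gguf_file,
        torch_dtype=torch.float16,
    )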