Update README.md
README.md (changed)
@@ -44,10 +44,10 @@ import torch
 from transformers import AutoModelForCausalLM, AutoTokenizer, TextStreamer
 
 # Let's bring in the big guns! Our super cool HelpingAI-3B model
-model = AutoModelForCausalLM.from_pretrained("OEvortex/HelpingAI-3B-
+model = AutoModelForCausalLM.from_pretrained("OEvortex/HelpingAI-3B-chat", trust_remote_code=True, torch_dtype=torch.float16).to("cuda")
 
 # We also need the special HelpingAI translator to understand our chats
-tokenizer = AutoTokenizer.from_pretrained("OEvortex/HelpingAI-3B-
+tokenizer = AutoTokenizer.from_pretrained("OEvortex/HelpingAI-3B-chat", trust_remote_code=True, torch_dtype=torch.float16)
 
 # This TextStreamer thingy is our secret weapon for super smooth conversation flow
 streamer = TextStreamer(tokenizer)
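For reference, here is a minimal end-to-end sketch of how the `model`, `tokenizer`, and `streamer` set up in the updated snippet might be used to generate a streamed reply. The prompt text, the `max_new_tokens` value, and the plain (non-chat-template) tokenization are illustrative assumptions, not part of the README change:

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, TextStreamer

# Load the model, tokenizer, and streamer as in the updated README snippet.
model = AutoModelForCausalLM.from_pretrained(
    "OEvortex/HelpingAI-3B-chat", trust_remote_code=True, torch_dtype=torch.float16
).to("cuda")
tokenizer = AutoTokenizer.from_pretrained("OEvortex/HelpingAI-3B-chat", trust_remote_code=True)
streamer = TextStreamer(tokenizer)

# Illustrative prompt (an assumption, not taken from the README).
prompt = "Hello! How are you today?"
inputs = tokenizer(prompt, return_tensors="pt").to("cuda")

# Passing streamer=streamer makes generate() print tokens to stdout as they are
# produced, which is the "smooth conversation flow" the comments refer to.
model.generate(**inputs, streamer=streamer, max_new_tokens=128)
```

If the model ships a chat template, wrapping the prompt with `tokenizer.apply_chat_template(...)` before generation would be the more typical flow for a chat model, but the plain call above keeps the sketch free of that assumption.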