Update README.md
README.md CHANGED
@@ -109,8 +109,8 @@ import torch
 from transformers import AutoModelForCausalLM, AutoTokenizer
 
 device = "cuda:0" if torch.cuda.is_available() else "cpu"
-tokenizer = AutoTokenizer.from_pretrained("/
-model = AutoModelForCausalLM.from_pretrained("/
+tokenizer = AutoTokenizer.from_pretrained("LLM360/CrystalChat", trust_remote_code=True)
+model = AutoModelForCausalLM.from_pretrained("LLM360/CrystalChat", trust_remote_code=True).to(device)
 
 prompt = '<s> <|sys_start|> You are an AI assistant. You will be given a task. You must generate a detailed and long answer. <|sys_end|> <|im_start|> Write a python function that takes a list of integers and returns the squared sum of the list. <|im_end|>'
 
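For context, here is a minimal sketch of how the updated snippet could be used end to end. It relies only on the standard `transformers` generation API (`generate`, `decode`); the `max_new_tokens` value and the prompt wrapping are illustrative assumptions, not part of the diff above.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the checkpoint referenced in the diff (custom code requires trust_remote_code).
device = "cuda:0" if torch.cuda.is_available() else "cpu"
tokenizer = AutoTokenizer.from_pretrained("LLM360/CrystalChat", trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained("LLM360/CrystalChat", trust_remote_code=True).to(device)

# Prompt format taken from the README snippet shown in the diff.
prompt = ('<s> <|sys_start|> You are an AI assistant. You will be given a task. '
          'You must generate a detailed and long answer. <|sys_end|> <|im_start|> '
          'Write a python function that takes a list of integers and returns the '
          'squared sum of the list. <|im_end|>')

# Tokenize, generate a completion, and decode it back to text.
# max_new_tokens=256 is an illustrative choice, not a value from the README.
input_ids = tokenizer(prompt, return_tensors="pt").input_ids.to(device)
output_ids = model.generate(input_ids, max_new_tokens=256)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```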