Files changed (1)
  1. README.md +3 -3
README.md CHANGED
@@ -245,11 +245,11 @@ import torch
 from transformers import AutoModelForCausalLM, AutoTokenizer
 from transformers.generation.utils import GenerationConfig
 
-tokenizer = AutoTokenizer.from_pretrained("OrionStarAI/Orion-14B", use_fast=False, trust_remote_code=True)
-model = AutoModelForCausalLM.from_pretrained("OrionStarAI/Orion-14B", device_map="auto",
+tokenizer = AutoTokenizer.from_pretrained("OrionStarAI/Orion-14B-Chat", use_fast=False, trust_remote_code=True)
+model = AutoModelForCausalLM.from_pretrained("OrionStarAI/Orion-14B-Chat", device_map="auto",
                                              torch_dtype=torch.bfloat16, trust_remote_code=True)
 
-model.generation_config = GenerationConfig.from_pretrained("OrionStarAI/Orion-14B")
+model.generation_config = GenerationConfig.from_pretrained("OrionStarAI/Orion-14B-Chat")
 messages = [{"role": "user", "content": "Hello, what is your name? "}]
 response = model.chat(tokenizer, messages, streaming=False)
 print(response)
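
For reference, the updated example assembles into the self-contained snippet below. This is a sketch based only on the `+` lines above; it assumes a GPU with enough memory for the 14B weights in bfloat16, and that running the repository's custom code via `trust_remote_code=True` is acceptable, since the `chat()` helper is defined in that remote code rather than in the core `transformers` API.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from transformers.generation.utils import GenerationConfig

# Load the chat-tuned checkpoint; the slow tokenizer and custom model code
# live in the model repository, hence trust_remote_code=True.
tokenizer = AutoTokenizer.from_pretrained("OrionStarAI/Orion-14B-Chat",
                                          use_fast=False, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained("OrionStarAI/Orion-14B-Chat",
                                             device_map="auto",
                                             torch_dtype=torch.bfloat16,
                                             trust_remote_code=True)
model.generation_config = GenerationConfig.from_pretrained("OrionStarAI/Orion-14B-Chat")

# chat() is provided by the model's remote code; streaming=False returns the full reply.
messages = [{"role": "user", "content": "Hello, what is your name? "}]
response = model.chat(tokenizer, messages, streaming=False)
print(response)
```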