shiyemin2 committed
Commit cb82b91
Parent: f104fde

Update model.py

Files changed (1): model.py (+2 -1)
model.py CHANGED
@@ -4,7 +4,7 @@ from typing import Iterator
 import torch
 from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer
 
-model_id = 'meta-llama/Llama-2-7b-chat-hf'
+model_id = 'LinkSoul/Chinese-Llama-2-7b'
 
 if torch.cuda.is_available():
     model = AutoModelForCausalLM.from_pretrained(
@@ -17,6 +17,7 @@ else:
 tokenizer = AutoTokenizer.from_pretrained(model_id)
 
 
+
 def get_prompt(message: str, chat_history: list[tuple[str, str]],
                system_prompt: str) -> str:
     texts = [f'[INST] <<SYS>>\n{system_prompt}\n<</SYS>>\n\n']
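The commit swaps the checkpoint the demo loads from meta-llama/Llama-2-7b-chat-hf to LinkSoul/Chinese-Llama-2-7b; the only other change is an extra blank line before get_prompt. For context, below is a minimal sketch of how a model.py like this typically consumes model_id, using the standard transformers TextIteratorStreamer pattern. Only the lines visible in the diff come from this repository; the run helper, its parameters (max_new_tokens, do_sample), and the chat-history formatting are illustrative assumptions.

# Sketch only: the model loading and get_prompt header match the diff context;
# everything else is an assumed, typical streaming-generation setup.
from threading import Thread
from typing import Iterator

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer

model_id = 'LinkSoul/Chinese-Llama-2-7b'

if torch.cuda.is_available():
    # Load the checkpoint in half precision and spread it across available GPUs.
    model = AutoModelForCausalLM.from_pretrained(
        model_id, torch_dtype=torch.float16, device_map='auto')
else:
    model = None  # CPU fallback is not supported in this sketch
tokenizer = AutoTokenizer.from_pretrained(model_id)


def get_prompt(message: str, chat_history: list[tuple[str, str]],
               system_prompt: str) -> str:
    # Llama-2 chat format: system prompt in <<SYS>> tags, turns wrapped in [INST].
    texts = [f'[INST] <<SYS>>\n{system_prompt}\n<</SYS>>\n\n']
    for user_input, response in chat_history:
        texts.append(f'{user_input.strip()} [/INST] {response.strip()} </s><s>[INST] ')
    texts.append(f'{message.strip()} [/INST]')
    return ''.join(texts)


def run(message: str, chat_history: list[tuple[str, str]],
        system_prompt: str, max_new_tokens: int = 1024) -> Iterator[str]:
    # Stream tokens as they are generated instead of waiting for the full reply.
    prompt = get_prompt(message, chat_history, system_prompt)
    inputs = tokenizer(prompt, return_tensors='pt').to(model.device)
    streamer = TextIteratorStreamer(tokenizer, timeout=10.0,
                                    skip_prompt=True, skip_special_tokens=True)
    generate_kwargs = dict(inputs, streamer=streamer,
                           max_new_tokens=max_new_tokens, do_sample=True)
    Thread(target=model.generate, kwargs=generate_kwargs).start()

    outputs = []
    for text in streamer:
        outputs.append(text)
        yield ''.join(outputs)

Running model.generate on a background thread lets the streamer yield partial text as tokens arrive, which is what makes token-by-token display possible in the demo UI; swapping model_id changes only which checkpoint and tokenizer are downloaded, not this flow.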