leonardlin committed on
Commit
89eb83f
1 Parent(s): 8014467

back to this prompt

Browse files
Files changed (1) hide show
  1. app.py +5 -9
app.py CHANGED
@@ -25,7 +25,7 @@ examples = [
25
 
26
  # LLM Settings
27
  # Initial
28
- system_prompt = 'You are a helpful assistant that speaks English and 日本語. Reply in the same language as the user.'
29
  default_prompt = system_prompt
30
 
31
  tokenizer = AutoTokenizer.from_pretrained(model_name)
@@ -35,20 +35,16 @@ model = AutoModelForCausalLM.from_pretrained(
35
  device_map="auto",
36
  # load_in_8bit=True,
37
  load_in_4bit=True,
38
- quantization_config = BitsAndBytesConfig(
39
- load_in_4bit=True,
40
- bnb_4bit_quant_type='nf4',
41
- bnb_4bit_use_double_quant=True,
42
- bnb_4bit_compute_dtype=torch.bfloat16
43
- ),
44
  )
45
 
46
  def chat(message, history, system_prompt):
47
- print('---')
48
- pprint(history)
49
  if not system_prompt:
50
  system_prompt = default_prompt
51
 
 
 
 
 
52
  # Let's just rebuild every time it's easier
53
  chat_history = [{"role": "system", "content": system_prompt}]
54
  for h in history:
 
25
 
26
  # LLM Settings
27
  # Initial
28
+ system_prompt = 'You are a helpful, bilingual assistant. Reply in same language as the user.'
29
  default_prompt = system_prompt
30
 
31
  tokenizer = AutoTokenizer.from_pretrained(model_name)
 
35
  device_map="auto",
36
  # load_in_8bit=True,
37
  load_in_4bit=True,
 
 
 
 
 
 
38
  )
39
 
40
  def chat(message, history, system_prompt):
 
 
41
  if not system_prompt:
42
  system_prompt = default_prompt
43
 
44
+ print('---')
45
+ print('Prompt:', system_prompt)
46
+ pprint(history)
47
+
48
  # Let's just rebuild every time it's easier
49
  chat_history = [{"role": "system", "content": system_prompt}]
50
  for h in history: