mahiatlinux committed on
Commit
846f032
1 Parent(s): e325923

Update app.py

Files changed (1)
  1. app.py +6 -5
app.py CHANGED
@@ -1,3 +1,4 @@
+User
 import os
 from threading import Thread
 from typing import Iterator
@@ -41,12 +42,12 @@ def generate(
     top_k: int = 50,
     repetition_penalty: float = 1.2,
 ) -> Iterator[str]:
-    conversation = ""
+    conversation = []
     if system_prompt:
-        conversation += f"system: You are an AI assistant.\n"
+        conversation.append({"from": "human", "value": "You are an AI assistant."})
     for user, assistant in chat_history:
-        conversation += f"user: {user}\nassistant: {assistant}\n"
-    conversation += f"user: {message}\n"
+        conversation.extend([{"from": "human", "value": user}, {"from": "gpt", "value": assistant}])
+    conversation.append({"from": "human", "value": message})

     input_ids = tokenizer.apply_chat_template(conversation, return_tensors="pt", add_generation_prompt=True)
     if input_ids.shape[1] > MAX_INPUT_TOKEN_LENGTH:
@@ -130,4 +131,4 @@ with gr.Blocks(css="style.css") as demo:
     chat_interface.render()

 if __name__ == "__main__":
-    demo.queue(max_size=20).launch()
+    demo.queue(max_size=20).launch()
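For reference, this commit switches generate() from concatenating a flat prompt string to building a list of ShareGPT-style message dicts before calling tokenizer.apply_chat_template. A minimal sketch of the structure the updated code produces, using hypothetical inputs (the message, chat_history, and system_prompt values below are made up for illustration):

# Hypothetical example inputs, not part of the commit.
message = "What does this Space do?"
chat_history = [("Hi", "Hello! How can I help you today?")]
system_prompt = "You are an AI assistant."

# Mirrors the updated prompt-building logic in app.py.
conversation = []
if system_prompt:
    conversation.append({"from": "human", "value": "You are an AI assistant."})
for user, assistant in chat_history:
    conversation.extend([{"from": "human", "value": user}, {"from": "gpt", "value": assistant}])
conversation.append({"from": "human", "value": message})

# conversation is now:
# [{"from": "human", "value": "You are an AI assistant."},
#  {"from": "human", "value": "Hi"},
#  {"from": "gpt", "value": "Hello! How can I help you today?"},
#  {"from": "human", "value": "What does this Space do?"}]

Note that most built-in chat templates index messages by "role" and "content", so the "from"/"value" keys used here only render correctly if the model's chat_template is written against ShareGPT-style messages.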