NeonBohdan committed on
Commit: 8b0e392
Parent(s): de14443

Added conversational flag

Files changed (1): app.py (+9 -0)

app.py CHANGED
@@ -16,10 +16,18 @@ client = OpenAI(
 def respond(
     message,
     history: List[Tuple[str, str]],
+    conversational,
     max_tokens,
 ):
     messages = []
 
+    if conversational:
+        for val in history[-2:]:
+            if val[0]:
+                messages.append({"role": "user", "content": val[0]})
+            if val[1]:
+                messages.append({"role": "assistant", "content": val[1]})
+
     messages.append({"role": "user", "content": message})
 
     completion = client.chat.completions.create(
@@ -40,6 +48,7 @@ def respond(
 demo = gr.ChatInterface(
     respond,
     additional_inputs=[
+        gr.Checkbox(value=True, label="conversational"),
         gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
     ],
     title="NeonLLM (v2024-05-15)",