nxphi47 committed on
Commit
39e1f80
1 Parent(s): f7d8607

Update multipurpose_chatbot/demos/chat_interface.py

Browse files
multipurpose_chatbot/demos/chat_interface.py CHANGED
@@ -126,8 +126,8 @@ def format_conversation(history, system_prompt=None):
126
  def chat_response_stream_multiturn_engine(
127
  message: str,
128
  history: List[Tuple[str, str]],
129
- temperature: float,
130
- max_tokens: int,
131
  system_prompt: Optional[str] = SYSTEM_PROMPT,
132
  ):
133
  global MODEL_ENGINE
@@ -688,14 +688,14 @@ class ChatInterfaceDemo(BaseDemo):
688
  submit_btn=gr.Button(value='Submit', variant="primary", scale=0),
689
  title=title,
690
  description=description,
691
- additional_inputs=[
692
- gr.Number(value=temperature, label='Temperature (higher -> more random)'),
693
- gr.Number(value=max_tokens, label='Max generated tokens (increase if want more generation)'),
694
- # gr.Number(value=frequence_penalty, label='Frequency penalty (> 0 encourage new tokens over repeated tokens)'),
695
- # gr.Number(value=presence_penalty, label='Presence penalty (> 0 encourage new tokens, < 0 encourage existing tokens)'),
696
- gr.Textbox(value=system_prompt, label='System prompt', lines=4)
697
- ],
698
- examples=CHAT_EXAMPLES,
699
  cache_examples=False
700
  )
701
  return demo_chat
 
126
  def chat_response_stream_multiturn_engine(
127
  message: str,
128
  history: List[Tuple[str, str]],
129
+ temperature: Optional[float] = 0.7,
130
+ max_tokens: Optional[int] = 2048,
131
  system_prompt: Optional[str] = SYSTEM_PROMPT,
132
  ):
133
  global MODEL_ENGINE
 
688
  submit_btn=gr.Button(value='Submit', variant="primary", scale=0),
689
  title=title,
690
  description=description,
691
+ # additional_inputs=[
692
+ # gr.Number(value=temperature, label='Temperature (higher -> more random)'),
693
+ # gr.Number(value=max_tokens, label='Max generated tokens (increase if want more generation)'),
694
+ # # gr.Number(value=frequence_penalty, label='Frequency penalty (> 0 encourage new tokens over repeated tokens)'),
695
+ # # gr.Number(value=presence_penalty, label='Presence penalty (> 0 encourage new tokens, < 0 encourage existing tokens)'),
696
+ # gr.Textbox(value=system_prompt, label='System prompt', lines=4)
697
+ # ],
698
+ # examples=CHAT_EXAMPLES,
699
  cache_examples=False
700
  )
701
  return demo_chat