phuongnv committed
Commit 7bac83a
1 Parent(s): f3cfd6d

Update app.py

Files changed (1): app.py (+10 -5)
app.py CHANGED
@@ -4,7 +4,7 @@ from huggingface_hub import InferenceClient
 """
 For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
 """
-client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
+client = InferenceClient("model.guff")
 
 
 def respond(
@@ -14,6 +14,7 @@ def respond(
     max_tokens,
     temperature,
     top_p,
+    top_k
 ):
     messages = [{"role": "system", "content": system_message}]
 
@@ -33,6 +34,9 @@ def respond(
         stream=True,
         temperature=temperature,
         top_p=top_p,
+        top_k=top_k,
+        num_beams=10,
+        num_return_sequences=10
     ):
         token = message.choices[0].delta.content
 
@@ -47,17 +51,18 @@ demo = gr.ChatInterface(
     additional_inputs=[
         gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
         gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
-        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
+        gr.Slider(minimum=0.1, maximum=4.0, value=1.0, step=0.1, label="Temperature"),
+        gr.Slider(minimum=1, maximum=100, value=50, step=1, label="Top_k"),
         gr.Slider(
             minimum=0.1,
             maximum=1.0,
-            value=0.95,
+            value=1.0,
             step=0.05,
-            label="Top-p (nucleus sampling)",
+            label="Top_p (nucleus sampling)",
         ),
     ],
 )
 
 
 if __name__ == "__main__":
-    demo.launch()
+    demo.launch(share=True)
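
For orientation, here is a sketch of how respond() reads after this commit, assembled from the hunks above. The lines the diff does not touch (the history loop and the client.chat_completion(...) call) are assumed from the stock Gradio ChatInterface template this file appears to follow, so treat them as illustrative rather than a copy of the repository. Note also that top_k, num_beams, and num_return_sequences are not among InferenceClient.chat_completion's documented parameters, so the call may reject them at request time.

# Sketch of respond() after commit 7bac83a. Only the "+" lines above are confirmed;
# the rest is assumed from the standard Gradio ChatInterface template.
from huggingface_hub import InferenceClient

client = InferenceClient("model.guff")


def respond(message, history, system_message, max_tokens, temperature, top_p, top_k):
    # Build an OpenAI-style message list from the system prompt and chat history
    # (assumed template code; history is a list of (user, assistant) pairs).
    messages = [{"role": "system", "content": system_message}]
    for user_msg, assistant_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})

    response = ""
    for message in client.chat_completion(  # assumed call; the diff only shows its kwargs
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
        top_k=top_k,               # added by this commit; not documented for
        num_beams=10,              # chat_completion -- these are transformers
        num_return_sequences=10,   # generate() kwargs and may be rejected
    ):
        token = message.choices[0].delta.content
        response += token
        yield response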