bragour committed on
Commit
d157505
1 Parent(s): ba54308

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +18 -32
app.py CHANGED
@@ -1,51 +1,37 @@
import gradio as gr
from huggingface_hub import InferenceClient

# Remote inference endpoint for the chat model; created once at import time.
client = InferenceClient(model="bragour/Camel-7b-chat")
7
def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
):
    """Stream a chat completion for *message*, given prior conversation turns.

    Args:
        message: The new user prompt.
        history: Prior (user, assistant) turn pairs; empty strings are skipped.
        system_message: System prompt inserted as the first chat message.
        max_tokens: Maximum number of tokens the API may generate.
        temperature: Sampling temperature forwarded to the API.
        top_p: Nucleus-sampling cutoff forwarded to the API.

    Yields:
        The accumulated response text after each streamed token.
    """
    messages = [{"role": "system", "content": system_message}]

    for user_input, assistant_output in history:
        if user_input:
            messages.append({"role": "user", "content": user_input})
        if assistant_output:
            messages.append({"role": "assistant", "content": assistant_output})

    messages.append({"role": "user", "content": message})

    response = ""

    # Stream the response from the API.  The loop variable is named `chunk`
    # (the original reused `message`, shadowing the function parameter), and
    # empty/None deltas are skipped: the final streamed chunk can carry
    # content=None, which would crash the string concatenation.
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content
        if token:
            response += token
            yield response
 
38
 
39
# Define the Gradio interface: a chat UI whose extra controls feed the
# system prompt and sampling parameters into `respond`.
system_box = gr.Textbox(value="You are a friendly Chatbot.", label="System message")
max_tokens_slider = gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens")
temperature_slider = gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature")
top_p_slider = gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)")

demo = gr.ChatInterface(
    respond,
    additional_inputs=[system_box, max_tokens_slider, temperature_slider, top_p_slider],
)
49
 
50
def main():
    """Start the Gradio server when this file is executed directly."""
    demo.launch()


if __name__ == "__main__":
    main()
 
import gradio as gr
from transformers import pipeline

# Local text-generation pipeline for the chat model, built once at startup.
client = pipeline(model="bragour/Camel-7b-chat")
7
def respond(
    message,
    max_tokens,
    temperature,
    top_p,
):
    """Generate a single reply to *message* with the local pipeline.

    Args:
        message: The user's prompt text.
        max_tokens: Maximum number of NEW tokens to generate.
        temperature: Sampling temperature.
        top_p: Nucleus-sampling cutoff.

    Returns:
        The generated text returned by the pipeline.
    """
    # Generate the response from the pipeline.  do_sample=True is required
    # for temperature/top_p to take effect: without it the transformers
    # generation backend decodes greedily and silently ignores both knobs,
    # so the UI sliders would do nothing.
    result = client(
        message,
        max_new_tokens=max_tokens,
        do_sample=True,
        temperature=temperature,
        top_p=top_p,
    )

    response = result[0]['generated_text']

    return response
24
 
25
  # Define the Gradio interface
26
+ demo = gr.Interface(
27
+ fn=respond,
28
+ inputs=[
29
+ gr.Textbox(lines=2, placeholder="Type your message here...", label="Your Message"),
30
+ gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max New Tokens"),
31
  gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
32
  gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)"),
33
  ],
34
+ outputs=gr.Textbox(label="Response"),
35
  )
36
 
37
+ if __name__ ==