camparchimedes committed on
Commit 43cb2b8
1 Parent(s): 49876ba

Update app.py

Files changed (1)
  1. app.py +96 -7
app.py CHANGED
@@ -1,10 +1,101 @@
-
 import gradio as gr
 from huggingface_hub import InferenceClient
+import pandas as pd
+import json
+
+client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
+
+def respond(
+    message,
+    history: list[tuple[str, str]],
+    system_message,
+    max_tokens,
+    temperature,
+    top_p,
+    uploaded_file,
+):
+    messages = [{"role": "system", "content": system_message}]
+
+    for val in history:
+        if val[0]:
+            messages.append({"role": "user", "content": val[0]})
+        if val[1]:
+            messages.append({"role": "assistant", "content": val[1]})
+
+    if uploaded_file is not None:
+        with open(uploaded_file.name, "r") as f:
+            file_content = f.read()
+        messages.append({"role": "user", "content": file_content})
+
+    messages.append({"role": "user", "content": message})
+
+    response = ""
+
+    for message in client.chat_completion(
+        messages,
+        max_tokens=max_tokens,
+        stream=True,
+        temperature=temperature,
+        top_p=top_p,
+    ):
+        token = message.choices[0].delta.content
+
+        response += token
+        yield response
+
+    # Process
+    if uploaded_file is not None:
+        print(f"Uploaded file: {uploaded_file.name}")
+
+        # CSV file
+        if uploaded_file.name.endswith(".csv"):
+            try:
+                df = pd.read_csv(uploaded_file.name)
+                print(f"CSV file loaded with {len(df)} rows and {len(df.columns)} columns.")
+                json_data = df.to_json(orient="records")
+                with open(f"{uploaded_file.name.split('.')[0]}.json", "w") as json_file:
+                    json_file.write(json_data)
+                print(f"JSON file created: {uploaded_file.name.split('.')[0]}.json")
+            except Exception as e:
+                print(f"Error loading CSV file: {e}")
+
+        # text file
+        elif uploaded_file.name.endswith(".txt"):
+            try:
+                with open(uploaded_file.name, "r") as f:
+                    text = f.read()
+                print(f"Text file loaded with {len(text)} characters.")
+                json_data = json.dumps({"text": text})
+                with open(f"{uploaded_file.name.split('.')[0]}.json", "w") as json_file:
+                    json_file.write(json_data)
+                print(f"JSON file created: {uploaded_file.name.split('.')[0]}.json")
+            except Exception as e:
+                print(f"Error loading text file: {e}")
+
+demo = gr.ChatInterface(
+    respond,
+    title="Nixie Steamcore, a hotbot!",
+    additional_inputs=[
+        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
+        gr.Slider(minimum=0.1, maximum=4.0, value=1.2, step=0.1, label="Temperature"),
+        gr.Slider(
+            minimum=0.1,
+            maximum=1.0,
+            value=0.95,
+            step=0.05,
+            label="Top-p (nucleus sampling)",
+        ),
+        gr.File(label="Upload a document"),
+    ],
+)
+
+if __name__ == "__main__":
+    demo.launch()
 
 """
-For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
-"""
+import gradio as gr
+from huggingface_hub import InferenceClient
+
 client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")


@@ -40,9 +131,6 @@ def respond(
         response += token
         yield response

-"""
-For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
-"""
 demo = gr.ChatInterface(
     respond,
     additional_inputs=[
@@ -61,4 +149,5 @@ demo = gr.ChatInterface(


 if __name__ == "__main__":
-    demo.launch()
+    demo.launch()
+"""