Spaces: camparchimedes committed
Commit 41e855d • 1 Parent(s): d6966f7
Update app.py
app.py CHANGED
@@ -2,9 +2,19 @@ import gradio as gr
 from huggingface_hub import InferenceClient
 import pandas as pd
 import json
+import os
+import re
+import uuid
 
-client = InferenceClient("openchat/openchat-3.6-8b-20240522") # HuggingFaceH4/zephyr-7b-beta
 
+client = InferenceClient("tiiuae/falcon-7b-instruct") # HuggingFaceH4/zephyr-7b-beta
+
+
+def trigger_example(example):
+    chat, updated_history = generate_response(example)
+    return chat, updated_history
+
+
 def respond(
     message,
     history: list[tuple[str, str]],
@@ -71,6 +81,21 @@ def respond(
     except Exception as e:
         print(f"Error loading text file: {e}")
 
+def clear_chat():
+    return [], [], str(uuid.uuid4())
+
+
+examples = [
+    "Explain the relativity theory in French",
+    "Como sair de um helicóptero que caiu na água?",
+    "¿Cómo le explicarías el aprendizaje automático a un extraterrestre?",
+    "Explain gravity to a chicken.",
+    "Give me an example of an endangered species and let me know what I can do to help preserve it",
+    "Formally introduce the transformer architecture with notation.",
+
+]
+
+
 
 demo = gr.ChatInterface(
     respond,
@@ -91,8 +116,16 @@ demo = gr.ChatInterface(
 )
 
 if __name__ == "__main__":
-    demo.launch()
+    demo.launch(debug=True)
 
+"""
+if __name__ == "__main__":
+    # demo.launch(debug=True)
+    try:
+        demo.queue(api_open=False, max_size=40).launch(show_api=False)
+    except Exception as e:
+        print(f"Error: {e}")
+"""
 
 """
 import gradio as gr
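
Note on the new helpers: trigger_example, clear_chat, and the examples list are added in the hunks above, but the diff does not show where they are attached, and trigger_example calls a generate_response function that does not appear in these hunks (presumably defined elsewhere in app.py). The sketch below is one plausible way such helpers get wired into a gr.Blocks layout; the generate_response stub and all component names are illustrative assumptions, not taken from this commit.

# Sketch only (not part of the commit): plausible wiring for the helpers added above.
# `generate_response` is referenced by trigger_example but not shown in this diff,
# so a stand-in stub is used here to keep the example self-contained.
import uuid
import gradio as gr

def generate_response(message, history=None):
    # Stand-in: echo the prompt so the sketch runs without a model backend.
    history = (history or []) + [(message, f"(placeholder reply to: {message})")]
    return history, history

def trigger_example(example):
    chat, updated_history = generate_response(example)
    return chat, updated_history

def clear_chat():
    # Reset the visible chat, the stored history, and the session id.
    return [], [], str(uuid.uuid4())

examples = [
    "Explain the relativity theory in French",
    "Explain gravity to a chicken.",
]

with gr.Blocks() as demo:
    session_id = gr.State(str(uuid.uuid4()))
    history = gr.State([])
    chatbot = gr.Chatbot()
    msg = gr.Textbox(label="Message")
    example_picker = gr.Dropdown(choices=examples, label="Examples")
    clear_btn = gr.Button("Clear")

    msg.submit(generate_response, inputs=[msg, history], outputs=[chatbot, history])
    example_picker.change(trigger_example, inputs=example_picker, outputs=[chatbot, history])
    clear_btn.click(clear_chat, outputs=[chatbot, history, session_id])

if __name__ == "__main__":
    demo.queue(max_size=40).launch()

If the app keeps the gr.ChatInterface shown in the hunks instead of a Blocks layout, a simpler route is its built-in examples= parameter, which makes trigger_example and the dropdown wiring unnecessary.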