conversation context
app.py (CHANGED)
@@ -144,7 +144,7 @@ async def delete_conversation(api_key: str, conversation_id: int) -> str:
         logging.error(f"Error deleting conversation {conversation_id}: {str(e)}")
         return f"Failed to delete conversation: {str(e)}"
 
-client = InferenceClient("
+client = InferenceClient("Qwen/Qwen2.5-14B-Instruct")
 
 def respond(
     message: str,
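Note on this hunk: InferenceClient comes from huggingface_hub, and the change only swaps the model the client points at to Qwen/Qwen2.5-14B-Instruct. As a reference, here is a minimal, hypothetical sketch of the streaming pattern that client supports; stream_chat and its defaults are illustrative and not the repository's respond() function, though the sampling values mirror the sliders later in this diff.

from huggingface_hub import InferenceClient

# Hypothetical standalone example: this is not app.py's respond(); it only
# demonstrates the chat_completion streaming call against the swapped-in model.
client = InferenceClient("Qwen/Qwen2.5-14B-Instruct")

def stream_chat(messages, max_tokens=2048, temperature=0.7, top_p=0.95):
    # Yield the growing assistant reply as tokens arrive.
    response = ""
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        response += chunk.choices[0].delta.content or ""
        yield response

# Example:
# for partial in stream_chat([{"role": "user", "content": "Hello"}]):
#     print(partial)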
@@ -225,6 +225,7 @@ with gr.Blocks() as demo:
     delete_button = gr.Button("Delete Conversation", visible=False)
 
     selected_conversation_id = gr.State(None)
+    conversation_context = gr.State("")
 
     async def load_conversations(api_key):
         try:
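The new conversation_context = gr.State("") is a hidden, per-session value holder: it renders no UI, starts out as an empty string, and only changes when an event handler lists it among its outputs and returns a value for that position. A small, self-contained illustration of that read/write round trip (component names here are invented for the example, not taken from app.py):

import gradio as gr

with gr.Blocks() as demo:
    # Hidden per-session holder; "" is only the initial value.
    context_state = gr.State("")
    source = gr.Textbox(label="Paste some context")
    save_btn = gr.Button("Save to state")
    show_btn = gr.Button("Show saved state")
    preview = gr.Textbox(label="Currently saved context")

    # Writing: the handler's return value fills the State listed in outputs.
    save_btn.click(lambda text: text, inputs=[source], outputs=[context_state])

    # Reading: the State is passed like any other input component.
    show_btn.click(lambda saved: saved, inputs=[context_state], outputs=[preview])

demo.launch()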
@@ -246,23 +247,22 @@
             logging.info(f"Updating conversation with ID: {conversation_id}")
 
             # Return a loading message immediately
-            yield gr.update(value="Loading conversation details...", visible=True), gr.update(visible=False), None
+            yield gr.update(value="Loading conversation details...", visible=True), gr.update(visible=False), None, None
 
             # Fetch and format the conversation
             formatted_conversation = await display_conversation(api_key, conversation_id)
 
             # Return the formatted conversation and update the UI
-            yield formatted_conversation, gr.update(visible=True), conversation_id
+            yield formatted_conversation, gr.update(visible=True), conversation_id, formatted_conversation
         except Exception as e:
             error_message = f"Error updating conversation: {str(e)}"
             logging.error(error_message)
-            yield error_message, gr.update(visible=False), None
+            yield error_message, gr.update(visible=False), None, None
 
     conversation_table.select(
         update_conversation,
         inputs=[api_key],
-        outputs=[conversation_details, delete_button, selected_conversation_id],
-        # _js="(api_key, evt) => [api_key, evt]", # This ensures the evt object is passed correctly
+        outputs=[conversation_details, delete_button, selected_conversation_id, conversation_context],
     )
     # .then(
     #     lambda: None, # This is a no-op function
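The constraint driving this hunk is positional: Gradio maps each element of a yielded tuple to the component at the same index of the event's outputs list, so once conversation_context is appended to outputs, every yield in update_conversation has to grow to four items, and the final yield reuses formatted_conversation to fill the state. A reduced, illustrative generator showing that shape (the name update_conversation_shape and its arguments are invented for the example):

import gradio as gr

def update_conversation_shape(formatted_conversation, conversation_id):
    # Each yielded position maps onto
    # outputs=[conversation_details, delete_button, selected_conversation_id, conversation_context].
    # 1) Loading placeholder: details visible, delete button hidden, no id, no context yet.
    yield gr.update(value="Loading conversation details...", visible=True), gr.update(visible=False), None, None
    # 2) Final state: formatted text, delete button shown, the selected id, and the
    #    same formatted text copied into the conversation_context state.
    yield formatted_conversation, gr.update(visible=True), conversation_id, formatted_conversation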
@@ -300,10 +300,10 @@ with gr.Blocks() as demo:
         respond,
         additional_inputs=[
             gr.Textbox(value="You are a friendly Chatbot. Analyze and discuss the given conversation context.", label="System message"),
-            gr.Slider(minimum=1, maximum=2048, value=
+            gr.Slider(minimum=1, maximum=2048, value=2048, step=1, label="Max new tokens"),
             gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
             gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)"),
-
+            conversation_context
         ],
     )
 
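Since conversation_context is listed last in additional_inputs, it reaches respond() as the final argument, after the message and history that the chat interface (presumably gr.ChatInterface) always passes first and after the system message and the three sliders. How respond() actually uses it is not shown in this diff; one plausible, clearly hypothetical reading is to fold the stored conversation into the system prompt before streaming from the client as in the earlier sketch:

def respond(message, history, system_message, max_tokens, temperature, top_p, conversation_context):
    # additional_inputs arrive in the order they are listed, so the
    # conversation_context state is the last parameter.
    if conversation_context:
        # Assumption for illustration: prepend the selected conversation so the
        # model can "analyze and discuss the given conversation context".
        system_message = f"{system_message}\n\nConversation context:\n{conversation_context}"
    # ...then build the message list and stream from the InferenceClient as in
    # the stream_chat sketch above.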