Update app.py
app.py CHANGED
@@ -1,5 +1,6 @@
 from huggingface_hub import InferenceClient
 import gradio as gr
+import threading
 
 client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
 
@@ -19,6 +20,8 @@ def format_prompt(message, history, system_prompt):
 def generate(
     prompt, history, system_prompt, temperature=0.9, max_new_tokens=4096, top_p=0.95, repetition_penalty=1.0,
 ):
+    global conversation_started, conversation_ongoing
+
     temperature = float(temperature)
     if temperature < 1e-2:
         temperature = 1e-2
@@ -39,8 +42,9 @@ def generate(
 
     for response in stream:
         output += response.token.text
+        if "¡Hasta luego!" in response.token.text:  # Puedes ajustar este condicional según las respuestas de tu modelo
+            conversation_ongoing = False
         yield output
-    return output
 
 def start_conversation():
     global conversation_started
@@ -117,12 +121,11 @@ iface = gr.ChatInterface(
     undo_btn="Deshacer",
     clear_btn="Borrar",
     submit_btn="Enviar",
-    on_submit="end_conversation",  # Llama a la función end_conversation al hacer clic en "Enviar"
     interface_height=550,
 )
 
 # Iniciar un hilo de conversación inicial
-
+threading.Thread(target=start_conversation).start()
 
 # Actualizar la interfaz después de la conversación inicial
 while not conversation_started:
@@ -130,3 +133,4 @@ while not conversation_started:
 
 # Iniciar la interfaz principal
 iface.launch(show_api=False)
+
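Note on the second hunk of generate(): the commit drops the trailing return output (redundant in a generator that already yields the accumulated text) and adds an end-of-conversation check inside the token stream, clearing the global conversation_ongoing flag once the model emits the sign-off phrase "¡Hasta luego!" ("See you later!"). The diff does not show how stream is produced; the sketch below assumes the common Mixtral chat-space template where it comes from client.text_generation(..., stream=True, details=True), and the name generate_sketch plus the exact sampling arguments are illustrative, not taken from this repo.

```python
# Minimal sketch of the streaming loop around this commit's change.
# ASSUMPTIONS: generate_sketch, formatted_prompt, and the text_generation()
# keyword values are illustrative; only the for-loop body mirrors app.py.
from huggingface_hub import InferenceClient

client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")

conversation_ongoing = True  # module-level flag, as used by the patched generate()


def generate_sketch(formatted_prompt: str,
                    temperature: float = 0.9,
                    max_new_tokens: int = 4096,
                    top_p: float = 0.95,
                    repetition_penalty: float = 1.0):
    global conversation_ongoing
    output = ""
    # stream=True together with details=True makes text_generation yield
    # token-level responses exposing response.token.text.
    stream = client.text_generation(
        formatted_prompt,
        temperature=temperature,
        max_new_tokens=max_new_tokens,
        top_p=top_p,
        repetition_penalty=repetition_penalty,
        do_sample=True,
        stream=True,
        details=True,
    )
    for response in stream:
        output += response.token.text
        # New in this commit: mark the conversation as finished when the
        # model produces its sign-off phrase.
        if "¡Hasta luego!" in response.token.text:
            conversation_ongoing = False
        yield output  # each yield is the full text generated so far
```

Each yielded string is the complete response so far, which is what gr.ChatInterface expects from a streaming generator.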
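Note on the start-up changes: import threading is added, the on_submit=... keyword argument is removed from the gr.ChatInterface(...) call, and the initial conversation is now started on a background thread while the main thread polls the global conversation_started flag before calling iface.launch(...). Neither the loop body nor the real start_conversation() appears in this diff; the sketch below is a minimal, self-contained illustration of that flag-polling pattern with a placeholder body.

```python
# Minimal sketch of the background-thread start-up pattern used in this commit.
# ASSUMPTION: the body of start_conversation() is a placeholder; the diff only
# shows that it sets the global conversation_started flag.
import threading
import time

conversation_started = False


def start_conversation():
    global conversation_started
    time.sleep(0.5)  # stands in for preparing the opening assistant turn
    conversation_started = True


# As in the commit: run the initial conversation setup off the main thread.
threading.Thread(target=start_conversation).start()

# The main thread polls the flag before it would go on to iface.launch(...).
while not conversation_started:
    time.sleep(0.1)

print("initial conversation ready")
```

A threading.Event would avoid the busy-wait, but the sketch keeps the flag-polling shape that the commit uses.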