Update app.py
Browse files
app.py
CHANGED
@@ -7,8 +7,8 @@ import time
|
|
7 |
import sys
|
8 |
import os
|
9 |
from transformers import pipeline
|
10 |
-
p = pipeline("automatic-speech-recognition")
|
11 |
|
|
|
12 |
|
13 |
os.environ["OPENAI_API_KEY"] = '<REDACTED: a live API key was committed here — revoke it and load the key from the environment>'
|
14 |
|
@@ -25,20 +25,20 @@ css = """
|
|
25 |
div.svelte-1frtwj3 {
|
26 |
display: inline-flex;
|
27 |
align-items: center;}
|
28 |
-
|
29 |
div.float.svelte-1frtwj3 {
|
30 |
position: absolute;
|
31 |
opacity: 0;
|
32 |
top: var(--block-label-margin);
|
33 |
left: var(--block-label-margin);}
|
34 |
-
|
35 |
.wrap.svelte-6roggh.svelte-6roggh {
|
36 |
padding: var(--block-padding);
|
37 |
height: 100%;
|
38 |
max-height: 800px;
|
39 |
overflow-y: auto;
|
40 |
}
|
41 |
-
|
42 |
.bot.svelte-6roggh.svelte-6roggh, .pending.svelte-6roggh.svelte-6roggh {
|
43 |
border-color: var(--border-color-accent);
|
44 |
background-color: var(--color-accent-soft);
|
@@ -111,17 +111,20 @@ div.bot.svelte-17nzccn.svelte-17nzccn {
|
|
111 |
div.user.svelte-6roggh.svelte-6roggh {
|
112 |
background: #5F0000;
|
113 |
color: white;
|
114 |
-
|
115 |
-
|
116 |
}
|
117 |
div.user.svelte-17nzccn.svelte-17nzccn {
|
118 |
background: #5F0000;
|
119 |
}
|
120 |
"""
|
121 |
|
|
|
122 |
def transcribe(audio):
    """Convert an audio file to text via the module-level ASR pipeline `p`."""
    result = p(audio)
    return result["text"]
|
|
|
|
|
125 |
def construct_index(directory_path):
|
126 |
max_input_size = 100000000
|
127 |
num_outputs = 1000000000
|
@@ -141,66 +144,86 @@ def construct_index(directory_path):
|
|
141 |
|
142 |
|
143 |
def chatbot(input_text):
    """Answer *input_text* by querying the vector index persisted in index.json."""
    loaded_index = GPTSimpleVectorIndex.load_from_disk('index.json')
    answer = loaded_index.query(input_text)
    return str(answer.response)
|
148 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
149 |
with gr.Blocks(css=css) as demo:
|
150 |
realPath = str(os.path.dirname(os.path.realpath(__file__)))
|
151 |
-
img1 = gr.Image("images/header.png", elem_classes=".img.svelte-ms5bsk", elem_id="img.svelte-ms5bsk").style(
|
152 |
-
|
153 |
-
|
154 |
-
|
155 |
-
|
156 |
-
|
157 |
-
|
|
|
|
|
|
|
|
|
158 |
def respond(message, chat_history):
|
159 |
chat_history.append((message, chatbot(message)))
|
160 |
time.sleep(1)
|
161 |
vetor = []
|
162 |
realPath = str(os.path.dirname(os.path.realpath(__file__)))
|
163 |
|
164 |
-
if str(message).upper()=="OLA" or str(message).upper()=="OLÁ" or str(message).upper()=="OI":
|
165 |
vetor = vetor + [((realPath + "\\images\\hippo-apresentacao.mp4",), "")]
|
166 |
-
elif str(message).upper() == "VINHO CASA DEL RONCO PINOT GRIGIO"
|
167 |
-
|
168 |
-
elif str(message).upper() == "SURVIVOR CHENIN BLANC"
|
169 |
-
|
170 |
-
|
171 |
-
|
172 |
-
elif str(message).upper() == "VINHO PORTO NOVA VERDE"
|
173 |
-
|
174 |
-
|
175 |
-
|
176 |
-
elif str(message).upper() == "VINHO QUINTA DO PINTO ARINTO BRANCO"
|
177 |
-
|
178 |
-
elif str(message).upper() == "VINHO 1492 CHARDONNAY"
|
179 |
-
|
180 |
-
elif str(message).upper() == "ME SUGIRA UM VINHO TINTO BOM COM QUEIJO"
|
181 |
-
|
182 |
-
|
183 |
-
|
184 |
-
elif str(message).upper() == "VINHO BOM COM CHOCOLATE"
|
185 |
-
|
186 |
-
elif str(message).upper() == "VINHO BOM COM PEIXE"
|
187 |
-
|
188 |
-
|
189 |
-
|
190 |
-
elif str(message).upper() == "VINHAS DO LASSO COLHEITA SELECIONADA"
|
191 |
-
|
192 |
-
elif str(message).upper() == "DOM CAMPOS MOSCATEL"
|
193 |
-
|
194 |
-
elif str(message).upper() == "BECAS ROSE MEIO SECO"
|
195 |
-
|
196 |
-
elif str(message).upper() == "PORTA DA RAVESSA"
|
197 |
-
|
198 |
-
|
199 |
-
|
200 |
-
|
201 |
-
|
202 |
-
|
203 |
-
msg.submit(respond, [msg, gpt], [msg,gpt])
|
204 |
|
205 |
index = construct_index("docs")
|
206 |
demo.launch()
|
|
|
7 |
import sys
|
8 |
import os
|
9 |
from transformers import pipeline
|
|
|
10 |
|
11 |
+
# Module-level ASR pipeline shared by transcribe(); built once at import time.
p = pipeline("automatic-speech-recognition")

# SECURITY: an OpenAI API key used to be hard-coded on the next line. Any key
# committed to source control must be treated as public — revoke it and supply
# a fresh one through the environment instead of embedding it in the code.
if "OPENAI_API_KEY" not in os.environ:
    raise RuntimeError("OPENAI_API_KEY is not set; export it before launching the app.")
|
14 |
|
|
|
25 |
div.svelte-1frtwj3 {
|
26 |
display: inline-flex;
|
27 |
align-items: center;}
|
28 |
+
|
29 |
div.float.svelte-1frtwj3 {
|
30 |
position: absolute;
|
31 |
opacity: 0;
|
32 |
top: var(--block-label-margin);
|
33 |
left: var(--block-label-margin);}
|
34 |
+
|
35 |
.wrap.svelte-6roggh.svelte-6roggh {
|
36 |
padding: var(--block-padding);
|
37 |
height: 100%;
|
38 |
max-height: 800px;
|
39 |
overflow-y: auto;
|
40 |
}
|
41 |
+
|
42 |
.bot.svelte-6roggh.svelte-6roggh, .pending.svelte-6roggh.svelte-6roggh {
|
43 |
border-color: var(--border-color-accent);
|
44 |
background-color: var(--color-accent-soft);
|
|
|
111 |
div.user.svelte-6roggh.svelte-6roggh {
|
112 |
background: #5F0000;
|
113 |
color: white;
|
114 |
+
|
115 |
+
|
116 |
}
|
117 |
div.user.svelte-17nzccn.svelte-17nzccn {
|
118 |
background: #5F0000;
|
119 |
}
|
120 |
"""
|
121 |
|
122 |
+
|
123 |
def transcribe(audio):
    # Run the speech-recognition pipeline and keep only the transcript text.
    return p(audio)["text"]
|
126 |
+
|
127 |
+
|
128 |
def construct_index(directory_path):
|
129 |
max_input_size = 100000000
|
130 |
num_outputs = 1000000000
|
|
|
144 |
|
145 |
|
146 |
def chatbot(input_text):
    """Look up *input_text* against the on-disk vector index and return the reply text."""
    idx = GPTSimpleVectorIndex.load_from_disk('index.json')
    return str(idx.query(input_text).response)
|
150 |
|
151 |
+
def chat(MSGS, MaxToken=50, outputs=3):
    """Send a chat transcript to the Chat Completion endpoint.

    MSGS     -- list of chat messages in the API's role/content format.
    MaxToken -- cap on tokens generated per completion (maximum 4096 for
                "gpt-3.5-turbo").
    outputs  -- number of alternative completions to request.
    Returns the message of the first returned choice.
    """
    # NOTE(review): this relies on whatever `OpenAI` names at module level;
    # with the official client the module-level call is usually spelled
    # `openai.ChatCompletion.create` — confirm against the file's imports.
    # Other usable models: gpt-4, gpt-4-0314, gpt-4-32k, gpt-4-32k-0314,
    # gpt-3.5-turbo, gpt-3.5-turbo-0301.
    response = OpenAI.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=MSGS,
        max_tokens=MaxToken,
        n=outputs,
    )
    return response.choices[0].message
|
167 |
+
|
168 |
with gr.Blocks(css=css) as demo:
    realPath = str(os.path.dirname(os.path.realpath(__file__)))

    # Header image plus the two interactive widgets (chat log and text input).
    img1 = gr.Image("images/header.png", elem_classes=".img.svelte-ms5bsk",
                    elem_id="img.svelte-ms5bsk").style(container=False)
    gpt = gr.Chatbot(label=".", elem_classes=".wrap.svelte-1o68geq.svelte-1o68geq",
                     elem_id="chat").style(container=True)
    msg = gr.Textbox(elem_id="div.svelte-awbtu4", elem_classes="textBoxBot",
                     show_label=False,
                     placeholder="Bem vindo ao assistente virtual OM30, em que posso ajuda-lo?",
                     ).style(container=False)

    # clear = gr.Button("Limpar Conversa")
    # gr.Audio(source="microphone", type="filepath",label="ESTÁ COM DIFICULDADES EM ESCREVER? CLIQUE E ME DIGA O QUE DESEJA")

    # Canned media attachments keyed by the upper-cased user message.
    _MEDIA = {
        "VINHO CASA DEL RONCO PINOT GRIGIO": ["\\images\\casa-del-ronco-branco.png"],
        "SURVIVOR CHENIN BLANC": ["\\images\\survivor-branco.png",
                                  "\\images\\survivor.mp4"],
        "VINHO PORTO NOVA VERDE": ["\\images\\porta-nova-branco.jpg",
                                   "\\images\\porta-nova-verde.mp4"],
        "VINHO QUINTA DO PINTO ARINTO BRANCO": ["\\images\\quinta-pinto-arinto-branco.png"],
        "VINHO 1492 CHARDONNAY": ["\\images\\chardonay-branco.jpg"],
        "ME SUGIRA UM VINHO TINTO BOM COM QUEIJO": ["\\images\\TNT-CABERNET.png",
                                                    "\\images\\vinho-queijo.mp4"],
        "VINHO BOM COM CHOCOLATE": ["\\images\\TNT-CABERNET.png"],
        "VINHO BOM COM PEIXE": ["\\images\\luson-branco.png",
                                "\\images\\vinho-peixe.mp4"],
        "VINHAS DO LASSO COLHEITA SELECIONADA": ["\\images\\lasso-colheita-rose.png"],
        "DOM CAMPOS MOSCATEL": ["\\images\\dom-campos-rose.png"],
        "BECAS ROSE MEIO SECO": ["\\images\\becas-rose.png"],
        "PORTA DA RAVESSA": ["\\images\\luson-branco.png"],
    }
    # All three greeting spellings share the presentation video.
    for _greet in ("OLA", "OLÁ", "OI"):
        _MEDIA[_greet] = ["\\images\\hippo-apresentacao.mp4"]

    def respond(message, chat_history):
        # Answer through the vector index, then append any canned media
        # registered for this exact (upper-cased) message.
        chat_history.append((message, chatbot(message)))
        time.sleep(1)
        attachments = []
        realPath = str(os.path.dirname(os.path.realpath(__file__)))
        for relative in _MEDIA.get(str(message).upper(), []):
            attachments.append(((realPath + relative,), ""))
        return "", chat_history + attachments

    # clear.click(lambda:None, None, gpt, queue=False,)
    msg.submit(respond, [msg, gpt], [msg, gpt])

index = construct_index("docs")
demo.launch()
|