# coding=utf8
from gpt_index import SimpleDirectoryReader, GPTListIndex, GPTSimpleVectorIndex, LLMPredictor, PromptHelper
from langchain import OpenAI
import gradio as gr
import random
import time
import sys
import os
from transformers import pipeline
# Speech-to-text pipeline used by transcribe() below.
p = pipeline("automatic-speech-recognition")
# Supply the OpenAI key via the environment (e.g. a Hugging Face Space secret); never hard-code it in source.
os.environ["OPENAI_API_KEY"] = os.getenv("OPENAI_API_KEY", "")
css = """
#component-2 {position: absolute; bottom: 0; width: 100%;
}
div.float.svelte-1frtwj3 {
position: absolute;
opacity: 0;
top: var(--block-label-margin);
left: var(--block-label-margin);
}
.wrap.svelte-6roggh.svelte-6roggh {
padding: var(--block-padding);
height: 100%;
max-height: 100%;
overflow-y: auto;
}
.bot.svelte-6roggh.svelte-6roggh, .pending.svelte-6roggh.svelte-6roggh {
border-color: #759ce9;
background: #ffffff;
}
div.svelte-1frtwj3 {
display: inline-flex;
align-items: center;
z-index: var(--layer-2);
box-shadow: var(--block-shadow);
border: var(--block-label-border-width) solid #ffffff;
border-top: none;
border-left: none;
border-radius: var(--block-label-radius);
background: #eff6ff;
padding: var(--block-label-padding);
pointer-events: none;
color: var(--block-label-text-color);
font-weight: var(--block-label-text-weight);
width: 100%;
line-height: var(--line-sm);
}
div.svelte-awbtu4 {
display: flex;
flex-direction: inherit;
flex-wrap: wrap;
gap: var(--form-gap-width);
box-shadow: var(--block-shadow);
border: var(--block-border-width) solid var(--border-color-primary);
border-radius: var(--radius-lg);
background: var(--border-color-primary);
overflow: hidden;
position: fixed;
bottom: 0;
margin-left: -16px;
}
img.svelte-ms5bsk {
width: var(--size-full);
height: 90px;
object-fit: contain;
}
.app.svelte-ac4rv4.svelte-ac4rv4 {
max-width: none;
background-color: #ffffff;
}
.wrap.svelte-1o68geq.svelte-1o68geq {max-height: none}
.block.svelte-mppz8v {
position: relative;
margin: 0;
box-shadow: var(--block-shadow);
border-width: var(--block-border-width);
border-color: #dbeafe;
border-radius: var(--block-radius);
background: #dbeafe;
width: 100%;
line-height: var(--line-sm);
}
"""
# Leftover Markdown sample; not referenced anywhere else in the app.
md = """This is some code:
hello
```py
def fn(x, y, z):
    print(x, y, z)
```
"""
def transcribe(audio):
    # Run the speech-recognition pipeline on the recorded audio file.
    text = p(audio)["text"]
    return text
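# Sketch (hypothetical wiring, mirroring the commented-out microphone input further
# below): the speech pipeline above could feed the chat textbox from inside the
# gr.Blocks() context, e.g.:
#
#   audio = gr.Audio(source="microphone", type="filepath",
#                    label="ESTÁ COM DIFICULDADES EM ESCREVER? CLIQUE E ME DIGA O QUE DESEJA")
#   audio.change(transcribe, inputs=audio, outputs=msg)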
def construct_index(directory_path):
    # Prompt/LLM settings. The original constants (10000 / 10000 / 20000 / 600000)
    # were inconsistent (the chunk overlap exceeded the input window); these are the
    # usual values for text-davinci-003.
    max_input_size = 4096
    num_outputs = 512
    max_chunk_overlap = 20
    chunk_size_limit = 600
    prompt_helper = PromptHelper(max_input_size, num_outputs, max_chunk_overlap, chunk_size_limit=chunk_size_limit)
    llm_predictor = LLMPredictor(llm=OpenAI(temperature=0.0, model_name="text-davinci-003", max_tokens=num_outputs))
    # Read every document under `directory_path`, build the vector index and persist it.
    documents = SimpleDirectoryReader(directory_path).load_data()
    index = GPTSimpleVectorIndex.from_documents(documents)
    index.save_to_disk('index.json')
    return index
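# Note: `prompt_helper` and `llm_predictor` above are built but not passed to
# GPTSimpleVectorIndex.from_documents, so the index falls back to gpt_index defaults.
# Depending on the installed gpt_index / llama_index version they can be wired in,
# e.g. via the older constructor form (sketch, version-dependent):
#
#   index = GPTSimpleVectorIndex(documents, llm_predictor=llm_predictor,
#                                prompt_helper=prompt_helper)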
def chatbot(input_text):
    # Reload the persisted index and answer the user's question from it.
    index = GPTSimpleVectorIndex.load_from_disk('index.json')
    response = index.query(input_text)
    return str(response.response)
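# Quick smoke test (assumes construct_index("docs") has already written index.json):
#
#   print(chatbot("VINHO 1492 CHARDONNAY"))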
with gr.Blocks(css=css, elem_classes=".app.svelte-ac4rv4.svelte-ac4rv4") as demo:
realPath = str(os.path.dirname(os.path.realpath(__file__)))
img1 = gr.Image("images/imagem teste.png")
gpt = gr.Chatbot(label="Converse com a Zoh",elem_classes=".wrap.svelte-1o68geq.svelte-1o68geq", elem_id="chatbot").style(container=True)
    msg = gr.Textbox(elem_id="div.svelte-awbtu4", elem_classes="div.svelte-awbtu4", show_label=False,
                     placeholder="Bem-vindo ao Hippo Supermercados, em que posso ajudá-lo?",
                     ).style(container=False)
#clear = gr.Button("Limpar Conversa")
#gr.Audio(source="microphone", type="filepath",label="ESTÁ COM DIFICULDADES EM ESCREVER? CLIQUE E ME DIGA O QUE DESEJA")
    def respond(message, chat_history):
        # Answer via the index, then attach any product images/videos that match the message.
        chat_history.append((message, chatbot(message)))
        time.sleep(1)
        realPath = str(os.path.dirname(os.path.realpath(__file__)))
        # Media files (under images/) keyed by the upper-cased user message.
        media_by_message = {
            "OLA": ["hippo-apresentacao.mp4"],
            "OLÁ": ["hippo-apresentacao.mp4"],
            "OI": ["hippo-apresentacao.mp4"],
            "VINHO CASA DEL RONCO PINOT GRIGIO": ["casa-del-ronco-branco.png"],
            "SURVIVOR CHENIN BLANC": ["survivor-branco.png", "survivor.mp4"],
            "VINHO PORTO NOVA VERDE": ["porta-nova-branco.jpg", "porta-nova-verde.mp4"],
            "VINHO QUINTA DO PINTO ARINTO BRANCO": ["quinta-pinto-arinto-branco.png"],
            "VINHO 1492 CHARDONNAY": ["chardonay-branco.jpg"],
            "ME SUGIRA UM VINHO TINTO BOM COM QUEIJO": ["TNT-CABERNET.png", "vinho-queijo.mp4"],
            "VINHO BOM COM CHOCOLATE": ["TNT-CABERNET.png"],
            "VINHO BOM COM PEIXE": ["luson-branco.png", "vinho-peixe.mp4"],
            "VINHAS DO LASSO COLHEITA SELECIONADA": ["lasso-colheita-rose.png"],
            "DOM CAMPOS MOSCATEL": ["dom-campos-rose.png"],
            "BECAS ROSE MEIO SECO": ["becas-rose.png"],
            "PORTA DA RAVESSA": ["luson-branco.png"],
        }
        # os.path.join keeps the paths portable (the original hard-coded Windows "\\" separators,
        # which do not resolve on the Linux host a Space runs on).
        vetor = [((os.path.join(realPath, "images", name),), "")
                 for name in media_by_message.get(str(message).upper(), [])]
        # Returning "" clears the textbox; the extra entries render as media in the Chatbot.
        return "", chat_history + vetor
#clear.click(lambda:None, None, gpt, queue=False,)
msg.submit(respond, [msg, gpt], [msg,gpt])
index = construct_index("docs")
demo.launch()