# coding=utf8
from llama_index import (
    load_index_from_storage,
    SimpleDirectoryReader,
    readers,
    GPTVectorStoreIndex,
    StorageContext,
    ServiceContext,
    LLMPredictor,
    PromptHelper,
)
from langchain import OpenAI
import gradio as gr
import random
import time
import sys
import os
from transformers import pipeline

# Speech-to-text pipeline used by transcribe(); not currently wired into the UI
# (see the commented-out gr.Audio input inside the Blocks below).
p = pipeline("automatic-speech-recognition")

# The key value itself is not set here: export OPENAI_API_KEY before running.
# This bare lookup raises KeyError if the variable is missing.
os.environ["OPENAI_API_KEY"]

# Custom CSS that restyles the generated Gradio/Svelte classes for this app.
css = """
.gradio-container { background-color: #ffffff; }
#component-2 { position: absolute; bottom: 0; width: 100%; }
.app.svelte-ac4rv4>.main.svelte-ac4rv4 { display: flex; flex-grow: 1; flex-direction: column; background-image: url(https://i.ibb.co/3rVCQz0/background-GPT-1.png); }
div.svelte-awbtu4 { display: flex; flex-direction: inherit; flex-wrap: wrap; gap: var(--form-gap-width); box-shadow: var(--block-shadow); border: var(--block-border-width) solid #5f0000; border-radius: var(--radius-lg); background: #ffffff; overflow: hidden; position: fixed; bottom: 0; margin-left: -16px; }
.bot.svelte-6roggh.svelte-6roggh, .pending.svelte-6roggh.svelte-6roggh { border-color: var(--border-color-primary); background: #00adef; color: white; font-weight: bolder; }
div.float.svelte-1frtwj3 { position: absolute; opacity: 0; top: var(--block-label-margin); left: var(--block-label-margin); }
.wrap.svelte-6roggh.svelte-6roggh { padding: var(--block-padding); height: 100%; max-height: 100%; overflow-y: auto; }
div.user.svelte-6roggh.svelte-6roggh { background: #0D1233; color: white; font-weight: bolder; }
div.svelte-1frtwj3 { display: inline-flex; align-items: center; z-index: var(--layer-2); box-shadow: var(--block-shadow); border: var(--block-label-border-width) solid #ffffff; border-top: none; border-left: none; border-radius: var(--block-label-radius); background: #eff6ff; padding: var(--block-label-padding); pointer-events: none; color: var(--block-label-text-color); font-weight: var(--block-label-text-weight); width: 100%; line-height: var(--line-sm); }
div.bot.svelte-h.svelte-6roggh { background: #199FDA; color: white; font-weight: bolder; }
div.bot.svelte-17nzccn.svelte-17nzccn { background: #199FDA; }
div.user.svelte-6roggh.svelte-6roggh { background: #0D1233; }
div.user.svelte-17nzccn.svelte-17nzccn { background: #0D1233; }
div.textBoxBot { display: flex; flex-direction: inherit; flex-wrap: wrap; gap: var(--form-gap-width); box-shadow: var(--block-shadow); border: var(--block-border-width) solid #0D1233; border-radius: var(--radius-lg); background: #ffffff; overflow: hidden; position: fixed; bottom: 0; margin-left: -16px; }
.textarea.svelte-1pie7s6.svelte-1pie7s6 { display: flex; flex-direction: inherit; flex-wrap: wrap; gap: var(--form-gap-width); box-shadow: var(--block-shadow); border: var(--block-border-width) solid #0D1233; border-radius: var(--radius-lg); background: #ffffff; overflow: hidden; position: fixed; bottom: 0; margin-left: -16px; }
.svelte-1pie7s6.svelte-1pie7s6 { display: flex; flex-direction: inherit; flex-wrap: wrap; gap: var(--form-gap-width); box-shadow: var(--block-shadow); border: 5px solid #0D1233; border-radius: var(--radius-lg); border-color: #0D1233; background: #ffffff; color: #0D1233; font-size: 16px; overflow: hidden; position: fixed; bottom: 20px; /* adjust the vertical distance from the footer */ margin-left: -5px; max-height: 80vh; /* adjust the maximum height of the box */ max-width: 78%; /* adjust the maximum width of the box */ }
.img.svelte-ms5bsk { width: 100%; height: 100%; background-color: #ffffff; border: 0px; border-width: 0px; }
.app.svelte-ac4rv4.svelte-ac4rv4 { max-width: none; background-color: #ffffff; }
.app.svelte-ac4rv4.svelte-ac4rv4 { max-width: none; }
.wrap.svelte-1o68geq.svelte-1o68geq { max-height: none; }
.block.svelte-mppz8v { position: relative; margin: 0; box-shadow: var(--block-shadow); border-width: var(--block-border-width); border-color: #ffffff; border-radius: var(--block-radius); background: #ffffff; width: 100%; line-height: var(--line-sm); }
"""

# Unused leftover sample text (note the unterminated ```py fence inside the string).
md = """This is some code: hello
```py
def fn(x, y, z):
    print(x, y, z)
"""


def transcribe(audio):
    """Transcribe an audio file with the speech-recognition pipeline."""
    text = p(audio)["text"]
    return text


def construct_index(directory_path):
    """Build a vector index from the documents in directory_path and persist it."""
    num_outputs = 2000
    prompt_helper = PromptHelper(context_window=3900, num_output=256, max_chunk_overlap=20, chunk_size_limit=1024)
    llm_predictor = LLMPredictor(llm=OpenAI(temperature=0.0, model_name="gpt-3.5-turbo-16k", max_tokens=num_outputs))
    documents = SimpleDirectoryReader(directory_path).load_data()
    service_context = ServiceContext.from_defaults(llm_predictor=llm_predictor, prompt_helper=prompt_helper)
    index = GPTVectorStoreIndex.from_documents(documents, service_context=service_context, prompt_helper=prompt_helper)
    # Despite the name, persist_dir is a directory, not a JSON file.
    index.storage_context.persist(persist_dir='index.json')
    return index


def chatbot(input_text):
    """Answer input_text by querying the persisted vector index."""
    num_outputs = 4097  # maximum completion tokens requested from the model
    prompt_helper = PromptHelper(context_window=3900, num_output=256, max_chunk_overlap=20, chunk_size_limit=1024)
    llm_predictor = LLMPredictor(llm=OpenAI(temperature=0.0, model_name="gpt-3.5-turbo-16k", max_tokens=num_outputs))
    service_context = ServiceContext.from_defaults(llm_predictor=llm_predictor, prompt_helper=prompt_helper)
    storage_context = StorageContext.from_defaults(persist_dir='index.json')
    # Load the index that construct_index() persisted.
    index = load_index_from_storage(storage_context)
    query_engine = index.as_query_engine(service_context=service_context, verbose=True, response_mode="compact")
    response = query_engine.query(input_text)
    return str(response.response)


with gr.Blocks(css=css, title='Exposuper', elem_classes=".app.svelte-ac4rv4.svelte-ac4rv4") as demo:
    realPath = str(os.path.dirname(os.path.realpath(__file__)))
    img1 = gr.Image("images/exposuper.png", elem_classes=".img.svelte-ms5bsk", elem_id="img.svelte-ms5bsk").style(container=False)
    # Label: "Chat with CD2's GPT Super"
    gpt = gr.Chatbot(label="Converse com GPT Super da CD2", elem_classes=".wrap.svelte-1o68geq.svelte-1o68geq", elem_id="chatbot").style(container=True)
    # Placeholder: "Welcome to ExpoSuper, what is your question?"
    msg = gr.Textbox(
        elem_id="div.svelte-awbtu4",
        elem_classes="div.svelte-awbtu4",
        show_label=False,
        placeholder="Bem vindo ao ExpoSuper, Qual sua pergunta?",
    ).style(container=True)
    # clear = gr.Button("Limpar Conversa")
    # gr.Audio(source="microphone", type="filepath", label="ESTÁ COM DIFICULDADES EM ESCREVER? CLIQUE E ME DIGA O QUE DESEJA")

    def respond(message, chat_history):
        chat_history.append((message, chatbot(message)))
        time.sleep(1)
        realPath = str(os.path.dirname(os.path.realpath(__file__)))
        return "", chat_history

    # clear.click(lambda: None, None, gpt, queue=False)
    msg.submit(respond, [msg, gpt], [msg, gpt])

index = construct_index("docs")
demo.launch()