Spaces:
Running
Running
File size: 11,410 Bytes
73633bf 5aa00b5 866e761 4b8599b 5aa00b5 866e761 5aa00b5 866e761 5aa00b5 866e761 480139d 5a585e3 866e761 5aa00b5 5a585e3 866e761 5aa00b5 866e761 5aa00b5 866e761 5aa00b5 866e761 5aa00b5 866e761 5aa00b5 9b3a74a 866e761 6cb6e8b 9b3a74a 866e761 6cb6e8b 866e761 9b3a74a 866e761 6cb6e8b 866e761 6cb6e8b 866e761 480139d 866e761 2bb216f 866e761 2bb216f 4b8599b 3e38fd1 2bb216f 6cb6e8b 480139d 866e761 2bb216f 866e761 cfd82c3 6cb6e8b 73633bf 6cb6e8b 73633bf 6cb6e8b 24430f3 4b8599b f56586e 6cb6e8b f56586e 4b8599b f56586e 6cb6e8b b4c9424 6cb6e8b 4b8599b 6cb6e8b 4b8599b 6cb6e8b bca6ba8 4b8599b 73633bf 4b8599b 6cb6e8b 73633bf bca6ba8 4b8599b 6cb6e8b bca6ba8 6cb6e8b 2f557a8 cfd82c3 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 |
# ================================
# ChatVLD Futurista - Gradio 5.44.0
# ================================
import os
from pathlib import Path
import requests
import gradio as gr
import time
from langchain_groq import ChatGroq
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.messages import AIMessage, HumanMessage
from langchain_community.document_loaders import PyMuPDFLoader
from langchain_text_splitters import RecursiveCharacterTextSplitter
from langchain_huggingface import HuggingFaceEmbeddings
from langchain_community.vectorstores import FAISS
from langchain.chains import create_history_aware_retriever, create_retrieval_chain
from langchain.chains.combine_documents import create_stuff_documents_chain
# ================================
# API KEY
# ================================
# Fail fast at startup when the Groq credential is absent from the environment.
if os.environ.get("GROQ_API_KEY") is None:
    raise ValueError("❌ A variável de ambiente GROQ_API_KEY não está definida.")
# ================================
# PDFs
# ================================
# Printer manuals, fetched from Google Drive once and cached on local disk.
pdf_urls = {
    "Codeline SS5632": "https://drive.google.com/uc?id=1s1OPWbxxu8ADBQBjmTfPe1tj-aLcEEIH",
    "Linx 7900": "https://drive.google.com/uc?id=1GVbPq8SDriIS5CQo0kT0EZEqwWwjGJmY"
}
for pdf_name, pdf_url in pdf_urls.items():
    pdf_path = Path(f"{pdf_name}.pdf")
    if not pdf_path.exists():
        # timeout keeps app startup from hanging forever on a stalled download;
        # raise_for_status avoids silently caching an HTML error page as a "PDF".
        r = requests.get(pdf_url, timeout=60)
        r.raise_for_status()
        pdf_path.write_bytes(r.content)
# ================================
# CONFIG
# ================================
# Groq-hosted model id used for every chain in this app.
ID_MODEL = "deepseek-r1-distill-llama-70b"
# Sampling temperature passed straight to ChatGroq.
TEMPERATURE = 0.7
# ================================
# FUNÇÕES
# ================================
def load_llm(model_id, temperature):
    """Instantiate a ChatGroq client for the given model id and temperature."""
    api_key = os.environ["GROQ_API_KEY"]
    return ChatGroq(model=model_id, temperature=temperature, groq_api_key=api_key)
def extract_text_pdf(file_path):
    """Load a PDF via PyMuPDF and return the text of all pages joined by newlines."""
    pages = PyMuPDFLoader(file_path).load()
    return "\n".join(doc.page_content for doc in pages)
def config_retriever(pdf_files, nome_impressora):
    """Build (or load from disk) an MMR retriever over one printer's manuals.

    The FAISS index is persisted under ``index_faiss_<printer>``; subsequent
    runs load it instead of re-embedding the PDFs.
    """
    embeddings = HuggingFaceEmbeddings(model_name="BAAI/bge-m3")
    faiss_path = f"index_faiss_{nome_impressora.replace(' ', '_')}"
    if Path(faiss_path).exists():
        # The index was written by this same app, so deserializing it is safe.
        vectorstore = FAISS.load_local(
            faiss_path, embeddings, allow_dangerous_deserialization=True
        )
    else:
        full_text = "".join(extract_text_pdf(p) + "\n" for p in pdf_files)
        splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=200)
        vectorstore = FAISS.from_texts(splitter.split_text(full_text), embeddings)
        vectorstore.save_local(faiss_path)
    return vectorstore.as_retriever(search_type='mmr', search_kwargs={'k': 3, 'fetch_k': 4})
def config_rag_chain(llm, retriever):
    """Assemble the full RAG chain: history-aware retrieval + stuffed QA prompt."""
    # Step 1: rewrite the user's question into a standalone query using history.
    contextualize_messages = [
        ("system", "Dada a conversa e a pergunta, formule uma pergunta independente."),
        MessagesPlaceholder("chat_history"),
        ("human", "Pergunta: {input}"),
    ]
    hist_aware_retriever = create_history_aware_retriever(
        llm=llm,
        retriever=retriever,
        prompt=ChatPromptTemplate.from_messages(contextualize_messages),
    )
    # Step 2: answer the question with the retrieved context stuffed in.
    system_prompt = "Você é um assistente virtual futurista da empresa VLD. Responda de forma clara e objetiva em português. Se não souber, diga que não sabe."
    answer_messages = [
        ("system", system_prompt),
        MessagesPlaceholder("chat_history"),
        ("human", "Pergunta: {input}\n\nContexto: {context}"),
    ]
    qa_chain = create_stuff_documents_chain(
        llm, ChatPromptTemplate.from_messages(answer_messages)
    )
    return create_retrieval_chain(hist_aware_retriever, qa_chain)
# ================================
# GLOBAL STATE
# ================================
# One shared LLM, plus a pre-built retriever and RAG chain per printer so
# switching printers in the UI is instant.
llm = load_llm(ID_MODEL, TEMPERATURE)
retrievers_cache = {nome: config_retriever([f"{nome}.pdf"], nome) for nome in pdf_urls}
chains_cache = {nome: config_rag_chain(llm, retrievers_cache[nome]) for nome in pdf_urls}
# Shared conversation history, seeded with the assistant's greeting.
chat_history = [AIMessage(content="🚀 Olá, sou o seu suporte virtual futurista! Como posso te ajudar?")]
# Chain answering questions right now; swapped by set_impressora().
current_chain = chains_cache["Codeline SS5632"]
def set_impressora(nome_impressora):
    """Make the given printer's RAG chain the active one; return a status line."""
    global current_chain
    current_chain = chains_cache[nome_impressora]
    return f"📂 Impressora selecionada: {nome_impressora}"
def responder(pergunta):
    """Answer a question with the active RAG chain, updating shared chat history.

    Errors from the chain are caught and returned as a message instead of
    crashing the UI callback.
    """
    global current_chain
    if current_chain is None:
        return "⚠️ Escolha primeiro a impressora."
    chat_history.append(HumanMessage(content=pergunta))
    try:
        result = current_chain.invoke({"input": pergunta, "chat_history": chat_history})
        resposta = result["answer"]
        # deepseek-r1 models emit a <think>...</think> preamble; keep only the
        # text after the last closing tag.
        _, sep, tail = resposta.rpartition("</think>")
        resposta = (tail if sep else resposta).strip()
    except Exception as e:
        resposta = f"❌ Erro: {str(e)}"
    chat_history.append(AIMessage(content=resposta))
    return resposta
# ================================
# CALANGO CHICO'S SPEECH BUBBLE
# ================================
def fala_calango(aba):
    """Return the mascot HTML (speech bubble + image) for the given tab name."""
    if aba == "CHATVLD":
        mensagem = "Pronto pra tirar suas dúvidas sobre impressoras! 🖨️"
    else:
        mensagem = "Aqui eu te levo pro portal futurista! 🌐"
    # Both tabs share the exact same bubble markup; only the text differs.
    return f"""
    <div style="display:flex;flex-direction:column;align-items:flex-start;margin-bottom:20px;margin-left:20px;">
      <div style="display:inline-block;background:#f1f1f1;border:2px solid #555;border-radius:15px;
      padding:8px 15px;font-weight:bold;color:#333;font-size:14px;position:relative;margin-bottom:10px;">
        {mensagem}
        <div style="content:'';position:absolute;bottom:-15px;left:25px;width:0;height:0;
        border-left:10px solid transparent;border-right:10px solid transparent;
        border-top:15px solid #555;"></div>
        <div style="content:'';position:absolute;bottom:-13px;left:26px;width:0;height:0;
        border-left:9px solid transparent;border-right:9px solid transparent;
        border-top:13px solid #f1f1f1;"></div>
      </div>
      <img src="https://raw.githubusercontent.com/aislantavares329-eng/chatvld/main/mascote.png" width="120">
    </div>
    """
# ================================
# FUTURISTIC CSS
# ================================
# Stylesheet injected into gr.Blocks(css=...). The #titulo-principal rule
# paints the page title with a gradient that the `brilho` keyframes slide
# across for an animated shine effect.
custom_css = """
.gradio-chatbot { background-color: #f8f9fa; color: #111; }
.gradio-textbox textarea { background-color: #1c1c1c; color: #fff; border-radius: 8px; border: 1px solid #333; padding: 8px; }
.gradio-button, .gradio-button:active { background: linear-gradient(to right, #00c6ff, #0072ff); color: #fff; border: none; border-radius: 8px; }
#titulo-principal {
text-align: center;
font-size: 40px;
font-weight: bold;
background: linear-gradient(90deg, #00c6ff, #8a2be2, #ff0080, #00c6ff);
-webkit-background-clip: text;
-webkit-text-fill-color: transparent;
animation: brilho 5s linear infinite;
margin-bottom: 20px;
}
@keyframes brilho {
0% { background-position: 0% 50%; }
50% { background-position: 100% 50%; }
100% { background-position: 0% 50%; }
}
"""
# ================================
# GRADIO INTERFACE
# ================================
with gr.Blocks(css=custom_css, theme="soft") as iface:
    gr.Markdown("🛸 SUPORTE AUTOMATIZADO", elem_id="titulo-principal")
    # Mascot speech bubble, refreshed when the user switches tabs (see troca_aba).
    fala_box = gr.HTML(fala_calango("CHATVLD"))
    with gr.Tabs() as tabs:
        # Futuristic chat tab
        with gr.TabItem("💭 CHATVLD") as chat_tab:
            gr.Markdown("📌 **Por gentileza, escolha a impressora que deseja consultar.**")
            impressora_select = gr.Dropdown(
                choices=list(pdf_urls.keys()),
                label="Selecione a impressora",
                value="Codeline SS5632"
            )
            status_box = gr.Textbox(label="Status", interactive=False)
            # Swap the active chain when a different printer is picked.
            # NOTE(review): both returned values feed the same component
            # (outputs=[status_box, status_box]), so the "aguarde" message is
            # overwritten immediately by set_impressora's result — a generator
            # (yield) would be needed to actually show the interim text.
            def troca_impressora(nome_impressora):
                return "⏳ Montando base de conhecimento, aguarde...", set_impressora(nome_impressora)
            impressora_select.change(
                fn=troca_impressora,
                inputs=impressora_select,
                outputs=[status_box, status_box]
            )
            chatbot = gr.Chatbot(type="messages")
            with gr.Row():
                txt = gr.Textbox(placeholder="Diz o que tu quer macho...", show_label=False, lines=2)
                submit_btn = gr.Button("🚀 Arrocha")
            with gr.Row():
                clear_btn = gr.Button("🧹 Barrer a prosa")
                new_chat_btn = gr.Button("✨ Nova prosa")
            # Streamed handler: echo the user message, show a typing placeholder,
            # then replace the placeholder with the real RAG answer.
            def enviar(msg, history):
                history.append({"role": "user", "content": msg})
                yield history, ""
                history.append({"role": "assistant", "content": "🤖 Digitando..."})
                yield history, ""
                resposta = responder(msg)
                history[-1] = {"role": "assistant", "content": resposta}
                yield history, ""
            # Clear both the visible chat and the shared LangChain history.
            def limpar():
                chat_history.clear()
                return [], ""
            # Reset history and restart the conversation with a fresh greeting.
            def novo_chat():
                chat_history.clear()
                chat_history.append(AIMessage(content="🤖 Novo chat iniciado. Como posso te ajudar?"))
                return [{"role": "assistant", "content": "🤖 Novo chat iniciado. Como posso te ajudar?"}], ""
            txt.submit(enviar, [txt, chatbot], [chatbot, txt])
            submit_btn.click(enviar, [txt, chatbot], [chatbot, txt])
            clear_btn.click(limpar, outputs=[chatbot, txt])
            new_chat_btn.click(novo_chat, outputs=[chatbot, txt])
        # VALID NODE tab: just a link-out button to the internal portal.
        with gr.TabItem("🌐 VALID N.O.D.E") as node_tab:
            gr.Markdown("### Acesse o VALID NODE clicando no botão abaixo:")
            gr.HTML('<button onclick="window.open(\'https://172.17.200.97\', \'_blank\')" '
                'style="background:linear-gradient(to right,#00c6ff,#0072ff);color:#fff;border:none;'
                'border-radius:8px;padding:10px 20px;font-size:16px;cursor:pointer;">🖥️ VALID N.O.D.E</button>')
    # Update the mascot's speech to match the selected tab (emoji prefixes
    # are stripped from the tab label before dispatching).
    def troca_aba(evt: gr.SelectData):
        return fala_calango(evt.value.replace("💭 ","").replace("🌐 ",""))
    tabs.select(fn=troca_aba, outputs=fala_box)
iface.launch()