from langchain.vectorstores import FAISS
from langchain.chains import RetrievalQA
from langchain.llms import HuggingFaceHub
import gradio as gr
import os
from langchain.embeddings import HuggingFaceEmbeddings
from langchain_experimental.agents.agent_toolkits.csv.base import create_csv_agent
from langchain.document_loaders import PyPDFDirectoryLoader
from langchain.document_loaders.csv_loader import CSVLoader
from langchain.text_splitter import RecursiveCharacterTextSplitter
import io
import contextlib
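
# Embedding model and prebuilt FAISS index loaded from disk, plus the hosted Mixtral LLM.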
embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
vector_store = FAISS.load_local("vector_db/", embeddings)

repo_id = "mistralai/Mixtral-8x7B-Instruct-v0.1"
llm = HuggingFaceHub(repo_id=repo_id, model_kwargs={"temperature": 0.01, "max_new_tokens": 2048})

retriever = vector_store.as_retriever(
    search_type="similarity",
    search_kwargs={"k": 3, "include_metadata": True},
)

agent = create_csv_agent(llm, ["data/Gretel_Data.csv", "data/RAN_Data _T.csv"], verbose=True)
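
# RAG endpoint: prepend the "Clara" persona prompt, retrieve the top-3 ticket chunks, and return the answer with its sources.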
def echo(message, history):
    try:
        qa = RetrievalQA.from_chain_type(llm=llm, retriever=retriever, return_source_documents=True)
        message = ("Your name is Clara. You are a senior telecom network engineer with access to troubleshooting "
                   "ticket data and other technical and product documentation. Stick to the knowledge from these "
                   "tickets. Ask clarifying questions if needed. " + message)
        result = qa({"query": message})
        bold_answer = "<b>" + result["result"] + "</b>"
        return (bold_answer + "<br></br>"
                + "1. " + str(result["source_documents"][0]) + "<br>"
                + "2. " + str(result["source_documents"][1]) + "<br>"
                + "3. " + str(result["source_documents"][2]))
    except Exception as e:
        return f"An error occurred: {e}"
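
# Agent endpoint: run the CSV agent over the two dataframes and return its cleaned reasoning trace along with the answer.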
def echo_agent(message, history):
    message = "There are 2 df's. If you find a KeyError, check for the same in the other df." + "<br>" + message
    try:
        # Capture the agent's verbose stdout trace so it can be shown in the chat response.
        with io.StringIO() as buffer:
            with contextlib.redirect_stdout(buffer):
                result = agent.run(message)
            verbose_output = buffer.getvalue()
        # Strip ANSI colour/formatting codes from the captured trace.
        verbose_output = verbose_output.replace("\x1b[36;1m\x1b[1;3m", "")
        verbose_output = verbose_output.replace("[1m> ", "")
        verbose_output = verbose_output.replace("[0m", "")
        verbose_output = verbose_output.replace("[32;1m[1;3m", "")
        result = "<b>" + verbose_output + "<br>" + result + "</b>"
        return result
    except Exception as e:
        return f"An error occurred: {e}"
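
# Gradio chat UI for the RAG assistant ("Clara").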
demo = gr.ChatInterface(
    fn=echo,
    chatbot=gr.Chatbot(height=300, label="Hi I am Clara!", show_label=True),
    textbox=gr.Textbox(placeholder="Ask me a question", container=True, autofocus=True, scale=7),
    title="Network Ticket Knowledge Management",
    description="<span style='font-size: 16px;'>Welcome to the Verizon Network Operations Center! I am here to help the Verizon Field Operations team with technical queries & escalations. I am trained on thousands of RAN, Backhaul, Core network & end-user equipment trouble tickets. Ask me! ☺</span>",
    theme=gr.themes.Soft(),
    examples=[
        "wifi connected but no internet showing",
        "internet stopped working after primary link down",
        "internet stopped working link not shifted to secondary after primary link down",
    ],
    cache_examples=False,
    retry_btn=None,
    undo_btn="Delete Previous",
    clear_btn="Clear",
    stop_btn="Stop",
)
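
# Gradio chat UI for the CSV agent ("Sam").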
demo1 = gr.ChatInterface(
    fn=echo_agent,
    chatbot=gr.Chatbot(height=300, label="Hi I am Sam!", show_label=True),
    textbox=gr.Textbox(placeholder="Ask me a question", container=True, autofocus=True, scale=7),
    title="LLM Powered Agent",
    description="<span style='font-size: 16px;'>Welcome to Verizon RAN Visualization & Analytics powered by GEN AI. I have access to hundreds of metrics generated by a RAN base station and can help in visualizing, correlating and generating insights, using the power of Conversational AI ☺</span>",
    theme=gr.themes.Soft(),
    retry_btn=None,
    undo_btn="Delete Previous",
    clear_btn="Clear",
    stop_btn="Stop",
)
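
# Combine both chat UIs into a tabbed app and launch it with basic auth.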
demo2 = gr.TabbedInterface([demo, demo1], ["RAG", "AGENT"], title="INCEDO", theme=gr.themes.Soft())
demo2.launch(share=True, debug=True, auth=("admin", "Sam&Clara"))