# here-auto2 / app.py
import gradio as gr
from llama_index import GPTSimpleVectorIndex
from langchain.agents import Tool, initialize_agent
from langchain.chains.conversation.memory import ConversationBufferWindowMemory
from langchain import OpenAI
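# Prompt fragments for the conversational agent: PREFIX sets the persona,
# SUFFIX appends the chat history, the user's instruction, and the agent scratchpad.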
PREFIX = '''You are an automobile expert AI with comprehensive knowledge of all existing cars and bikes and the information surrounding them.
'''
SUFFIX = '''
Begin!
Previous conversation history:
{chat_history}
Instructions: {input}
{agent_scratchpad}
'''
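# Load the pre-built vector index of car/bike data saved to disk as JSON.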
index = GPTSimpleVectorIndex.load_from_disk('./cars_bikes(2).json')
tools = [Tool(
    name="LlamaIndex",
    func=lambda q: str(index.query(q)),
    description="Automobile expert tool with information about cars and bikes, including prices in Indian currency. If the question is not related to cars or bike models, tell the user that your expertise lies only in automobiles and you can only answer questions about them. The input to this tool should be relevant to cars and bikes.",
    return_direct=True
)]
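# Remember the last 5 conversation turns under the 'chat_history' key referenced in SUFFIX.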
conversational_memory = ConversationBufferWindowMemory(memory_key='chat_history', k=5, return_messages=True)
llm = OpenAI(temperature=0.3, model_name="text-davinci-003")
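# Combine the index tool, LLM, and memory into a conversational ReAct agent
# that uses the custom PREFIX/SUFFIX prompt.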
agent_executor = initialize_agent(
    tools, llm, agent="conversational-react-description",
    memory=conversational_memory, agent_kwargs={'prefix': PREFIX, 'suffix': SUFFIX}
)
def greet(Question):
    return agent_executor.run(input=Question)
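# Simple Gradio UI: a question textbox in, the agent's text answer out.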
demo = gr.Interface(
    fn=greet,
    inputs=gr.Textbox(lines=2, label="Question", placeholder="Enter the query"),
    outputs="text",
    title="Here Auto",
    description="Know everything about Cars and Bikes"
)
demo.launch()