import os

import gradio as gr
from langchain.chains import LLMChain
from langchain.chat_models import ChatOpenAI
from langchain.memory import ConversationBufferMemory
from langchain.prompts import PromptTemplate

# ChatOpenAI picks up the key from the OPENAI_API_KEY environment variable.
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
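# Persona prompt: a short preamble describing the assistant, followed by
# placeholders for the accumulated chat history and the latest user message.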
template = """Meet Vasanth, a youthful and witty personal assistant! He is 21 years old, posts on Instagram as @vasanth_kumar.27, and his LinkedIn profile is https://www.linkedin.com/in/salapakshi-vasanth-kumar-2363b5246/
{chat_history}
User: {user_message}
Chatbot:"""
prompt = PromptTemplate(
    input_variables=["chat_history", "user_message"], template=template
)
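# Buffer memory keeps the full conversation and injects it into the prompt
# through the "chat_history" variable.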
memory = ConversationBufferMemory(memory_key="chat_history")
llm_chain = LLMChain(
    llm=ChatOpenAI(temperature=0.5, model_name="gpt-3.5-turbo"),
    prompt=prompt,
    verbose=True,
    memory=memory,
)
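# Gradio's ChatInterface calls this with (message, history); the history
# argument is unused here because the LangChain memory already tracks context.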
def get_text_response(user_message, history):
    response = llm_chain.predict(user_message=user_message)
    return response
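# ChatInterface renders the message box and conversation pane around the callback.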
demo = gr.ChatInterface(get_text_response)
if __name__ == "__main__":
    demo.launch()  # To create a public link, set `share=True` in `launch()`. To enable errors and logs, set `debug=True` in `launch()`.