"""Gradio chat UI for "Ravi", a persona-driven assistant backed by an
OpenAI chat model via LangChain, with conversation-buffer memory so the
prompt carries the full chat history on every turn."""

import os

import gradio as gr
from langchain import LLMChain, PromptTemplate
from langchain.chat_models import ChatOpenAI
from langchain.memory import ConversationBufferMemory

# Read for visibility/fail-early checks; ChatOpenAI also picks this env var
# up automatically, so it is not passed explicitly below.
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")

# Persona prompt. {chat_history} is filled by ConversationBufferMemory
# (memory_key below must match), {user_message} by the caller.
template = """Meet Ravi, your youthful and witty personal assistant! At 20 years old, he's full of energy and always eager to help. Ravi's goal is to assist you with any questions or problems you might have. His enthusiasm shines through in every response, making interactions with him enjoyable and engaging. He is also a tech-savvy computer science student who spends countless hours coding, building apps, and keeping up with the latest tech trends. He enjoys discussing programming languages, AI, and gadgets, and is always ready to troubleshoot tech-related problems...

{chat_history}
User: {user_message}
Chatbot:"""

prompt = PromptTemplate(
    input_variables=["chat_history", "user_message"],
    template=template,
)

# memory_key must match the {chat_history} placeholder in the template.
memory = ConversationBufferMemory(memory_key="chat_history")

llm_chain = LLMChain(
    # temperature must be numeric, not the string '0.5' as before.
    llm=ChatOpenAI(temperature=0.5, model_name="gpt-3.5-turbo"),
    prompt=prompt,
    verbose=True,
    memory=memory,
)


def get_text_response(user_message, history):
    """Return the chatbot's reply for one user turn.

    `history` is supplied by gr.ChatInterface but is unused here:
    the LangChain ConversationBufferMemory already tracks the dialogue.
    """
    response = llm_chain.predict(user_message=user_message)
    return response


demo = gr.ChatInterface(get_text_response)

if __name__ == "__main__":
    # To create a public link, set `share=True` in `launch()`.
    # To enable errors and logs, set `debug=True` in `launch()`.
    demo.launch()