import os
import gradio as gr
from langchain.chat_models import ChatOpenAI
from langchain import LLMChain, PromptTemplate
from langchain.memory import ConversationBufferMemory

# Read the OpenAI API key from the environment (set it as a secret in the Space settings).
OPENAI_API_KEY = os.getenv('OPENAI_API_KEY')
template = """ou are a sports-loving high school student with a keen interest in multiple sports, from soccer and basketball to tennis and swimming. You closely follow sports events, stats, and news, making you the go-to person for all sports-related discussions and predictions. | |
{chat_history} | |
User: {user_message} | |
Chatbot:""" | |
prompt = PromptTemplate(
    input_variables=["chat_history", "user_message"], template=template
)

memory = ConversationBufferMemory(memory_key="chat_history")
llm_chain = LLMChain(
    llm=ChatOpenAI(temperature=0.5, model_name="gpt-3.5-turbo"),
    prompt=prompt,
    verbose=True,
    memory=memory,
)
def get_text_response(user_message, history):
    # `history` is supplied by gr.ChatInterface but is not used here,
    # since ConversationBufferMemory already tracks the chat history.
    response = llm_chain.predict(user_message=user_message)
    return response
demo = gr.ChatInterface(get_text_response)

if __name__ == "__main__":
    demo.launch()  # To create a public link, set `share=True` in `launch()`. To enable errors and logs, set `debug=True` in `launch()`.
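# For example, the launch call described in the comment above would look like this
# (both flags are optional and not required for the Space to run):
# demo.launch(share=True, debug=True)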