# genAIchatBot / app.py
import os
import gradio as gr
from langchain_openai import ChatOpenAI
from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate
from langchain.memory import ConversationBufferMemory
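# Note: LLMChain and ConversationBufferMemory are LangChain's legacy chain/memory APIs.
# Recent LangChain releases mark them as deprecated, but they still work for a simple chatbot like this.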
# ✅ Load OpenAI key from Hugging Face secrets
openai_api_key = os.getenv("OPENAI_API_KEY")
if not openai_api_key:
    raise ValueError("OPENAI_API_KEY not found. Please set it in Hugging Face → Settings → Variables and secrets.")
# Prompt Template
template = """Meet Arun, your youthful and witty personal assistant!
At 21 years old, he is full of energy and always eager to help.
Arun's goal is to assist you with any questions or problems you might have.
His enthusiasm shines through in every response, making interactions enjoyable and engaging.
{chat_history}
User: {user_message}
Chatbot:"""
prompt = PromptTemplate(
    input_variables=["chat_history", "user_message"],
    template=template
)
# Memory
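# Each Gradio session gets its own buffer (held in gr.State below), so conversations from different users never mix.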
def new_memory():
    return ConversationBufferMemory(memory_key="chat_history")
# LLM
llm = ChatOpenAI(
    temperature=0.5,
    model="gpt-4o-mini",
    api_key=openai_api_key
)
# --- Core Chat Function ---
def respond(user_message, history, memory_state):
    if memory_state is None:  # create new memory for each session
        memory_state = new_memory()
    chain = LLMChain(
        llm=llm,
        prompt=prompt,
        memory=memory_state,
        verbose=False
    )
    response = chain.predict(user_message=user_message)
    history = history + [[user_message, response]]
    return history, memory_state, ""  # last "" clears the textbox
# --- Gradio UI ---
with gr.Blocks() as demo:
    chatbot = gr.Chatbot(label="Arun AI Assistant")
    msg = gr.Textbox(placeholder="Type your message...", show_label=False)
    send = gr.Button("Send")
    memory_state = gr.State()
    msg.submit(respond, [msg, chatbot, memory_state], [chatbot, memory_state, msg])
    send.click(respond, [msg, chatbot, memory_state], [chatbot, memory_state, msg])
if __name__ == "__main__":
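    # share=True exposes a public Gradio link when running locally; a Hugging Face Space is already served at a public URL, so it is unnecessary there.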
    demo.launch(share=True)