import gradio as gr | |
def ask_llm(question):
    """Mock LLM call: echo *question* back with a fixed prefix.

    Replace this stub with a real LLM API call in production.
    """
    prefix = "LLM Response: "
    return prefix + question
def chat_with_llm(user_input):
    """Gradio callback: forward the user's text to the LLM and return its reply."""
    reply = ask_llm(user_input)
    return reply
# Build the Gradio UI.
# NOTE: the legacy `gr.inputs.Textbox` namespace was deprecated in Gradio 3.x
# and removed in 4.x — use the top-level `gr.Textbox` component instead.
iface = gr.Interface(
    fn=chat_with_llm,
    inputs=gr.Textbox(lines=2, placeholder="Ask me anything!"),
    outputs="text",
    title="Chat with LLM",
    description="Type your question below and get responses from an LLM.",
)

# Launch only when run as a script, so importing this module
# (e.g. from tests or another app) does not start a web server.
if __name__ == "__main__":
    iface.launch()