import gradio as gr
from .generator import generate
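# NOTE: `generate` comes from the local generator module. Given the inputs and
# outputs wired up below, it is assumed to have a signature roughly like
#     def generate(query: str, context: str | list[dict]) -> str
# i.e. it takes the user query plus the retrieved context and returns the
# answer text; the exact signature and context parsing live in that module.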
# ---------------------------------------------------------------------
# Gradio Interface with MCP support
# ---------------------------------------------------------------------
ui = gr.Interface(
    fn=generate,
    inputs=[
        gr.Textbox(
            label="Query",
            lines=2,
            placeholder="Enter query here",
            info="The user query to answer using the provided context"
        ),
        gr.Textbox(
            label="Context",
            lines=8,
            placeholder="Paste relevant context here",
            info="Provide the context/documents to use for answering. The API expects a list of dictionaries, but the UI accepts anything."
        ),
    ],
    outputs=gr.Textbox(
        label="Generated Answer",
        lines=6,
        show_copy_button=True
    ),
    title="ChatFed Generation Module",
    description="Ask questions based on provided context. Intended for use in RAG pipelines as an MCP server alongside other ChatFed modules (e.g. context supplied by the semantic retriever service).",
    api_name="generate"
)
# Launch with MCP server enabled
if __name__ == "__main__":
    ui.launch(
        server_name="0.0.0.0",
        server_port=7860,
        mcp_server=True,
        show_error=True
    )
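For reference, once the app is running the same endpoint can also be called programmatically. The snippet below is a minimal sketch, assuming a local deployment on port 7860 and that a plain string is an acceptable context payload; adjust the URL and the context format to whatever the generator module actually expects. With mcp_server=True, recent Gradio versions additionally expose the same tool over MCP (typically at /gradio_api/mcp/sse), which is how the other ChatFed modules are intended to connect.

# Illustrative client call (hypothetical values; requires the gradio_client package)
from gradio_client import Client

client = Client("http://localhost:7860/")
answer = client.predict(
    "What does the source material say about adaptation finance?",  # query
    "Paste or forward the retrieved context here",                  # context
    api_name="/generate",
)
print(answer)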