# GenerativeQNA — app.py
# RoBERTa extractive question-answering demo for Hugging Face Spaces.
import gradio as gr
from transformers import pipeline
# Extractive QA pipeline backed by a RoBERTa checkpoint fine-tuned on
# SQuAD 2.0 (deepset/roberta-base-squad2). Loaded once at import time so
# every request reuses the same model instance.
qa_pipeline = pipeline(
    model="deepset/roberta-base-squad2",
    task="question-answering",
)
# Prediction Function
def answer_question(context, question):
    """Answer *question* using only the information in *context*.

    Args:
        context: Paragraph of text the answer is extracted from.
        question: Natural-language question about the context.

    Returns:
        The extracted answer span as a string, or a user-facing message
        when an input is missing or no answer can be found.
    """
    # Guard against empty / whitespace-only inputs before calling the model.
    if not context.strip() or not question.strip():
        return "Please enter both context and question."
    result = qa_pipeline(
        question=question,
        context=context
    )
    # roberta-base-squad2 is trained on SQuAD 2.0, so it may predict an
    # empty span for unanswerable questions — surface that explicitly
    # instead of returning a blank answer to the UI.
    answer = result["answer"].strip()
    return answer if answer else "No answer found in the given context."
# Gradio UI: two text inputs (context + question) wired to a single text
# output that shows the extracted answer.
context_box = gr.Textbox(
    lines=8,
    label="Context",
    placeholder="Enter paragraph or context here...",
)
question_box = gr.Textbox(
    lines=2,
    label="Question",
    placeholder="Ask your question...",
)
demo = gr.Interface(
    fn=answer_question,
    inputs=[context_box, question_box],
    outputs=gr.Textbox(label="Answer"),
    title="RoBERTa Question Answering System",
    description="Ask questions from the given context using RoBERTa.",
)
# Launch for Hugging Face Spaces: bind to all interfaces on port 7860,
# the port Spaces routes traffic to. The __main__ guard keeps importing
# this module side-effect free (e.g. under test runners) while behaving
# identically when the file is run as a script.
if __name__ == "__main__":
    demo.launch(server_name="0.0.0.0", server_port=7860)