import torch
import gradio as gr
# Use a pipeline as a high-level helper
from transformers import pipeline
question_answer = pipeline(
    "question-answering",
    model="deepset/roberta-base-squad2",
)
def read_file_content(file_obj):
    """
    Read the content of an uploaded file and return it as a string.

    Parameters:
        file_obj (file object): The uploaded file object to read from.

    Returns:
        str: The content of the file, or an error message if reading fails.
    """
    try:
        with open(file_obj.name, 'r', encoding='utf-8') as file:
            context = file.read()
        return context
    except Exception as e:
        return f"An error occurred: {e}"
def get_answer(file, question):
    # Use the uploaded file's text as the retrieval context for the QA pipeline,
    # then return only the extracted answer span.
    context = read_file_content(file)
    answer = question_answer(question=question, context=context)
    return answer["answer"]
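# Quick local sanity check, kept commented out so it never runs on the Space.
# "sample.txt" and the _Upload stand-in are hypothetical, purely for illustration:
#
# if __name__ == "__main__":
#     class _Upload:
#         name = "sample.txt"  # any plain-text file on disk
#     print(get_answer(_Upload(), "What is this document about?"))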
demo = gr.Interface(
    fn=get_answer,
    inputs=[gr.File(label="File Upload"), gr.Textbox(label="Prompt Input", lines=1)],
    outputs=[gr.Textbox(label="Response", lines=1)],
    title="@caesar-2series: RAG Application",
    description="Retrieval Augmented Generation Question-Answering Application",
)
demo.launch()