import torch
import gradio as gr

from transformers import pipeline

# Load an extractive question-answering pipeline; deepset/roberta-base-squad2
# is a RoBERTa model fine-tuned on SQuAD 2.0.
pipe = pipeline(
    "question-answering",
    model="deepset/roberta-base-squad2",
)

# Function to read the content of a file object
def read_file_content(file_obj):
    """
    Reads the content of a file object and returns it.
    Parameters:
    file_obj (file object): The file object to read from.
    Returns:
    str or None: The content of the file, or None if it could not be read.
    """
    try:
        with open(file_obj.name, 'r', encoding='utf-8') as file:
            return file.read()
    except Exception as e:
        # Log the error and return None so the caller does not pass an error
        # message to the model as context.
        print(f"An error occurred while reading the file: {e}")
        return None

# Function to get the answer to a question from a file
def get_answer(file, question):
    """
    Answers a question based on the content of a file.
    Parameters:
    file (file object): The file object containing the context.
    question (str): The question to answer.
    Returns:
    str: The answer to the question.
    """
    if not question or not file:
        return "Please provide both a question and a file."
    context = read_file_content(file)
    if context is None:
        return "The uploaded file could not be read. Please upload a plain-text (UTF-8) file."
    answer = pipe(question=question, context=context)
    return answer["answer"]
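
# Quick check outside the UI (file name and contents here are placeholders):
#   with open("notes.txt", "w", encoding="utf-8") as f:
#       f.write("Gradio builds web UIs for machine learning demos.")
#   print(get_answer(open("notes.txt"), "What does Gradio build?"))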

# Create the Gradio interface
demo = gr.Interface(
    fn=get_answer,
    inputs=[gr.File(label="File Upload"), gr.Textbox(label="Prompt Input", lines=1)],
    outputs=[gr.Textbox(label="Response", lines=1)],
    title="@caesar-2series: RAG Application",
    description="Retrieval-Augmented Generation Question-Answering Application",
)

# Launch the Gradio interface
demo.launch()