2seriescs committed
Commit 4d865e8
Parent: 0f568b5

Update app.py

Files changed (1)
  1. app.py +8 -23
app.py CHANGED
@@ -1,13 +1,13 @@
 import torch
 import gradio as gr
 
+# Use a pipeline as a high-level helper
 from transformers import pipeline
 
-pipe = pipeline(
+question_answer = pipeline(
     "question-answering",
     model="deepset/roberta-base-squad2")
 
-# Function to read the content of a file object
 def read_file_content(file_obj):
     """
     Reads the content of a file object and returns it.
@@ -22,29 +22,14 @@ def read_file_content(file_obj):
         return context
     except Exception as e:
         return f"An error occurred: {e}"
-
-# Function to get the answer to a question from a file
+
 def get_answer(file, question):
-    """
-    Answers a question based on the content of a file.
-    Parameters:
-    file (file object): The file object containing the context.
-    question (str): The question to answer.
-    Returns:
-    str: The answer to the question.
-    """
-    if not question or not file:
-        return "Please provide both a question and a file."
     context = read_file_content(file)
-    answer = pipe(question=question, context=context)
+    answer = question_answer(question=question, context=context)
     return answer["answer"]
 
-# Create the Gradio interface
-demo = gr.Interface(fn=get_answer,
-                    inputs=[gr.File(label="File Upload"), gr.Textbox(label="Prompt Input", lines=1)],
-                    outputs=[gr.Textbox(label="Response", lines=1)],
-                    title="@caesar-2series: Rag Application",
-                    description="Retrieval Augmented Generation Questions-Answering Application")
-
-# Launch the Gradio interface
+demo = gr.Interface(fn=get_answer, inputs=[gr.File(label="File Upload"), gr.Textbox(label="Prompt Input", lines=1)],
+                    outputs=[gr.Textbox(label="Response", lines=1)],
+                    title="@caesar-2series: Rag Application",
+                    description="Retrieval Augmented Generation Questions-Answering Application")
 demo.launch()
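
Note: the renamed question_answer object is a standard transformers question-answering pipeline, so it can be exercised outside Gradio as well. A minimal sketch of a direct call follows; the context and question strings are illustrative only and are not taken from this repo.

from transformers import pipeline

# Same extractive QA model that app.py loads
question_answer = pipeline(
    "question-answering",
    model="deepset/roberta-base-squad2")

# The pipeline returns a dict with "answer", "score", "start", and "end"
result = question_answer(
    question="What does the app answer questions about?",            # illustrative question
    context="This Gradio app answers questions about an uploaded file.")  # illustrative context
print(result["answer"])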