Create app.py
app.py
ADDED
@@ -0,0 +1,36 @@
+import gradio as gr
+import os
+from rag_pipeline import RAGPipeline
+import openai
+openai.api_key = os.environ.get('OPENAI_API_KEY')
+
+# Initialize the RAG pipeline
+rag = RAGPipeline("metadata_map.json", "pdfs")
+
+def process_query(question, response_format):
+    response = rag.query(question)
+
+    if response_format == "Markdown":
+        return response["markdown"]
+    else:
+        return response["raw"]
+
+# Define the Gradio interface
+iface = gr.Interface(
+    fn=process_query,
+    inputs=[
+        gr.Textbox(lines=2, placeholder="Enter your question here...", label="Question"),
+        gr.Radio(["Markdown", "Raw Text"], label="Response Format", value="Markdown")
+    ],
+    outputs=gr.Markdown(label="Response"),
+    title="Vaccine Coverage and Hesitancy Research QA",
+    description="Ask questions about vaccine coverage and hesitancy. The system will provide answers based on the available research papers.",
+    examples=[
+        ["What are the main factors contributing to vaccine hesitancy?", "Markdown"],
+        ["What are the current vaccine coverage rates in African countries?", "Raw Text"],
+    ],
+    allow_flagging="never"
+)
+
+# Launch the app
+iface.launch()
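Note: app.py imports a local rag_pipeline module that is not part of this commit. Only the pieces app.py touches can be inferred from the diff: a RAGPipeline constructor taking a metadata-map JSON path and a PDF directory, and a query() method returning a dict with "markdown" and "raw" keys. A minimal sketch of that assumed interface, with placeholder retrieval and generation logic rather than the actual implementation, might look like this:

# Hypothetical sketch of the RAGPipeline interface assumed by app.py.
# The real rag_pipeline module is not included in this commit; the
# constructor signature and the shape of query()'s return value are
# inferred from how app.py calls them. The body is a stand-in.
import json


class RAGPipeline:
    def __init__(self, metadata_path, pdf_dir):
        # Load the metadata map and remember where the source PDFs live.
        with open(metadata_path) as f:
            self.metadata = json.load(f)
        self.pdf_dir = pdf_dir

    def query(self, question):
        # Placeholder: a real implementation would retrieve relevant
        # passages from the indexed PDFs and ask the LLM for an answer.
        answer = f"(retrieval not implemented) You asked: {question}"
        return {
            "markdown": f"**Answer**\n\n{answer}",
            "raw": answer,
        }

With a stub like this in place, the Space starts and the Gradio interface renders; the markdown/raw toggle in process_query works against whatever query() returns.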