Spaces:
Sleeping
Sleeping
hasanriaz121
committed on
Commit
•
570faf9
1
Parent(s):
d8a5fc6
init
Browse files- Dockerfile +22 -0
- docs.pkl +3 -0
- main.py +82 -0
- requirements.txt +12 -0
- tafsir.py +42 -0
- test.ipynb +0 -0
Dockerfile
ADDED
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
FROM python:3.9

WORKDIR /code

# Install dependencies first so the (slow) pip layer is cached independently
# of code/data changes.
COPY ./requirements.txt /code/requirements.txt
RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt

# Copy the large pickled corpus AFTER the pip layer: editing docs.pkl no
# longer invalidates the dependency-install cache (it previously did).
COPY ./docs.pkl /code/docs.pkl

# Run as an unprivileged user (required by HF Spaces, good practice anyway).
RUN useradd -m -u 1000 user
USER user

ENV HOME=/home/user \
    PATH=/home/user/.local/bin:$PATH

WORKDIR $HOME/app

COPY --chown=user . $HOME/app

# HF Spaces expects the app to listen on port 7860.
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"]
|
docs.pkl
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:5d19262ec39e645e836566e03f3852eedde6203589640e606a19b372f2862889
|
3 |
+
size 1350644
|
main.py
ADDED
@@ -0,0 +1,82 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from contextlib import asynccontextmanager

from fastapi import FastAPI, Request, Form
from fastapi.responses import HTMLResponse
import nest_asyncio
import uvicorn
import gradio as gr

from tafsir import tafsir

# Module-level handle to the RAG helper; built once at application startup
# because constructing it (embedding model + vector store) is expensive.
bot = None


@asynccontextmanager
async def lifespan(app: FastAPI):
    """Create the tafsir pipeline before serving the first request."""
    global bot
    bot = tafsir()
    yield  # nothing to tear down on shutdown


# lifespan replaces the deprecated @app.on_event("startup") hook.
app = FastAPI(lifespan=lifespan)
15 |
+
@app.get("/",response_class=HTMLResponse)
async def home():
    """Render the landing page: a one-field form that POSTs a topic to /analyze/."""
    html_content = """
    <html>
    <head>
    <title>Tafsir Topic</title>
    </head>
    <body>
    <h1>Topic Input</h1>
    <form method="post" action="/analyze/">
    <input type="text" name="text" placeholder="Enter topic" autocomplete="off" required>
    <input type="submit" value="Analyze">
    </form>
    </body>
    </html>
    """
    # status_code=200 is FastAPI's default; kept explicit to match the
    # sibling /analyze/ handler.
    return HTMLResponse(content=html_content, status_code=200)
|
32 |
+
|
33 |
+
|
34 |
+
@app.post("/analyze/", response_class=HTMLResponse)
async def analyze_text(text: str = Form(...)):
    """Run the RAG pipeline on the submitted topic and render the result.

    Args:
        text: topic string posted from the landing-page form.

    Returns:
        An HTML page echoing the topic and the model's summary.
    """
    from html import escape  # local import keeps the module import block untouched

    prediction = bot.return_tafsir(text)
    # escape() both values: the original interpolated user input and model
    # output into the page unescaped — a reflected-XSS hole.  Also fixed the
    # malformed trailing <button> (was never closed).
    html_content = """
    <html>
    <head>
    <title>Analysis Result</title>
    </head>
    <body>
    <h1>Analysis Result:</h1>
    <p>Topic: {input_text}</p>
    <p>Top 3 results: {prediction}</p>
    <button><a href="/" >Back</a></button>
    </body>
    </html>
    """.format(input_text=escape(text), prediction=escape(prediction))
    return HTMLResponse(content=html_content, status_code=200)
|
52 |
+
|
53 |
+
@app.post("/test/")
async def test():
    """Smoke-test endpoint: run the pipeline on a fixed topic and log the output."""
    result = bot.return_tafsir("tolerance")
    print(result)
    return "finished"
|
59 |
+
|
60 |
+
|
61 |
+
@app.get("/test/{inputs}")
def greet(inputs):
    """Echo endpoint used to verify path-parameter routing."""
    return f"hello {inputs}"
|
64 |
+
|
65 |
+
# @app.get("/gradio")
|
66 |
+
# async def gradio_test():
|
67 |
+
# iface = gr.Interface(fn=greet, inputs= [
|
68 |
+
# gr.Textbox(
|
69 |
+
# label="Input",
|
70 |
+
# info="Find ambiguities in the following",
|
71 |
+
# lines=3,
|
72 |
+
# value="The test can only continue if it receives all inputs from previous page.",
|
73 |
+
# ),
|
74 |
+
# ], outputs= gr.Textbox(
|
75 |
+
# label="Input",
|
76 |
+
# info="Find ambiguities in the following",
|
77 |
+
# lines=3,
|
78 |
+
# value="The test can only continue if it receives all inputs from previous page.",
|
79 |
+
# ),
|
80 |
+
# theme=gr.themes.Base())
|
81 |
+
|
82 |
+
# iface.launch()
|
requirements.txt
ADDED
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
fastapi
|
2 |
+
requests
|
3 |
+
torch==1.11.*
|
4 |
+
transformers
|
5 |
+
uvicorn[standard]
|
6 |
+
nest_asyncio==1.5.*
|
7 |
+
python-multipart
|
8 |
+
langchain
|
9 |
+
sentence-transformers
|
10 |
+
langchain_groq
|
11 |
+
# NOTE: "pickle" removed — it is part of the Python standard library and is
# not a PyPI package, so "pip install pickle" fails the whole install.
|
12 |
+
chromadb  # correct package name; "chroma" on PyPI is an unrelated project
llama-parse  # imported by tafsir.py but missing from the original list
langchain-community  # provides the Chroma / HuggingFaceEmbeddings wrappers used in tafsir.py
|
tafsir.py
ADDED
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import nest_asyncio
|
2 |
+
from llama_parse import LlamaParse
|
3 |
+
from langchain_text_splitters import RecursiveCharacterTextSplitter
|
4 |
+
from langchain_community.vectorstores import Chroma
|
5 |
+
from langchain_community.embeddings.huggingface import HuggingFaceEmbeddings
|
6 |
+
from langchain_groq import ChatGroq
|
7 |
+
from langchain_core.prompts import ChatPromptTemplate
|
8 |
+
from langchain_core.output_parsers import StrOutputParser
|
9 |
+
from langchain_core.runnables import RunnablePassthrough
|
10 |
+
import pickle
|
11 |
+
|
12 |
+
|
13 |
+
class tafsir:
    """Retrieval-augmented QA over a pickled tafsir document collection.

    Builds an in-memory Chroma index from ``docs.pkl`` and wires a
    retriever -> prompt -> Groq LLM -> string-parser chain.
    """

    def __init__(self):
        import os  # local import: module import block left untouched

        # Context manager guarantees the file handle is closed (the original
        # pickle.load(open(...)) leaked it).
        # NOTE(review): unpickling an untrusted file can execute arbitrary
        # code — only ship a docs.pkl produced by this project.
        with open("docs.pkl", "rb") as fh:
            pkl_docs = pickle.load(fh)

        self.store = Chroma.from_documents(
            documents=pkl_docs,
            embedding=HuggingFaceEmbeddings(model_name="BAAI/bge-base-en-v1.5"),
        )
        # (sic) attribute name "retreiver" kept for backward compatibility
        # with any external code reading it.
        self.retreiver = self.store.as_retriever()

        # SECURITY: the original hard-coded a Groq API key here; that secret
        # is leaked in version control and must be rotated.  Read it from the
        # environment instead (set GROQ_API_KEY in the Space's secrets).
        llm = ChatGroq(
            api_key=os.environ.get("GROQ_API_KEY"),
            model="mixtral-8x7b-32768",
        )

        rag_template = """
        Provide a summary from the context, which contains interpretations of Quranic Texts that highlight the importance of the topic mentioned in the question. Do not include the Quranic Texts themselves, but mention which Surah and verse.

        Context:
        {context}

        Question:
        {question}
        """
        rag_prompt = ChatPromptTemplate.from_template(rag_template)
        self.rag_chain = (
            {"context": self.retreiver, "question": RunnablePassthrough()}
            | rag_prompt
            | llm
            | StrOutputParser()
        )

    def return_tafsir(self, topic):
        """Return the LLM's summary for *topic* as a plain string."""
        return self.rag_chain.invoke(topic)
|
40 |
+
|
41 |
+
|
42 |
+
|
test.ipynb
ADDED
File without changes
|