File size: 2,397 Bytes
570faf9
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
import html

import gradio as gr
import nest_asyncio
import uvicorn
from fastapi import FastAPI, Request, Form
from fastapi.responses import HTMLResponse

from tafsir import tafsir

# Application instance; all routes below are registered against it.
app=FastAPI()
@app.on_event("startup")
async def startup_event():
    """Load the tafsir model once at application startup.

    The model is stored in the module-level global ``bot`` so that the
    request handlers below can call it without re-loading per request.

    NOTE(review): ``@app.on_event("startup")`` is deprecated in recent
    FastAPI releases in favor of lifespan handlers — consider migrating.
    """
    global bot
    bot=tafsir()

@app.get("/",response_class=HTMLResponse)
async def home():
    """Serve the landing page: a form that POSTs a topic to /analyze/."""
    page = """
    <html>
        <head>
            <title>Tafsir Topic</title>
        </head>
        <body>
            <h1>Topic Input</h1>
            <form method="post" action="/analyze/">
                <input type="text" name="text" placeholder="Enter topic" autocomplete="off" required>
                <input type="submit" value="Analyze">
            </form>
        </body>
    </html>
    """
    return HTMLResponse(content=page, status_code=200)


@app.post("/analyze/", response_class=HTMLResponse)
async def analyze_text(text: str = Form(...)):
    """Run the tafsir model on a user-submitted topic and render the result.

    Parameters:
        text: topic string posted from the form on the home page.

    Returns:
        HTMLResponse showing the escaped topic and the model's top results.
    """
    prediction = bot.return_tafsir(text)
    # Escape user-controlled and model-produced values before interpolating
    # into HTML to prevent cross-site scripting (XSS).
    safe_text = html.escape(text)
    safe_prediction = html.escape(str(prediction))
    html_content = f"""
    <html>
        <head>
            <title>Analysis Result</title>
        </head>
        <body>
            <h1>Analysis Result:</h1>
            <p>Topic: {safe_text}</p>
            <p>Top 3 results: {safe_prediction}</p>
            <button><a href="/" >Back</a></button>
        </body>
    </html>
    """
    return HTMLResponse(content=html_content, status_code=200)

@app.post("/test/")
async def test():
    """Smoke-test endpoint: run the model on a fixed topic and log the output."""
    result = bot.return_tafsir("tolerance")
    print(result)
    return "finished"


@app.get("/test/{inputs}")
def greet(inputs):
    """Echo endpoint: greet whatever path segment was supplied."""
    return f"hello {inputs}"

# @app.get("/gradio")
# async def gradio_test():
#     iface = gr.Interface(fn=greet, inputs=  [
#         gr.Textbox(
#             label="Input",
#             info="Find ambiguities in the following",
#             lines=3,
#             value="The test can only continue if it receives all inputs from previous page.",
#         ),
#     ], outputs=  gr.Textbox(
#             label="Input",
#             info="Find ambiguities in the following",
#             lines=3,
#             value="The test can only continue if it receives all inputs from previous page.",
#         ),
#     theme=gr.themes.Base())

    # iface.launch()