from fastapi import FastAPI, Form
from fastapi.responses import HTMLResponse
import html
import nest_asyncio
import uvicorn
from transformers import pipeline

app = FastAPI()


@app.on_event("startup")
async def startup_event():
    # Load the sentiment-analysis pipeline once at startup and expose it globally.
    model_path = "cardiffnlp/twitter-roberta-base-sentiment-latest"
    global sentiment_task
    sentiment_task = pipeline("sentiment-analysis", model=model_path, tokenizer=model_path)

@app.get("/", response_class=HTMLResponse)
async def home():
html_content = """
<html>
<head>
<title>Text Classification</title>
</head>
<body>
<h1>Text Classification</h1>
<form method="post" action="/analyze/">
<input type="text" name="text" placeholder="Enter text to analyze" autocomplete="off" required>
<input type="submit" value="Analyze">
</form>
</body>
</html>
"""
return HTMLResponse(content=html_content, status_code=200)
@app.get("/{name}")
async def get_name(name: str):
    return {"Welcome To Here": f"{name}"}

@app.post("/analyze/", response_class=HTMLResponse)
async def analyze_text(text: str = Form(...)):
# Assuming your model is a function that takes input and returns predictions
prediction = sentiment_task(text)
html_content = """
<html>
<head>
<title>Analysis Result</title>
</head>
<body>
<h1>Analysis Result:</h1>
<p>Input Text: {input_text}</p>
<p>Prediction: {prediction}</p>
<button><a href="/" >Back</a><button>
</body>
</html>
""".format(input_text=text, prediction=prediction[0]['label'])
return HTMLResponse(content=html_content, status_code=200)
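
# The file imports nest_asyncio and uvicorn but the snippet above never uses them,
# so it presumably ends with a launcher along these lines. This is a minimal sketch,
# not confirmed by the source: host and port are assumptions (7860 is the
# conventional Hugging Face Spaces port).
if __name__ == "__main__":
    nest_asyncio.apply()  # allow uvicorn to run inside an already-running event loop (e.g. a notebook)
    uvicorn.run(app, host="0.0.0.0", port=7860)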