# FastAPI app for HF Spaces: 3 sentiment models + tiny test UI
from fastapi import FastAPI
from fastapi.responses import HTMLResponse
from pydantic import BaseModel
from typing import List, Dict, Any

try:
    import torch
    DEVICE = 0 if torch.cuda.is_available() else -1  # GPU if available, else CPU
except Exception:
    DEVICE = -1

from transformers import AutoTokenizer, AutoModelForSequenceClassification, TextClassificationPipeline

# ---- Models ----
FABSA_ID = "Anudeep-Narala/fabsa-roberta-sentiment"                # 3-class
TWITTER_ID = "cardiffnlp/twitter-roberta-base-sentiment-latest"    # 3-class
MOOD_ID = "Priyanshuchaudhary2425/MoodMeter-sentimental-analysis"  # 2-class (pos/neg)


def load_pipe(model_id: str) -> TextClassificationPipeline:
    """Load tokenizer + model and wrap them in a pipeline that returns scores for every class."""
    tok = AutoTokenizer.from_pretrained(model_id)
    mdl = AutoModelForSequenceClassification.from_pretrained(model_id)
    return TextClassificationPipeline(
        model=mdl,
        tokenizer=tok,
        device=DEVICE,
        return_all_scores=True,
        truncation=True,
    )


print("Loading models…")
fabsa = load_pipe(FABSA_ID)
twitter = load_pipe(TWITTER_ID)
mood = load_pipe(MOOD_ID)
print("Models ready.")


def norm3(scores: List[Dict[str, Any]]):
    """Map list[{label, score}] -> (pred, scores3, top, margin) for a 3-class model."""
    out = {"negative": 0.0, "neutral": 0.0, "positive": 0.0}
    for e in scores:
        lbl = e["label"].lower()
        s = float(e["score"])
        if "neg" in lbl or lbl == "label_0":
            out["negative"] = s
        elif "neu" in lbl or lbl == "label_1":
            out["neutral"] = s
        elif "pos" in lbl or lbl == "label_2":
            out["positive"] = s
    pred = max(out, key=out.get)
    vals = sorted(out.values(), reverse=True)
    top, margin = float(out[pred]), float(vals[0] - vals[1])
    return pred, out, top, margin


def norm2(scores: List[Dict[str, Any]]):
    """Embed a 2-class (pos/neg) model into the 3-class dict (neutral=0)."""
    d = {"negative": 0.0, "positive": 0.0}
    for e in scores:
        lbl = e["label"].lower()
        s = float(e["score"])
        if "neg" in lbl or lbl == "label_0":
            d["negative"] = s
        elif "pos" in lbl or lbl == "label_1":
            d["positive"] = s
    pred = "negative" if d["negative"] >= d["positive"] else "positive"
    top = float(max(d.values()))
    margin = float(abs(d["negative"] - d["positive"]))
    out3 = {"negative": d["negative"], "neutral": 0.0, "positive": d["positive"]}
    return pred, out3, top, margin


def fuse(fabsa_label: str, twitter_label: str) -> str:
    # Ensemble rule: FABSA=neg -> neg; else if Twitter=neu -> neu; else Twitter's label.
    if fabsa_label == "negative":
        return "negative"
    if twitter_label == "neutral":
        return "neutral"
    return twitter_label


app = FastAPI(title="HF Space — Sentiment Inference (FABSA + MoodMeter + Twitter)")


class PredictIn(BaseModel):
    text: str


class BatchIn(BaseModel):
    texts: List[str]


@app.get("/health")
def health():
    return {"ok": True, "device": DEVICE}


@app.post("/predict")
def predict(inp: PredictIn):
    t = (inp.text or "").strip()
    f_raw = fabsa(t)[0]
    t_raw = twitter(t)[0]
    m_raw = mood(t)[0]
    f_pred, f_scores, f_top, f_margin = norm3(f_raw)
    t_pred, t_scores, t_top, t_margin = norm3(t_raw)
    m_pred, m_scores, m_top, m_margin = norm2(m_raw)
    return {
        "text": t,
        "fabsa": {"label": f_pred, "scores": f_scores, "top": f_top, "margin": f_margin},
        "twitter": {"label": t_pred, "scores": t_scores, "top": t_top, "margin": t_margin},
        "mood": {"label": m_pred, "scores": m_scores, "top": m_top, "margin": m_margin},
        "ensemble": {"label": fuse(f_pred, t_pred)},
    }
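
# A minimal sketch of how a client could exercise /predict once the app is running.
# The host/port and module name (running locally via `uvicorn app:app`) are assumptions,
# not something this file pins down; adjust them for your Space URL.
#
#   curl -s -X POST http://localhost:8000/predict \
#        -H "Content-Type: application/json" \
#        -d '{"text": "Great battery life, but the screen scratches easily."}'
#
# The response carries per-model labels, scores, and margins plus the fused "ensemble" label.
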
@app.post("/batch")
def batch(inp: BatchIn):
    texts = [(x or "").strip() for x in inp.texts]
    f_raw = fabsa(texts, batch_size=16)
    t_raw = twitter(texts, batch_size=16)
    m_raw = mood(texts, batch_size=16)
    out = []
    for i, t in enumerate(texts):
        f_pred, f_scores, f_top, f_margin = norm3(f_raw[i])
        t_pred, t_scores, t_top, t_margin = norm3(t_raw[i])
        m_pred, m_scores, m_top, m_margin = norm2(m_raw[i])
        out.append({
            "text": t,
            "fabsa": {"label": f_pred, "scores": f_scores, "top": f_top, "margin": f_margin},
            "twitter": {"label": t_pred, "scores": t_scores, "top": t_top, "margin": t_margin},
            "mood": {"label": m_pred, "scores": m_scores, "top": m_top, "margin": m_margin},
            "ensemble": {"label": fuse(f_pred, t_pred)},
        })
    return {"items": out}


# --- Super-simple test page (no backend needed) ---
INDEX_HTML = """