import os

import gradio as gr
import requests
from bs4 import BeautifulSoup

# Hugging Face Inference API endpoint for a RoBERTa sentiment model.
API_URL = "https://api-inference.huggingface.co/models/cardiffnlp/twitter-roberta-base-sentiment"
HF_TOKEN = os.getenv("HF_TOKEN")
headers = {"Authorization": f"Bearer {HF_TOKEN}"}

# The model returns generic label ids; map them to readable names.
label_map = {
    "LABEL_0": "Negative",
    "LABEL_1": "Neutral",
    "LABEL_2": "Positive",
}


def fetch_url_text(url):
    """Download a webpage and return its visible text, or an error message."""
    try:
        headers_req = {"User-Agent": "Mozilla/5.0"}
        response = requests.get(url, headers=headers_req, timeout=10)
        response.raise_for_status()
        soup = BeautifulSoup(response.text, "html.parser")
        return soup.get_text()
    except Exception as e:
        return f"URL error: {e}"


def analyze_sentiment(text_input, file_upload, url_input):
    # Pick exactly one input source, preferring file > URL > raw text.
    text = ""
    if file_upload:
        try:
            # file_upload is the uploaded file handle; .name holds its path on disk.
            with open(file_upload.name, "r", encoding="utf-8") as f:
                text = f.read()
        except Exception as e:
            return f"❌ File read error: {e}"
    elif url_input:
        text = fetch_url_text(url_input)
        if "URL error" in text:
            return text
    elif text_input:
        text = text_input
    else:
        return "⚠️ Please provide input."

    # Keep the request small: only the first 1000 characters are sent to the API.
    payload = {"inputs": text[:1000]}
    response = requests.post(API_URL, headers=headers, json=payload)
    try:
        # Successful responses look like [[{"label": "LABEL_2", "score": 0.98}, ...]].
        results = response.json()[0]
        top_result = max(results, key=lambda r: r["score"])
        sentiment = label_map[top_result["label"]]
        score = top_result["score"]
        return f"🧠 Sentiment: {sentiment} ({score:.2%})"
    except Exception as e:
        # Covers API errors (e.g. the model still loading) as well as malformed responses.
        return f"❌ Response parse error: {e}"


demo = gr.Interface(
    fn=analyze_sentiment,
    inputs=[
        gr.Textbox(label="Enter Text", lines=3, placeholder="Type text here..."),
        gr.File(label="Upload a .txt File", file_types=[".txt"]),
        gr.Textbox(label="Enter Webpage URL", placeholder="https://..."),
    ],
    outputs="text",
    title="Multi-Input Sentiment Analyzer",
    description="Analyze sentiment from input text, a text file, or a webpage using Hugging Face.",
)

demo.launch()
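
# Usage sketch (assumptions: the script above is saved as app.py and you have a
# Hugging Face access token; the HF_TOKEN environment variable name matches what
# os.getenv() reads above):
#
#   export HF_TOKEN="hf_..."   # your personal access token
#   python app.py
#
# Gradio then serves the interface locally, by default at http://127.0.0.1:7860.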