# Multi-input sentiment analyzer: classifies raw text, an uploaded .txt file,
# or the text of a webpage via the Hugging Face Inference API.
import os
import gradio as gr
import requests
from bs4 import BeautifulSoup

# Hosted inference endpoint for the CardiffNLP Twitter RoBERTa sentiment model;
# the access token is read from the HF_TOKEN environment variable.
API_URL = "https://api-inference.huggingface.co/models/cardiffnlp/twitter-roberta-base-sentiment"
HF_TOKEN = os.getenv("HF_TOKEN")
headers = {"Authorization": f"Bearer {HF_TOKEN}"}

# Map the model's raw label IDs to human-readable sentiment names
label_map = {
    "LABEL_0": "Negative",
    "LABEL_1": "Neutral",
    "LABEL_2": "Positive"
}

def fetch_url_text(url):
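    """Download a webpage and return its visible text, or an error string on failure."""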
    try:
        # Some sites reject requests that lack a browser-like User-Agent header
        headers_req = {"User-Agent": "Mozilla/5.0"}
        response = requests.get(url, headers=headers_req, timeout=10)
        response.raise_for_status()
        soup = BeautifulSoup(response.text, "html.parser")
        # Return the visible page text with tags stripped and whitespace collapsed
        return soup.get_text(separator=" ", strip=True)
    except Exception as e:
        return f"URL error: {e}"

def analyze_sentiment(text_input, file_upload, url_input):
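    """Classify the sentiment of the first provided input: file, then URL, then raw text."""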
    text = ""
    if file_upload:
        try:
            # gr.File may pass a filepath string or a tempfile-like object, depending on the Gradio version
            path = file_upload if isinstance(file_upload, str) else file_upload.name
            with open(path, "r", encoding="utf-8") as f:
                text = f.read()
        except Exception as e:
            return f"❌ File read error: {e}"
    elif url_input:
        text = fetch_url_text(url_input)
        # fetch_url_text returns an error message string on failure; surface it directly
        if "URL error" in text:
            return text
    elif text_input:
        text = text_input
    else:
        return "⚠️ Please provide input."

    # Send at most the first 1,000 characters to keep the request payload small
    payload = {"inputs": text[:1000]}
    response = requests.post(API_URL, headers=headers, json=payload)

    try:
        data = response.json()
        # The Inference API returns an error dict (e.g. while the model is still loading) instead of predictions
        if isinstance(data, dict) and "error" in data:
            return f"❌ API error: {data['error']}"
        # Otherwise the response is a list of score dicts; pick the highest-scoring label
        results = data[0]
        top_result = max(results, key=lambda r: r["score"])
        sentiment = label_map[top_result["label"]]
        score = top_result["score"]
        return f"🧠 Sentiment: {sentiment} ({score:.2%})"
    except Exception as e:
        return f"❌ Unexpected API response: {e}"

# Build the Gradio UI: three optional inputs, one text output
demo = gr.Interface(
    fn=analyze_sentiment,
    inputs=[
        gr.Textbox(label="Enter Text", lines=3, placeholder="Type text here..."),
        gr.File(label="Upload a .txt File", file_types=[".txt"]),
        gr.Textbox(label="Enter Webpage URL", placeholder="https://...")
    ],
    outputs="text",
    title="Multi-Input Sentiment Analyzer",
    description="Analyze sentiment from input text, a text file, or a webpage using Hugging Face."
)

demo.launch()
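
# To run locally (assuming the standard PyPI package names): pip install gradio requests beautifulsoup4,
# set HF_TOKEN to a Hugging Face access token, then run this script.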