Update app.py
app.py CHANGED
@@ -1,105 +1,136 @@
 import gradio as gr
-import
-user_data = {}
-    """
-    Fetch news articles based on user preferences using the Guardian API
-    """
-    params = {
-        "api-key": GUARDIAN_API_KEY,
-        "q": ",".join(interests),
-        "page-size": 3,
-        "order-by": "newest",
-        "show-fields": "headline,standfirst,thumbnail"
-    }
-        return "\n\n".join([article["fields"]["headline"] + "\n" + article["fields"]["standfirst"] for article in articles])
-    else:
-        return "Error fetching news articles."
-def generate_summary(text, max_length=130):
-    """
-    Generate summary for the provided text
-    """
-    if not text:
-        return "Please enter some text to summarize!"
-    # For now, just return a portion of the text
-    return f"Demo summary (first {max_length} characters):\n{text[:max_length]}"
-def
-    """
-    Save user preferences for personalization
-    """
     if not name or not language or not interests:
         return "Please fill in all fields!"
-    user_data["name"] = name
-    user_data["language"] = language
-    user_data["interests"] = interests
-    return f"Welcome {name}! Your preferences have been saved. Language: {language}, Interests: {', '.join(interests)}"
-def get_daily_news_summary():
-    """
-    Fetch and summarize the daily news for the user
-    """
-    if "name" not in user_data or "language" not in user_data or "interests" not in user_data:
-        return "Please set your preferences first."
-with
         )
         language_dropdown = gr.Dropdown(
-            choices=["English", "
             label="Preferred Language"
         )
         interests_checkboxes = gr.CheckboxGroup(
-            choices=["Technology", "
             label="News Interests"
         )
         preferences_output = gr.Textbox(label="Status")
             inputs=[name_input, language_dropdown, interests_checkboxes],
             outputs=[preferences_output]
         )
-    with gr.
         )
 import gradio as gr
+from transformers import AutoTokenizer, AutoModelForCausalLM
+import feedparser
+import threading
+from datetime import datetime
+import json
+import os
+
+# Initialize Llama 2 model and tokenizer
+model_name = "meta-llama-2-1b" # Replace with your actual model name
+tokenizer = AutoTokenizer.from_pretrained(model_name)
+model = AutoModelForCausalLM.from_pretrained(model_name)
+
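The "meta-llama-2-1b" identifier above is a placeholder (the in-line comment says to replace it); the actual Llama 2 checkpoints on the Hub live under the gated meta-llama organization, so loading one on a Space typically also needs an access token. A minimal sketch, assuming the hypothetical choice of meta-llama/Llama-2-7b-chat-hf and an HF_TOKEN secret:

import os
from transformers import AutoTokenizer, AutoModelForCausalLM

hub_id = "meta-llama/Llama-2-7b-chat-hf"  # assumed substitute for the placeholder name
hf_token = os.environ.get("HF_TOKEN")     # token with access to the gated repo
tokenizer = AutoTokenizer.from_pretrained(hub_id, token=hf_token)
model = AutoModelForCausalLM.from_pretrained(hub_id, token=hf_token)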
+def fetch_news_from_rss(interests):
+    """Fetch news from RSS feeds based on interests"""
+    # Example RSS feeds - you can expand this list
+    rss_feeds = {
+        "Technology": "https://feeds.feedburner.com/TechCrunch",
+        "Business": "https://feeds.feedburner.com/BusinessInsider",
+        "Sports": "https://www.espn.com/espn/rss/news",
+    }
+
+    articles = []
+    for interest in interests:
+        if interest in rss_feeds:
+            feed = feedparser.parse(rss_feeds[interest])
+            articles.extend(feed.entries[:3]) # Get top 3 articles per interest
+
+    return articles

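For reference, a quick way to sanity-check what fetch_news_from_rss returns: feedparser entries behave like dictionaries, which is why code later in this file reads them with .get. A small sketch:

articles = fetch_news_from_rss(["Technology"])
for entry in articles:
    print(entry.get("title", ""), "-", entry.get("link", ""))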
+def generate_summary(text, language="English"):
+    """Generate summary using Llama 2"""
+    prompt = f"""Please provide a concise summary of the following news article in {language}.
+Focus on the main points and key details:

+Article: {text}

+Summary:"""
+
+    inputs = tokenizer(prompt, return_tensors="pt", max_length=512, truncation=True)
+    outputs = model.generate(
+        inputs["input_ids"],
+        max_length=150,
+        min_length=50,
+        temperature=0.7,
+        num_return_sequences=1
+    )
+
+    summary = tokenizer.decode(outputs[0], skip_special_tokens=True)
+    return summary.split("Summary:")[1].strip()

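As committed, model.generate is called with max_length=150, which caps the total sequence length (prompt plus summary), and temperature only takes effect when sampling is enabled. A hedged alternative sketch, not part of this commit, that budgets new tokens separately:

outputs = model.generate(
    inputs["input_ids"],
    max_new_tokens=150,  # length budget for the generated summary only
    do_sample=True,      # sampling must be on for temperature to apply
    temperature=0.7,
)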
+def save_user_preferences(name, language, interests):
+    """Save user preferences to a JSON file"""
     if not name or not language or not interests:
         return "Please fill in all fields!"

+    preferences = {
+        "name": name,
+        "language": language,
+        "interests": interests,
+        "last_updated": datetime.now().isoformat()
+    }
+
+    with open(f"preferences_{name}.json", "w") as f:
+        json.dump(preferences, f)
+
+    return f"Preferences saved for {name}!"
+
+def get_personalized_summary(name):
+    """Get personalized news summary based on user preferences"""
+    try:
+        with open(f"preferences_{name}.json", "r") as f:
+            preferences = json.load(f)
+    except FileNotFoundError:
+        return "Please set your preferences first!"
+
+    # Fetch news based on interests
+    articles = fetch_news_from_rss(preferences["interests"])
+
+    summaries = []
+    for article in articles:
+        title = article.get("title", "")
+        content = article.get("description", "")
+
+        # Generate summary using Llama 2
+        summary = generate_summary(
+            content,
+            language=preferences["language"]
         )
+
+        summaries.append(f"📰 {title}\n\n{summary}\n\n---")
+
+    return "\n".join(summaries)

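Before the interface is wired up below, the two helpers can be exercised end to end with a direct call; a small sketch (name and interests are hypothetical):

# Hypothetical smoke test mirroring what the two Gradio buttons trigger below.
print(save_user_preferences("Alice", "English", ["Technology"]))
print(get_personalized_summary("Alice"))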
+# Create Gradio interface
+with gr.Blocks(title="Llama 2 News Summarizer") as demo:
+    gr.Markdown("# 📰 AI News Summarizer powered by Llama 2")
+
+    with gr.Tab("Set Preferences"):
+        name_input = gr.Textbox(label="Your Name")
         language_dropdown = gr.Dropdown(
+            choices=["English", "Spanish", "French", "Arabic"],
             label="Preferred Language"
         )
         interests_checkboxes = gr.CheckboxGroup(
+            choices=["Technology", "Business", "Sports", "Science", "Politics"],
             label="News Interests"
         )
+        save_button = gr.Button("Save Preferences")
         preferences_output = gr.Textbox(label="Status")
+
+        save_button.click(
+            save_user_preferences,
             inputs=[name_input, language_dropdown, interests_checkboxes],
             outputs=[preferences_output]
         )
+
+    with gr.Tab("Get News Summary"):
+        name_check = gr.Textbox(label="Enter your name to get summary")
+        get_summary_button = gr.Button("Get Summary")
+        summary_output = gr.Textbox(
+            label="Your Personalized News Summary",
+            lines=10
+        )
+
+        get_summary_button.click(
+            get_personalized_summary,
+            inputs=[name_check],
+            outputs=[summary_output]
         )

+if __name__ == "__main__":
+    demo.launch()
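Not part of this diff: for the new imports to resolve when the Space builds, its requirements.txt would need roughly the packages below (an assumption, since that file is not shown in the change).

gradio
transformers
torch
feedparser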