added first version
app.py
ADDED
@@ -0,0 +1,140 @@
import requests
import torch
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
from datetime import datetime

# GPT-2 setup
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model_name = "gpt2"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name).to(device)

# NewsAPI Setup (Replace with your own API key)
news_api_key = "35cbd14c45184a109fc2bbb5fff7fb1b"  # Replace with your NewsAPI key

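# Optional (an assumption, not part of the original app): the key could instead be read
# from an environment variable so it never sits in source control, e.g.
#   import os
#   news_api_key = os.environ.get("NEWS_API_KEY", "")
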
def fetch_trending_topics(search_term="artificial intelligence OR machine learning", page=1, page_size=9):
    try:
        # Fetch AI and Machine Learning related news from NewsAPI with search term
        url = f"https://newsapi.org/v2/everything?q={search_term}&sortBy=publishedAt&pageSize={page_size + 5}&page={page}&language=en&apiKey={news_api_key}"  # Fetch extra to avoid duplicates
        response = requests.get(url)
        data = response.json()

        # Check for valid response
        if response.status_code == 200 and "articles" in data:
            # Collect articles without duplicates
            trending_topics = []
            seen_titles = set()
            for article in data["articles"]:
                title = article["title"]
                if title not in seen_titles:  # Avoid duplicate titles
                    seen_titles.add(title)
                    trending_topics.append({
                        "title": title,
                        "description": article["description"] if article["description"] else "No description available.",
                        "url": article["url"],
                        "publishedAt": article["publishedAt"],
                    })

            if not trending_topics:
                return [{"title": "No news available", "description": "", "url": "", "publishedAt": ""}]

            return trending_topics
        else:
            print(f"Error: {data.get('message', 'No articles found')}")
            return [{"title": "No news available", "description": "", "url": "", "publishedAt": ""}]
    except Exception as e:
        print(f"Error fetching news: {e}")
        return [{"title": "Error fetching news", "description": "", "url": "", "publishedAt": ""}]

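# For reference, a sketch of the NewsAPI /v2/everything response shape, inferred from the
# fields read above (other fields omitted; not an exhaustive schema):
#   {"status": "ok", "totalResults": ..., "articles": [
#       {"title": ..., "description": ..., "url": ..., "publishedAt": ...}, ...]}
# Error responses carry a "message" field, which the fallback branch above prints.
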
# Analyze the trending topic using GPT-2
def generate_analysis(trending_topic):
    input_text = f"Provide a concise analysis about the following topic: '{trending_topic['title']}'. Please summarize its significance in the AI and Machine Learning field."

    # Tokenize and generate text with a max limit on tokens
    inputs = tokenizer(input_text, return_tensors="pt").to(device)
    outputs = model.generate(**inputs, max_length=80, num_return_sequences=1, do_sample=True, top_k=50, top_p=0.95)

    analysis = tokenizer.decode(outputs[0], skip_special_tokens=True)

    return analysis

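# Note: model.generate() returns the prompt tokens followed by the continuation, so the
# decoded analysis above also contains the prompt text. If only the continuation is wanted,
# one option (an assumption, not part of the original code) is to budget new tokens and
# slice the prompt off, e.g.:
#   outputs = model.generate(**inputs, max_new_tokens=60, do_sample=True, top_k=50, top_p=0.95)
#   analysis = tokenizer.decode(outputs[0][inputs["input_ids"].shape[-1]:], skip_special_tokens=True)
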
# Combine both functions for Gradio
def analyze_trends(page=1, page_size=9):
    search_term = "artificial intelligence OR machine learning"  # Fixed search term
    trending_topics = fetch_trending_topics(search_term=search_term, page=page, page_size=page_size)
    topic_analysis = []

    for topic in trending_topics:
        if topic["title"] not in ["Error fetching news", "No news available"]:
            analysis = generate_analysis(topic)
            topic_analysis.append({
                "title": topic["title"],
                "description": topic["description"],
                "analysis": analysis,
                "url": topic["url"],
                "publishedAt": topic["publishedAt"],
            })
        else:
            topic_analysis.append({
                "title": topic["title"],
                "description": topic["description"],
                "analysis": "Unable to retrieve or analyze data.",
                "url": topic["url"],
                "publishedAt": topic["publishedAt"],
            })

    # Limit the results to the specified page size
    return topic_analysis[:page_size]  # Ensure only the specified number of articles is returned

# Gradio UI with 3 Columns Layout for Displaying News
def display_news_cards(page=1, page_size=9):
    analysis_results = analyze_trends(page=page, page_size=page_size)
    current_date = datetime.now().strftime("%d-%m-%Y")  # Format: DD-MM-YYYY

    # The result is rendered by a gr.HTML component, so the heading is built as HTML
    # (Markdown syntax would be shown literally).
    display = f"<h3>AI & Machine Learning News for {current_date}</h3>"

    # Create a 3-column layout
    display += "<div style='display:flex; flex-wrap:wrap; justify-content:space-between;'>"
    for news_item in analysis_results:
        # Each news box in a flex box with equal width
        display += f"""
        <div style='flex: 1 1 30%; border:1px solid black; margin:10px; padding:10px; box-sizing:border-box;'>
            <b>{news_item['title']}</b><br/>
            <i>{news_item['publishedAt']}</i><br/><br/>
            {news_item['description']}<br/><br/>
            <a href='{news_item['url']}' target='_blank'>Read more</a><br/><br/>
            <b>Analysis:</b> {news_item['analysis']}<br/><br/>
        </div>
        """
    display += "</div>"

    return display

# Gradio UI with Header, Search Option, and Submit Button
def gradio_interface():
    with gr.Blocks() as demo:
        # Header with background colour
        gr.Markdown("""<h1 style='text-align:center; color:white; background-color:#007BFF; padding:20px; border-radius:10px;'>AI & Machine Learning News Analyzer</h1>""", elem_id="header")

        # Fixed search term displayed to the user
        gr.Markdown("<p style='text-align:center;'>Search term: <b>artificial intelligence OR machine learning</b></p>")

        # Sliders for page number and news per page
        page = gr.Slider(minimum=1, maximum=5, step=1, label="Page Number", value=1)
        page_size = gr.Slider(minimum=6, maximum=15, step=3, label="News per Page", value=9)

        # Button to fetch and analyze news
        analyze_button = gr.Button("Submit")

        # Output area for displaying the news
        news_output = gr.HTML()

        # Link the button click to the display function
        analyze_button.click(display_news_cards, inputs=[page, page_size], outputs=news_output)

    return demo

# Launch the Gradio UI
if __name__ == "__main__":
    gradio_interface().launch()