Anne314159 committed on
Commit dd1f93b · verified · 1 Parent(s): fd154a5

Update app.py

Files changed (1)
  1. app.py +27 -7
app.py CHANGED
@@ -7,17 +7,37 @@ from bs4 import BeautifulSoup
 generator = pipeline('text-generation', model='dbmdz/german-gpt2')
 
 # Define a function to fetch trending news related to a specific niche
+import streamlit as st
+from transformers import pipeline
+import requests
+from bs4 import BeautifulSoup
+
+# Initialize a text generation pipeline
+generator = pipeline('text-generation', model='dbmdz/german-gpt2')
+
 def fetch_trending_news(niche):
     url = f"https://www.google.com/search?q={niche}+news&tbs=qdr:d"
     headers = {
         "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3"}
-    response = requests.get(url, headers=headers)
-    print("Response status code:", response.status_code) # Debug print
-    soup = BeautifulSoup(response.content, "html.parser")
-    news_items = soup.find_all("div", class_="BNeawe vvjwJb AP7Wnd")
-    print("Number of news items found:", len(news_items)) # Debug print
-    trending_news = [item.text for item in news_items[:5]] # Extract the top 5 news items
-    return trending_news
+    try:
+        response = requests.get(url, headers=headers)
+        if response.status_code == 200:
+            soup = BeautifulSoup(response.content, "html.parser")
+            # Adjusted to use more generic selectors that might be more stable
+            news_items = soup.find_all("div", class_="ZINbbc xpd O9g5cc uUPGi")
+            if not news_items:
+                print("No news items found, check your selectors.")
+                return []
+            trending_news = [item.find("div", class_="BNeawe vvjwJb AP7Wnd").text for item in news_items[:5]]
+            return trending_news
+        else:
+            print(f"Failed to fetch news, status code: {response.status_code}")
+            return []
+    except Exception as e:
+        print(f"Error fetching news: {e}")
+        return []
+
+
 
 # Define the pages
 def page_trending_niche():
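
For context, here is a minimal sketch of how the updated fetch_trending_news might be exercised from the Streamlit side of app.py. The body of page_trending_niche is not part of this diff, so the page wiring and widget labels below are assumptions for illustration only; the one property the sketch relies on from this commit is that the function now returns an empty list on any request or parsing failure.

# Illustrative only: the real page_trending_niche body is not shown in this commit.
import streamlit as st

def page_trending_niche():
    st.title("Trending News by Niche")
    # Widget label and default value are assumed, not taken from the commit
    niche = st.text_input("Enter a niche", value="technology")
    if st.button("Fetch trending news"):
        news = fetch_trending_news(niche)  # returns [] on HTTP or parsing errors after this change
        if news:
            for headline in news:
                st.write("-", headline)
        else:
            st.warning("No news items found; the Google result selectors may have changed.")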