eckendoerffer committed on
Commit
78679f7
·
1 Parent(s): 00fe717

Update extract_news/4_extract_news_url.py

Browse files
Files changed (1) hide show
  1. extract_news/4_extract_news_url.py +19 -15
extract_news/4_extract_news_url.py CHANGED
@@ -52,32 +52,20 @@ def mysqli_return_number(conn, query, params=None):
52
  cursor.close()
53
  return result[0] if result else 0
54
 
55
- def process_news_source():
56
  global formatted_keys
57
-
58
- cursor = conn.cursor()
59
- query = ("SELECT `id`, `url`, `media` FROM `base_news` WHERE `link`='0' AND `step` > 0 ORDER BY Rand() LIMIT 1")
60
- cursor.execute(query)
61
- row = cursor.fetchone()
62
-
63
- if not row:
64
- return 'No unprocessed news source found.'
65
 
66
- id_source, url_source, id_media = row
67
  dom = get_dom_path(url_source)
68
  cursor.execute(f"UPDATE `base_news` SET `link`='1' WHERE `id`='{id_source}' LIMIT 1")
69
  conn.commit()
70
 
71
- querys = "SELECT COUNT(`id`) FROM `base_news` WHERE `step`='0'"
72
- nb_link = mysqli_return_number(conn, querys)
73
-
74
  file_path = f"sources/html_news/{id_source}.txt"
75
  if os.path.exists(file_path):
76
  html_content = get_html_content(file_path)
77
  else:
78
  return
79
 
80
- print(f"{nb_link} {url_source} {id_media} ({len(html_content)})")
81
 
82
  soup = BeautifulSoup(html_content, 'html.parser')
83
  nb_add = 0
@@ -118,8 +106,24 @@ def process_news_source():
118
  #print(Fore.RED + url)
119
  continue
120
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
121
  while True:
122
- process_news_source()
123
 
124
  conn.close()
125
 
 
52
  cursor.close()
53
  return result[0] if result else 0
54
 
55
+ def process_news_source(id_source, url_source, id_media):
56
  global formatted_keys
 
 
 
 
 
 
 
 
57
 
 
58
  dom = get_dom_path(url_source)
59
  cursor.execute(f"UPDATE `base_news` SET `link`='1' WHERE `id`='{id_source}' LIMIT 1")
60
  conn.commit()
61
 
 
 
 
62
  file_path = f"sources/html_news/{id_source}.txt"
63
  if os.path.exists(file_path):
64
  html_content = get_html_content(file_path)
65
  else:
66
  return
67
 
68
+ print(f"{id_source} {url_source} {id_media} ({len(html_content)})")
69
 
70
  soup = BeautifulSoup(html_content, 'html.parser')
71
  nb_add = 0
 
106
  #print(Fore.RED + url)
107
  continue
108
 
109
+ def process():
110
+ global formatted_keys
111
+
112
+ cursor = conn.cursor()
113
+ query = ("SELECT `id`, `url`, `media` FROM `base_news` WHERE `link`='0' AND `step` > 0 ORDER BY Rand() LIMIT 1000")
114
+ cursor.execute(query)
115
+ rows = cursor.fetchall()
116
+
117
+ if not rows:
118
+ print('No unprocessed news source found.')
119
+
120
+ # Sinon, parcourir les enregistrements et effectuer le traitement
121
+ for row in rows:
122
+ id_source, url_source, id_media = row
123
+ process_news_source(id_source, url_source, id_media)
124
+
125
  while True:
126
+ process()
127
 
128
  conn.close()
129