File size: 3,422 Bytes
951e868
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
50d622e
951e868
 
 
 
 
 
 
 
 
 
50d622e
 
951e868
 
3f958b2
 
 
 
951e868
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
# -*- coding: utf-8 -*-

"""
News Source Extractor:

This script is designed to extract the content of news articles from various French media sources.
The URLs of these articles are retrieved from the `base_news` table, where articles marked with 
a `step` value of '0' are pending extraction.

To install the necessary packages:
pip install aiohttp mysql-connector-python

Once extracted, the content of each article is saved locally for further processing. This separation 
of content fetching and processing is intentional to optimize resource management.

The script operates in batches, processing a defined number of entries (`NB_BY_STEP`) at a time.
After extraction, the `step` value of the processed articles is updated to '1' to indicate completion.

Author     : Guillaume Eckendoerffer
Date       : 29-09-23
Repository : https://github.com/Eckendoerffer/TorchTrainerFlow/
             https://huggingface.co/datasets/eckendoerffer/news_fr
"""

import asyncio
import aiohttp
import time
import mysql.connector
import os

# Database configuration.
# The bracketed values are placeholders — replace them with real
# credentials before running the script.
db_config = {
    "host": "[host]",
    "user": "[user]",
    "password": "[passwd]",
    "database": "[database]"
}

# Number of articles fetched concurrently per batch (see main()).
NB_BY_STEP = 20
# Base directory; extracted HTML is written under <path>/sources/html_news/.
path = os.getcwd()  

def mysqli_return_number(conn, query):
    """Execute a scalar SQL query and return its first column as a number.

    Args:
        conn: An open MySQL connection (mysql.connector style).
        query: SQL statement whose first row / first column is the value
            of interest (typically a COUNT(...)).

    Returns:
        The first column of the first result row, or 0 when the query
        yields no rows.
    """
    cursor = conn.cursor()
    try:
        cursor.execute(query)
        result = cursor.fetchone()
    finally:
        # Release the cursor even when execute()/fetchone() raises,
        # otherwise the cursor leaks on query errors.
        cursor.close()
    return result[0] if result else 0

async def fetch_and_save(url, id_source):
    """Download *url* and save its raw HTML to sources/html_news/<id_source>.txt.

    The body is decoded as UTF-8 first, falling back to ISO-8859-1
    (common for older French sites). All network errors are printed and
    swallowed so a single bad URL cannot abort the whole batch —
    callers gather() many of these concurrently.

    Args:
        url: Article URL to fetch.
        id_source: `base_news` primary key; used as the output file name.
    """
    time_start_item = time.time()
    save_dir = f"{path}/sources/html_news"
    # Create the output directory up front: without this, the first run
    # on a fresh checkout dies with FileNotFoundError on open().
    os.makedirs(save_dir, exist_ok=True)
    try:
        async with aiohttp.ClientSession() as session:
            async with session.get(url) as response:
                byte_content = await response.read()
                try:
                    text_content = byte_content.decode('utf-8')
                except UnicodeDecodeError:
                    # Legacy-encoding fallback for non-UTF-8 pages.
                    text_content = byte_content.decode('ISO-8859-1')
                with open(f"{save_dir}/{id_source}.txt", "w", encoding="utf-8") as file:
                    file.write(text_content)
                time_end_item = time.time()
                print(f'{id_source}) {time_end_item-time_start_item:.5f} {url}')
    # Use the public exception aliases; aiohttp.client_exceptions is a
    # private module path that may move between aiohttp releases.
    except aiohttp.TooManyRedirects:
        print(f"Too many redirects for URL: {url}")
    except aiohttp.ClientConnectorError:
        print(f"Failed to connect to URL: {url}")
    except Exception as e:
        # Last-resort guard: log and keep the rest of the batch alive.
        print(f"Unexpected error for URL {url}: {str(e)}")


async def main():
    """Drain all pending articles (`step`='0') in batches of NB_BY_STEP.

    Each iteration: select a random batch of pending rows, mark the whole
    batch `step`='1' up front (so a crash never re-downloads the same
    rows forever), then fetch every URL concurrently. Loops until no
    pending row remains, then closes the connection.
    """
    conn = mysql.connector.connect(**db_config)
    while True:
        time_start = time.time()
        cursor = conn.cursor()
        # Parameterized LIMIT keeps the statement injection-safe.
        # NOTE(review): ORDER BY RAND() is O(n log n) per batch; fine for
        # moderate table sizes but worth revisiting if base_news grows.
        cursor.execute(
            "SELECT `id`, `url` FROM `base_news` WHERE `step`='0' "
            "ORDER BY RAND() LIMIT %s",
            (NB_BY_STEP,),
        )
        rows = cursor.fetchall()
        cursor.close()
        if not rows:
            break

        # Mark the whole batch as taken in one round-trip instead of one
        # UPDATE per row, with the ids bound as parameters rather than
        # interpolated into the SQL string.
        ids = [row[0] for row in rows]
        placeholders = ", ".join(["%s"] * len(ids))
        cursor = conn.cursor()
        cursor.execute(
            f"UPDATE `base_news` SET `step`='1' WHERE `id` IN ({placeholders})",
            ids,
        )
        cursor.close()
        # mysql-connector disables autocommit by default; without this
        # commit the step updates are never persisted.
        conn.commit()

        tasks = [fetch_and_save(url.strip(), id_source) for id_source, url in rows]
        await asyncio.gather(*tasks)

        nb_base = mysqli_return_number(conn, "SELECT COUNT(`id`) FROM `base_news` WHERE `step`='0'")
        time_elapsed = time.time() - time_start
        time_per_item = time_elapsed / NB_BY_STEP
        print(f"Remaining: {nb_base} - Time: {time_per_item:.3f}s/item")

    conn.close()

if __name__ == "__main__":
    # Guard the entry point so importing this module (e.g. for testing
    # or reuse of fetch_and_save) does not kick off the crawl.
    asyncio.run(main())