# cache_init.py
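# Warm-up script: fetches top headlines for each category from the GNews API
# and caches them in Redis (one hash per article, one set of article IDs per category).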
import os
import redis
import requests
import hashlib
from dotenv import load_dotenv

load_dotenv()

CATEGORIES = [
    "technology", "business", "science", "health", "world", "entertainment"
]

GNEWS_API_KEY = os.getenv("GNEWS_API_KEY")
REDIS_URL = os.getenv("UPSTASH_REDIS_URL")

if not GNEWS_API_KEY or not REDIS_URL:
    raise RuntimeError("GNEWS_API_KEY and UPSTASH_REDIS_URL must be set (see .env)")

r = redis.Redis.from_url(REDIS_URL, decode_responses=True)

def generate_id(url: str) -> str:
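    """Return a stable article ID: the SHA-1 hex digest of the article URL."""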
    return hashlib.sha1(url.encode()).hexdigest()

def fetch_and_cache_articles():
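    """Fetch top headlines for every category and cache any articles not yet in Redis."""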
    print("[INIT] Fetching and caching articles...")
    base_url = "https://gnews.io/api/v4/top-headlines"
    for category in CATEGORIES:
        params = {
            "topic": category,
            "lang": "en",
            "max": 20,
            "expand": "content",
            "token": GNEWS_API_KEY
        }
        try:
            response = requests.get(base_url, params=params, timeout=10)
            response.raise_for_status()
            articles = response.json().get("articles", [])

            for article in articles:
                article_id = generate_id(article["url"])
                if not r.exists(f"article:{article_id}"):
                    article_data = {
                        "id": article_id,
                        "title": article["title"],
                        "url": article["url"],
                        # Redis hashes cannot store None, so optional fields
                        # fall back to empty strings.
                        "description": article.get("description") or "",
                        "content": article.get("content") or "",
                        "image": article.get("image") or "",
                        "publishedAt": article["publishedAt"],
                        "category": category,
                        "source": article["source"]["name"]
                    }
                    # Store the article as a hash and index its ID in the
                    # per-category set used for lookups by category.
                    r.hset(f"article:{article_id}", mapping=article_data)
                    r.sadd(f"category:{category}", article_id)

        except Exception as e:
            print(f"[ERROR] Failed for category {category}: {e}")

    print("[INIT] Article caching complete.")


if __name__ == "__main__":
    fetch_and_cache_articles()