import traceback
from crawlers.utils import fetch_json_data, parse_timestamp
from crawlers.constants import (
    QQNEWS_BASE_URL,
    QQNEWS_HOT_NEWS_URL,
    QQNEWS_HOT_QA_URL,
    REQUEST_HEADERS,
)


def _extract_hot_news(json_data, order):
    """Extract one normalized hot-news record from a QQNews feed item.

    Parameters:
        json_data: dict for a single news item as returned by the feed
        order: ranking position to record for this item

    Returns:
        dict with normalized fields (order, title, url, intro, photo_url,
        authors, hot, comment_count, likes, share_count, created_at)
    """
    authors = []
    card = json_data.get("card")
    if card:
        authors.append({
            # .get avoids a KeyError on items that carry a card but no source.
            "name": json_data.get("source"),
            "url": f"{QQNEWS_BASE_URL}/omn/author/{card['suid']}",
        })
    # hotEvent may be absent on some items; fall back to None instead of raising.
    hot_event = json_data.get("hotEvent") or {}
    # Guard against the thumbnail key existing but holding an empty list.
    thumbnails = json_data.get("thumbnails_qqnews_photo") or [""]
    return {
        "order": order,
        "title": json_data.get("title"),
        "url": json_data.get("url"),
        "intro": json_data.get("abstract"),
        "photo_url": thumbnails[0],
        "authors": authors,
        "hot": hot_event.get("hotScore"),
        "comment_count": json_data.get("commentNum"),
        "likes": json_data.get("likeInfo"),
        "share_count": json_data.get("shareCount"),
        "created_at": parse_timestamp(json_data.get("timestamp")),
    }


def get_hot_news_from_qqnews():
    """Fetch QQNews hot news; returns (pinned_news, regular_news) lists."""
    try:
        pinned, regular = [], []
        payload = fetch_json_data(
            QQNEWS_HOT_NEWS_URL, headers=REQUEST_HEADERS, data=None
        )
        items = payload["idlist"][0]["newslist"]

        # The item at index 1 is treated as the pinned (top) entry.
        if len(items) > 1:
            pinned.append(_extract_hot_news(items[1], 0))

        # Everything from index 2 onward is a regular hot item, ranked from 1.
        for rank, entry in enumerate(items[2:], 1):
            regular.append(_extract_hot_news(entry, rank))

        return pinned, regular

    except Exception as e:
        print(f"获取热搜数据异常: {e}")
        traceback.print_exc()
        raise


def get_hot_qa_from_qqnews():
    """Fetch the QQNews hot Q&A list.

    Returns:
        list of dicts, one per question, ranked from 1.

    Raises:
        Re-raises any exception from fetching/parsing after logging it.
    """
    try:
        # Pass headers by keyword for consistency with get_hot_news_from_qqnews;
        # a bare positional argument could silently bind to a different
        # parameter (e.g. a request body) of fetch_json_data.
        page_news_list = fetch_json_data(QQNEWS_HOT_QA_URL, headers=REQUEST_HEADERS)

        hot_qa_list = []
        for index, item in enumerate(page_news_list["hot_questions"], 1):
            hot_qa_list.append({
                "order": index,
                "title": item.get("title"),
                "url": item.get("url"),
                "photo_url": item.get("image"),
                "answerer": item.get("answer_name"),
                "answer": item.get("main_points"),
                "likes": item.get("approve_num"),
                "comment_count": item.get("comment_num"),
            })
    except Exception as e:
        print(f"获取热问数据异常: {e}")
        traceback.print_exc()
        raise

    return hot_qa_list

def main():
    """Print QQNews hot news and hot Q&A rankings to stdout."""
    try:
        pinned_list, hot_list = get_hot_news_from_qqnews()
        print("腾讯新闻热点:")
        print("置顶热点:")
        for entry in pinned_list:
            print(entry)
        print("热点:")
        for rank, entry in enumerate(hot_list, 1):
            print(f"{rank}: {entry}")

        print("\n腾讯新闻热问:")
        for rank, entry in enumerate(get_hot_qa_from_qqnews(), 1):
            print(f"{rank}: {entry}")
    except Exception as e:
        print(f"程序异常: {e}")
        traceback.print_exc()
        raise


if __name__ == "__main__":
    main()
