import json
import re
import time
import requests
from loguru import logger
import setting


class MainSpider:
    """Spider for WeChat Official Account article lists via mp.weixin.qq.com.

    Reads the login cookie from ``cookie.txt``, resolves a session token from
    the MP home page redirect URL, then resolves an account's ``fakeid`` by
    name and pages through its published articles, persisting each one into
    Redis (connection and layout come from the ``setting`` module).
    """

    def __init__(self):
        """Load cookie, validate it, fetch the token, and prepare request params.

        Raises:
            SystemExit: if the cookie is missing/malformed or no token can be
                obtained (both are unrecoverable for this spider).
        """
        self.headers = {
            "HOST": "mp.weixin.qq.com",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/138.0.0.0 Safari/537.36"
        }
        self.cookie = ""
        try:
            with open("cookie.txt", "r") as f:
                self.cookie = f.read().strip()
        except Exception as e:
            logger.error(f"读取 cookie.txt 失败: {e}")
            self.cookie = ""

        # A real MP session cookie is always far longer than 10 chars; shorter
        # content means the file is empty or holds garbage.
        if not self.cookie or len(self.cookie) < 10:
            logger.critical("cookie 为空或格式异常，请检查 cookie.txt！")
            raise SystemExit(1)

        self.token = self.get_token()
        self.headers['cookie'] = self.cookie

        # Base query parameters for the appmsgpublish endpoint.
        # begin / count / fakeid are filled in per request by __getWXMsgList;
        # token is kept in sync by _refresh_token.
        self.getMsgListParam = {
            'token': self.token,
            "sub": "list",
            "search_field": "null",
            'lang': 'zh_CN',
            'f': 'json',
            'ajax': '1',
            'begin': '0',
            'count': '5',
            'query': '',
            'fakeid': '',
            "free_publish_type": "1",
            "sub_action": "list_ex",
            'type': '101_1'
        }

    def get_token(self):
        """Fetch the session token from the MP home page redirect URL.

        Returns:
            str: the numeric token extracted from ``token=`` in the final URL.

        Raises:
            SystemExit: if no token can be extracted (cookie likely expired)
                or the request itself fails.
        """
        url = 'https://mp.weixin.qq.com'
        try:
            response = requests.get(url=url, headers={"Cookie": self.cookie}, timeout=10)
            # requests follows the redirect, so a valid session usually ends
            # on a URL carrying token=<digits>; 302 is accepted defensively.
            if response.status_code == 200 or response.status_code == 302:
                match = re.findall(r'token=(\d+)', str(response.url))
                if match:
                    logger.info(f'获取到token：{match[0]}')
                    return match[0]
            logger.critical("未能获取到 token，cookie 可能已失效！")
            raise SystemExit(1)
        except Exception as e:
            logger.error(f"获取token失败: {e}")
            raise SystemExit(1)

    def _refresh_token(self):
        """Re-fetch the token and propagate it into the list-request params.

        BUG FIX: previously a refreshed token was stored only on self.token,
        so getMsgListParam kept sending the stale token to appmsgpublish.
        """
        self.token = self.get_token()
        self.getMsgListParam['token'] = self.token

    def run(self, name: str,):
        """Entry point: validate session state, then crawl the account *name*."""
        if self.token == '':
            logger.error("请重新登录，并复制 cookie 到 cookie.txt 文件中!")
            return
        if not self.cookie:
            logger.error("cookie 不能为空!")
            return
        self.search_gzh(name)

    def search_gzh(self, name: str):
        """Resolve the account's fakeid by nickname/alias and start crawling.

        On an exact nickname or alias match the article-list crawl is started;
        on rate limiting the token is refreshed and this round is abandoned.

        Raises:
            SystemExit: on HTTP 420 (hard rate limit / invalid token).
        """
        params = {
            "action": "search_biz", "begin": 0, "count": 5, "query": name, "token": self.token, "lang": "zh_CN", "f": "json", "ajax": 1
        }
        res = requests.get("https://mp.weixin.qq.com/cgi-bin/searchbiz", params=params, headers=self.headers, timeout=10)
        if res.status_code == 200:
            data = res.json()
            err_msg = data.get('base_resp', {}).get('err_msg', '')
            wxlist = data.get('list', [])
            if err_msg == 'freq control':
                logger.warning("速度太快，被限额了，重新获取token！")
                self._refresh_token()
                time.sleep(60)
                return
            for item in wxlist:
                # .get avoids KeyError on entries lacking a nickname/alias key.
                if item.get('nickname') == name or item.get('alias') == name:
                    fakeid = item['fakeid']
                    self.__getWXMsgList(fakeid, name)
                    return
            logger.warning(f"未查询到公众号: {name}")
        elif res.status_code == 420:
            logger.critical("接口限制或token失效，采集终止！")
            raise SystemExit(1)
        else:
            logger.error(f"查询公众号失败: {res.status_code}")

    def __getWXMsgList(self, fakeid: str, name: str,):
        """Page through the account's published articles and store them in Redis.

        In one-shot mode (setting.ONCE_MODE) everything is collected with
        page size 20 into a Redis list; otherwise up to setting.CAIJI_PAGE
        pages (size 5) are collected into a zset (scored by update time) plus
        one key per article.

        Args:
            fakeid: the account id resolved by search_gzh.
            name: account name, used in Redis key names and logs.

        Raises:
            SystemExit: when the token becomes invalid mid-crawl.
        """
        appmsgUrl = 'https://mp.weixin.qq.com/cgi-bin/appmsgpublish'
        total_count = 0
        success_count = 0
        fail_count = 0
        use_zset = not setting.ONCE_MODE
        limit = 20 if setting.ONCE_MODE else 5
        begin = (setting.START_PAGE - 1) * limit
        # Consecutive bad rounds (empty payload or request failure). The
        # original code retried the same page forever on a permanently-empty
        # response and busy-looped on repeated request errors.
        bad_rounds = 0
        while True:
            page = begin // limit
            if not setting.ONCE_MODE and page >= setting.CAIJI_PAGE:
                # Not in one-shot mode: stop after setting.CAIJI_PAGE pages.
                logger.info(f"采集完成! 共采集 {setting.CAIJI_PAGE} 页")
                break
            logger.info(f"-----第{page + 1}页-----")
            self.getMsgListParam['begin'] = str(begin)
            self.getMsgListParam['count'] = limit
            self.getMsgListParam['fakeid'] = fakeid
            try:
                msgListResponse = requests.get(appmsgUrl, headers=self.headers, params=self.getMsgListParam, timeout=10)
                data = msgListResponse.json()

                err_msg = data.get('base_resp', {}).get('err_msg')
                if err_msg == 'freq control':
                    # Message fixed: the code waits 60s, not 5s as previously logged.
                    logger.warning("速度太快，重新获取token并等待60秒!")
                    self._refresh_token()
                    time.sleep(60)
                if err_msg and 'invalid' in err_msg:
                    logger.critical(f"token 失效或接口异常: {err_msg}")
                    raise SystemExit(1)
                publish_page = data.get('publish_page')
                if not publish_page:
                    bad_rounds += 1
                    if bad_rounds >= 3:
                        logger.error("连续多次未获取到内容，终止本账号采集!")
                        break
                    logger.warning("未获取到内容，跳过!")
                    time.sleep(3)
                    continue
                bad_rounds = 0
                publish_page = json.loads(publish_page)
                total_count = publish_page['total_count']

                logger.info(f"[{name}] 文章总数: {total_count}")
                if begin >= total_count:
                    logger.info(f"{name} 一次性采集完毕!")
                    break
                for publish_item in publish_page['publish_list']:
                    # publish_info is itself a JSON string; guard against
                    # missing/empty values before decoding.
                    publish_info = json.loads(publish_item.get('publish_info', '{}') or '{}')
                    for item in publish_info.get('appmsgex', []):
                        try:
                            aid = item.get('aid')
                            # Prefer update_time so re-runs bump the zset score.
                            now = int(item.get('update_time', item.get('create_time', 0)))
                            payload = json.dumps(item, ensure_ascii=False)
                            if use_zset:
                                setting.RDB.zadd(f"{setting.REDIS_PREFIX}:{name}:list", mapping={aid: now})
                                setting.RDB.set(f"{setting.REDIS_PREFIX}:{name}:article:{aid}", payload)
                            else:
                                setting.RDB.lpush(f"{setting.REDIS_PREFIX}:{name}:article_list", payload)
                            logger.info(f"{item.get('title')}: {item.get('link')}")
                            success_count += 1
                        except Exception as e:
                            logger.error(f"写入Redis失败: {e}")
                            fail_count += 1
                begin += limit
                time.sleep(3)
            except Exception as e:
                # SystemExit derives from BaseException and is NOT caught here,
                # so the hard-failure paths above still terminate the process.
                logger.error(f"采集文章列表异常: {e}")
                fail_count += 1
                bad_rounds += 1
                if bad_rounds >= 3:
                    logger.error("连续多次请求失败，终止本账号采集!")
                    break
                time.sleep(3)
        logger.success(f"[{name}] 本轮采集完成，成功: {success_count}，失败: {fail_count}")

if __name__ == "__main__":
    logger.add("spider.log", rotation="10 MB", encoding="utf-8", enqueue=True, retention="10 days")

    def _push_config(name: str, once: bool) -> None:
        """Write the per-account crawl config into Redis for downstream consumers.

        The two modes previously built near-identical dict literals inline;
        this helper deduplicates them. Key insertion order is preserved so the
        serialized JSON is byte-identical to the original payloads ("once" is
        only present, between "name" and "save_img_num", in one-shot mode).
        """
        cfg = {
            "es_host": setting.ES_HOST,
            "es_index": setting.ES_INDEX,
            "other_map": setting.OTHER_MAP,
            "name": name,
        }
        if once:
            cfg["once"] = True
        cfg.update({
            "save_img_num": setting.SAVE_IMAGE_NUM,
            "minio_bucket": setting.MINIO_BUCKET,
            "img_dir": setting.MINIO_IMAGE_DIR,
        })
        setting.RDB.set(f"{setting.REDIS_PREFIX}:{name}:config", json.dumps(cfg, ensure_ascii=False))

    main = MainSpider()
    if setting.ONCE_MODE:
        # One-shot mode: collect every account once, then exit.
        for name in setting.NAMES:
            logger.info(f"开始采集 {name} 公众号文章... (一次性模式)")
            _push_config(name, once=True)
            main.run(name)
            logger.info("结束采集，睡眠 20 秒！")
            time.sleep(20)
    else:
        # Scheduled mode: loop forever, sleeping CAIJI_INTERVAL_HOURS between rounds.
        while True:
            for name in setting.NAMES:
                logger.info(f"开始采集 {name} 公众号文章... (定时循环模式)")
                _push_config(name, once=False)
                main.run(name)
                logger.info("结束采集，睡眠 10 秒！")
                time.sleep(10)
            logger.info(f"全部采集完成，等待{setting.CAIJI_INTERVAL_HOURS}小时 继续下次执行！")
            time.sleep(setting.CAIJI_INTERVAL_HOURS * 3600)