import random
from datetime import datetime
import requests
import re

from myproject.config.config_reader import get_current_proxy_config
from myproject.settings import USER_AGENT_LIST, ipPool

# uid API endpoint: http://bapi.51daili.com/getapi2?linePoolIndex=-1&packid=2&time=2&qty=1&port=1&format=txt&usertype=17&uid=61142
# Username/password management page: https://www.51daili.com/index/user/namepwd.html
# Read from the config file which 51daili proxy account to use.
# NOTE(review): index [1] presumably selects the account record — a mapping with
# "uid", "accessName" and "accessPassword" keys (see its use in refresh_ip_pool);
# confirm against get_current_proxy_config.
currentUse = get_current_proxy_config()[1]
# Middleware below fetches proxy IPs from 51daili.
class RandomUserAgent(object):
    """Scrapy downloader middleware: random User-Agent + rotating 51daili proxies.

    Keeps the shared ``ipPool`` list (imported from ``myproject.settings``)
    topped up with proxies fetched from the 51daili API, hands them out
    round-robin, and retries blocked/failed requests on a fresh proxy up to
    ``max_retries`` times.
    """

    # Matches "ip":"1.2.3.4:8080" entries in the 51daili JSON response.
    # Compiled once instead of being re-built (and duplicated) per call.
    _IP_RE = re.compile(r'"ip":"(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d+)"')

    def __init__(self):
        self.max_retries = 10          # max retry attempts per request
        self.current_proxy_index = -1  # round-robin cursor into ipPool
        self.min_ip_pool_size = 14     # minimum number of pooled proxies
        # Fill the proxy pool before the first request is processed.
        self.refresh_ip_pool()
        print("初始化完毕")

    def process_request(self, request, spider):
        """Attach a random User-Agent and the next pooled proxy to *request*."""
        self.ua = random.choice(USER_AGENT_LIST)
        request.headers['User-Agent'] = self.ua
        print("设置代理前", datetime.now())
        request.meta['proxy'] = self.get_next_proxy()
        print("设置代理后", datetime.now())
        # Initialise per-request retry bookkeeping on the first pass only.
        request.meta.setdefault('retries', 0)
        request.meta.setdefault('error_info', [])

    def process_response(self, request, response, spider):
        """Retry responses that look blocked/anti-bot; pass good ones through."""
        print("接收数据后", datetime.now())
        retries = request.meta.get('retries', 0)
        # Non-200, a verification page, a known spam placeholder article, or
        # the eastmoney error URL all mean the proxy was rejected.
        blocked = (
            response.status != 200
            or "身份核实" in response.text
            or "A股：大家做好心理准备了，A股，很可能要迎来大级别的行情" in response.text
            or response.url == 'https://guba.eastmoney.com/error?type=1'
        )
        if blocked:
            error_info = f"Status code: {response.status}, URL: {request.url}"
            request.meta.setdefault('error_info', []).append(error_info)
            spider.logger.warning(error_info)
            print("问题页面", response.text)
            self.replace_ip(request.meta.get('proxy'))
            # Pass the response as fallback: Scrapy forbids returning None
            # from process_response, so on give-up the bad response is
            # forwarded instead.
            return self._retry(request, spider, retries, fallback=response)
        print("转发数据前", datetime.now())
        return response

    def process_exception(self, request, exception, spider):
        """Retry requests that raised (connection errors, timeouts, ...)."""
        retries = request.meta.get('retries', 0)
        error_info = f"Exception: {str(exception)}, URL: {request.url}"
        request.meta.setdefault('error_info', []).append(error_info)
        spider.logger.warning(error_info)
        print("请求异常，重试")
        # Log the traceback at debug level to aid diagnosis.
        import traceback
        spider.logger.debug(traceback.format_exc())
        self.replace_ip(request.meta.get('proxy'))
        # Returning None from process_exception is valid (Scrapy continues
        # with its default handling), so no fallback is passed here.
        return self._retry(request, spider, retries)

    def _retry(self, request, spider, retries, fallback=None):
        """Return a retry copy of *request*, or *fallback* once retries run out.

        :param retries: number of attempts already made for this request.
        :param fallback: value to return after ``max_retries`` is exceeded
            (``process_response`` passes the original response so the Scrapy
            contract — never return None from process_response — is honoured;
            ``process_exception`` leaves it as None, which is valid there).
        """
        if retries < self.max_retries:
            new_retries = retries + 1
            print(f"重试次数：{new_retries}/{self.max_retries}")
            # Build a fresh copy so Scrapy schedules it again, on a new proxy.
            retry_req = request.copy()
            retry_req.meta['retries'] = new_retries
            retry_req.dont_filter = True  # bypass the duplicate-request filter
            retry_req.meta['proxy'] = self.get_next_proxy()
            spider.logger.info(f"Retrying {request.url} (attempt {new_retries}/{self.max_retries})")
            return retry_req
        # Give up: persist the failed URL and log the accumulated errors.
        with open('errorurl.txt', 'a', encoding='utf-8') as f:
            f.write(request.url + '\n')
        error_info = request.meta.get('error_info', [])
        spider.logger.error(f"Gave up retrying {request.url} after {self.max_retries} "
                            f"attempts. Errors: {', '.join(error_info)}")
        return fallback

    def refresh_ip_pool(self):
        """Re-fill the shared proxy pool from the 51daili API.

        The pool list is mutated in place (clear/extend) rather than rebound:
        ``ipPool`` is imported from ``myproject.settings``, and rebinding the
        name here would silently detach this module's copy from every other
        module sharing that list.
        """
        print('-------------刷新IP池------------------')
        ipPool.clear()
        try:
            # Request min_ip_pool_size IPs in one call.
            fetch_count = self.min_ip_pool_size
            ips = requests.get(
                f'http://bapi.51daili.com/getapi2?linePoolIndex=-1&packid=2&time=11&qty={fetch_count}&port=1&format=json&field=ipport&dt=1&ct=1&usertype=17&uid={currentUse["uid"]}&accessName={currentUse["accessName"]}&accessPassword={currentUse["accessPassword"]}&skey=autoaddwhiteip',
                headers={'User-Agent': random.choice(USER_AGENT_LIST)}, timeout=10)
            print(ips.text, ips)
            ips.raise_for_status()  # raise on HTTP-level failure
            ip_port_list = self._IP_RE.findall(ips.text)
            ipPool.extend(f"http://{ipport}" for ipport in ip_port_list)
            print(f"成功获取 {len(ipPool)} 个代理IP", ipPool)
            # Top up in case some entries were missing/invalid.
            self.ensure_min_ip_pool_size()
        except Exception as e:
            print(f"获取IP代理时出错: {e}")
            # On failure start from an empty pool and let the top-up logic
            # (with its own fallback) take over.
            ipPool.clear()
            self.ensure_min_ip_pool_size()

    def replace_ip(self, ip):
        """Drop a dead proxy from the pool and top the pool back up."""
        if ip in ipPool:
            ipPool.remove(ip)
            print(f"移除失效代理IP: {ip}")
            self.ensure_min_ip_pool_size()

    def ensure_min_ip_pool_size(self):
        """Fetch proxies one at a time until the pool reaches its minimum size.

        Uses the configured ``currentUse`` account (previously this URL
        hard-coded a different account's credentials, contradicting
        ``refresh_ip_pool`` and the config-driven account selection).
        """
        while len(ipPool) < self.min_ip_pool_size:
            try:
                ips = requests.get(
                    f'http://bapi.51daili.com/getapi2?linePoolIndex=-1&packid=2&time=11&qty=1&port=1&format=json&field=ipport&dt=1&ct=1&usertype=17&uid={currentUse["uid"]}&accessName={currentUse["accessName"]}&accessPassword={currentUse["accessPassword"]}&skey=autoaddwhiteip',
                    headers={'User-Agent': random.choice(USER_AGENT_LIST)}, timeout=10)
                ips.raise_for_status()
                ip_port_list = self._IP_RE.findall(ips.text)
                if ip_port_list:
                    new_ip = f"http://{ip_port_list[0]}"
                    if new_ip not in ipPool:
                        ipPool.append(new_ip)
                        print(f"补充新代理IP: {new_ip}")
                else:
                    print("未能获取到有效IP")
                    break  # API returned nothing usable; avoid spinning forever
            except Exception as e:
                print(f"获取IP代理时出错: {e}")
                # Seed a placeholder (in place, keeping the shared list object)
                # so callers never see an empty pool.
                if not ipPool:
                    ipPool.append('http://127.0.0.1:8080')
                break

    def get_next_proxy(self):
        """Return the next proxy URL from the pool, round-robin."""
        # Top the pool up first if it has shrunk below the minimum.
        if len(ipPool) < self.min_ip_pool_size:
            self.ensure_min_ip_pool_size()
        if not ipPool:
            # An explicit error beats the ZeroDivisionError the modulo
            # below would otherwise raise on an empty pool.
            raise RuntimeError("proxy pool is empty and could not be refilled")
        self.current_proxy_index = (self.current_proxy_index + 1) % len(ipPool)
        return ipPool[self.current_proxy_index]
