"""
代理池  将能用的IP地址放入到redis或本地文件中 在要用的时候就随机取出一个
"""
import json
import os
import re

import requests
from tqdm import tqdm


# Proxy-scraping class: harvest proxy IPs from free-proxy websites.
class ProxySpider():
    """Scrape free-proxy websites, validate each IP, and keep the working ones.

    Validated proxies are stored in ``self.proxies`` keyed by ``"ip:port"``,
    with a metadata list (source site, notes) as the value, e.g.::

        {'192.168.127.12:8080': ['www.proxy-list.download']}
    """

    def __init__(self):
        self.headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36',
        }
        # Validated proxies: {"ip:port": [source site, ...]}.
        self.proxies = {}
        # Real public IP of this machine; check_ip() compares against it to
        # verify a proxy actually masks our address.
        # FIX: added timeout so a slow endpoint cannot hang __init__ forever.
        self.origin_ip = requests.get('http://httpbin.org/get', timeout=10).json()['origin']

    def check_ip(self, ip: str) -> bool:
        """Return True if *ip* ("host:port") works as a proxy.

        Sends a request to a test endpoint through the proxy; the proxy is
        usable when the reported origin differs from this machine's real IP.

        :param ip: candidate proxy as "host:port".
        :return: True if the proxy relays the request and hides our IP.
        """
        # FIX: free proxies are plain HTTP proxies, so the proxy URL uses the
        # http:// scheme for both the 'http' and 'https' mapping keys (the
        # old 'https://' + ip form told requests to speak TLS *to* the proxy).
        proxy_url = 'http://' + ip
        try:
            seen_ip = requests.get('http://httpbin.org/get', proxies={
                'http': proxy_url,
                'https': proxy_url,
            }, timeout=10).json()['origin']
        except Exception:
            # Connection errors, timeouts and non-JSON replies all mean
            # "unusable" — best-effort probing, so swallow and report False.
            return False
        return seen_ip != self.origin_ip

    def get_random_ip(self) -> str:
        """Fetch one random proxy ("host:port") from an online pool.

        :return: the response body, a single "host:port" string.
        """
        res = requests.get('https://proxypool.scrape.center/random',
                           headers=self.headers, timeout=10)
        return res.text

    # Additional per-site scraping methods go below.
    def get_proxy_list(self) -> int:
        """Scrape https://www.proxy-list.download/HTTPS and store working IPs.

        :return: number of usable proxies found.
        """
        url = 'https://www.proxy-list.download/api/v2/get?l=en&t=https'
        res = requests.get(url, headers=self.headers, timeout=30)
        count = 0
        for item in tqdm(res.json()['LISTA']):
            ip = '%s:%s' % (item['IP'], item['PORT'])
            print(ip, count)
            if self.check_ip(ip):
                self.proxies[ip] = ['www.proxy-list.download']
                count += 1
        return count

    def get_kuaidaili(self, max: int = 30) -> int:
        """Scrape free proxies from https://www.kuaidaili.com/free/intr/.

        :param max: stop once this many usable proxies have been collected.
            (The name shadows the builtin; kept for caller compatibility.)
        :return: number of usable proxies actually collected.
        """
        count = 0
        session = requests.Session()
        session.headers = self.headers

        for page in range(1, 7405):
            url = f'https://www.kuaidaili.com/free/intr/{page}'
            res = session.get(url, timeout=30)

            # The page embeds its proxy table as a JS constant `fpsList`.
            # FIX: guard against a missing match (layout change / blocked
            # request) instead of crashing with IndexError on [0].
            matches = re.findall(r'const fpsList = (.*?);', res.text)
            if not matches:
                continue
            for entry in json.loads(matches[0]):
                ip = '%s:%s' % (entry['ip'], entry['port'])
                if self.check_ip(ip):
                    # FIX: source was mislabelled 'www.proxy-list.download'.
                    self.proxies[ip] = ['www.kuaidaili.com']
                    count += 1
                    if count >= max:
                        return count

                print(ip, count)
        # FIX: previously fell off the end and returned None when fewer than
        # *max* proxies were found; always report the actual count.
        return count

    def save_ip_json(self, path: str) -> None:
        """Persist the collected proxies as JSON to proxy_ips/<path>.

        :param path: file name (relative to the proxy_ips/ directory).
        """
        # FIX: create the output directory up front; the old code raised
        # FileNotFoundError when proxy_ips/ did not exist.
        os.makedirs('proxy_ips', exist_ok=True)
        with open(f'proxy_ips/{path}', 'w', encoding='utf-8') as f:
            json.dump(self.proxies, f, ensure_ascii=False, indent=4)


if __name__ == '__main__':
    # Scrape kuaidaili's free proxies, report how many passed validation,
    # and persist the working ones to disk.
    spider = ProxySpider()
    found = spider.get_kuaidaili()
    print(found)
    spider.save_ip_json('kuaidaili_ip.json')