# -*- coding: utf-8 -*-
# @Time    : 2019/11/18 15:18
# @Author  : ZSQ
# @Email   : zsq199170918@163.com
# @FileName: proxy_to_redis.py
# @Software: PyCharm
import re
import time
import redis
import requests
import threading
from datetime import datetime
from FDC_spider.settings import PROXY_URL
from FDC_spider.utils.redis_pool import POOL, PROXY_REDIS_KEY, PROXY_NUM


class ProxyToRedis(object):
    """Maintain a pool of verified proxy IPs in a capped Redis list.

    Worker threads fetch candidate proxies from ``PROXY_URL``, verify each
    one by routing a request through it, and push working IPs onto the
    Redis list ``self.name`` (trimmed to ``PROXY_NUM`` entries).
    """

    def __init__(self, name, code1, code2, code3):
        # name:  Redis list key that holds the verified proxy IPs.
        # code1: proxy auth username; code1 + code2 together form the password.
        # code3: proxy server port.
        self.conn = redis.StrictRedis(connection_pool=POOL)
        self.pipeline = self.conn.pipeline(transaction=True)
        self.name, self.code1, self.code2, self.code3, self.proxy_num = (name, code1, code2, code3, PROXY_NUM)

    def check_proxy_ip(self, proxy_ip):
        """Verify *proxy_ip* by requesting an IP-echo service through it.

        Returns the IP unchanged on success; raises ``ValueError`` (or a
        ``requests`` exception / timeout) when the proxy does not work.
        """
        # 'Connection: close' keeps idle keep-alive sockets from piling up.
        headers = dict(Connection='close')

        # user:password@ip:port — password is code1 + code2 concatenated.
        auth = '{}:{}'.format(self.code1, self.code1 + self.code2)
        proxy_dict = dict(
            http="http://{}@{}:{}".format(auth, proxy_ip, self.code3),
            https="https://{}@{}:{}".format(auth, proxy_ip, self.code3),
        )
        # Test endpoint changed because the previous one (icanhazip.com)
        # started refusing requests, breaking proxy verification.  ---By ZSQ 2021/2/20
        check_resp = requests.get('http://dev.kdlapi.com/testproxy', proxies=proxy_dict, headers=headers, timeout=1)
        match = re.search(r'\d+\.\d+\.\d+\.\d+', check_resp.content.decode())
        # The echoed IP must equal the proxy we routed through, otherwise the
        # proxy is transparent or broken.  Raise instead of `assert` so the
        # check is not stripped under `python -O`.
        if match is None or match.group() != proxy_ip:
            raise ValueError('proxy check failed for {}'.format(proxy_ip))
        return proxy_ip

    def save_to_redis(self, proxy_ip):
        """Atomically push *proxy_ip* and trim the list to ``proxy_num`` entries."""
        try:
            self.pipeline.multi()
            self.pipeline.lpush(self.name, proxy_ip)
            # Keep only the newest proxy_num entries.
            self.pipeline.ltrim(self.name, 0, self.proxy_num - 1)
            self.pipeline.execute()
        except Exception as e:
            print(self.get_current_time(), '<<Thread_' + threading.current_thread().name + '>>', e)

    @staticmethod
    def get_proxy_ip():
        """Fetch one candidate proxy IP (plain-text body) from ``PROXY_URL``."""
        # 'Connection: close' keeps idle keep-alive sockets from piling up.
        headers = dict(Connection='close')
        resp = requests.get(PROXY_URL, headers=headers)
        return resp.content.decode()

    @staticmethod
    def get_check_regex():
        """Return a whitespace-matching regex (kept for backward compatibility)."""
        return re.compile(r'\s')

    def get_redis_key_len(self):
        """Return the current number of proxies stored in the Redis list."""
        return self.conn.llen(self.name)

    def delete_redis_key(self):
        """Delete the whole proxy list; returns the number of keys removed."""
        return self.conn.delete(self.name)

    @staticmethod
    def get_current_time():
        """Return the current local time as ``'YYYY-MM-DD HH:MM:SS'``."""
        return datetime.now().strftime('%Y-%m-%d %H:%M:%S')

    def run(self):
        """Worker loop: keep the Redis list topped up with verified proxies."""
        while True:
            key_len = self.get_redis_key_len()
            if key_len < self.proxy_num:
                try:
                    proxy_ip = self.get_proxy_ip()
                    try:
                        self.check_proxy_ip(proxy_ip)
                    except Exception:
                        # Candidate failed verification — just try the next one.
                        continue
                    else:
                        self.save_to_redis(proxy_ip)
                        print(self.get_current_time(), '<<Thread_' + threading.current_thread().name + '>>', proxy_ip)
                except Exception:
                    # Provider failure: back off so we do not hammer the proxy
                    # service and exceed its connection limit.
                    time.sleep(1 * 60)
                    continue
            else:
                # Pool is full; poll again shortly.
                time.sleep(0.5)
                continue

    def main(self):
        """Start five daemon workers and periodically flush the pool."""
        self.delete_redis_key()
        for i in range(5):
            _thread = threading.Thread(target=self.run, daemon=True, name=str(i + 1))
            _thread.start()
        while True:
            # Flush the pool every 5 minutes so only fresh, currently-usable
            # proxies remain in Redis.
            time.sleep(60 * 5)
            self.delete_redis_key()


# Module-level singleton so other modules can `from ... import PR`.
# Args: Redis list key, proxy auth username ('hifo'), password suffix
# ('1996'; the password is username + suffix), proxy port (65500).
# NOTE(review): constructing StrictRedis from a pool should not open a
# connection until the first command is issued — confirm against redis-py.
PR = ProxyToRedis(PROXY_REDIS_KEY, 'hifo', '1996', 65500)

# Only start the worker threads when run as a script, not on import.
if __name__ == '__main__':
    PR.main()
