import requests
from bs4 import BeautifulSoup
import re
from proxy_getter.random_headers import RandomFakeHeaders
from ippool.redis_ippool import IPPool
import time
from util.log import logger
import sys


class GetProxy(object):
    """Crawl free proxy IPs from kuaidaili.com, validate them, and store the
    working high-anonymity ones into a redis-backed IP pool."""

    def __init__(self):
        # listing-page template; %d is the 1-based page number
        self.__URL_PREFIX = "https://www.kuaidaili.com/free/inha/%d"
        # echo service used to check whether a proxy actually works
        self.__TEST_URL = "http://httpbin.org/get"

    @staticmethod
    def get_html(url, headers, proxies=False, retry_times=3):
        """Fetch *url* and return its body decoded as utf-8, or None on failure.

        :param url: page to download
        :param headers: request headers (usually randomized fake headers)
        :param proxies: requests-style proxies mapping; falsy means direct connection
        :param retry_times: number of attempts before giving up
        :return: decoded html text, or None if every attempt failed
        """
        response = None
        for _ in range(retry_times):
            # noinspection PyBroadException
            try:
                if not proxies:
                    response = requests.get(url=url, headers=headers, timeout=5)
                else:
                    response = requests.get(url=url, headers=headers, proxies=proxies, timeout=5)
                break
            except Exception as e:
                logger.exception(e)
        if response is None:
            logger.warning("get response failed, please check the url: {}", url)
            # bug fix: the original fell through and dereferenced the None
            # response below (the crash was masked by the broad except)
            return None

        # noinspection PyBroadException
        try:
            return response.content.decode("utf-8")
        except Exception as e:
            logger.exception(e)
            return None

    @staticmethod
    def parse_html2ip_list(html):
        """Parse a kuaidaili listing page into proxy records.

        The cells appear in sequence on the website, like
        "163.204.240.21, 9999, 广东, 高匿, HTTP" -- each record represents
        proxy_ip, port, province, type and protocol.

        :param html: page html; None/empty yields an empty list
        :return: list of [ip, port, province, type, protocol] lists
        """
        ip_list = []
        if not html:
            # bug fix: BeautifulSoup(None, ...) raises TypeError; get_html
            # returns None on failure, so tolerate it here
            return ip_list

        # hoist the regexes out of the loop; \s already covers \n and \t
        strip_ws = re.compile(r"\s+")
        is_ipv4 = re.compile(r"^\d+\.\d+\.\d+\.\d+$")

        soup = BeautifulSoup(html, "html.parser")
        tds = soup.find_all("td")
        td_cnt = len(tds)

        for index, td in enumerate(tds):
            # an IP-looking cell marks the start of one 5-cell record
            if is_ipv4.match(strip_ws.sub("", td.text)):
                if index + 4 >= td_cnt:
                    # bug fix: guard against a truncated table at page end
                    break
                ip_list.append(
                    [strip_ws.sub("", tds[index + k].text) for k in range(5)])
        return ip_list

    def ip_validation(self, raw_ip):
        """Check the validity of one proxy record against the test url.

        :param raw_ip: [ip, port, ...] record from parse_html2ip_list
        :return: True if the proxy answered with HTTP 200, else False
        """
        ip_with_port = str(raw_ip[0]) + ":" + str(raw_ip[1])
        proxies = {"https": "https://" + ip_with_port}
        headers = RandomFakeHeaders().random_headers_for_validation()

        try:
            response = requests.get(url=self.__TEST_URL, headers=headers, proxies=proxies, timeout=5)
        except Exception as e:
            logger.exception(e)
            return False

        return response.status_code == 200

    def save_ip2redis(self, ip_list):
        """Validate each proxy and insert working high-anonymity ones into redis.

        :param ip_list: records produced by parse_html2ip_list
        """
        ip_cnt = len(ip_list)
        if ip_cnt == 0:
            # bug fix: the effective-rate log below divided by zero on an
            # empty list
            logger.warning("save_ip2redis received an empty ip_list, nothing to do")
            return
        cnt = 0  # number of ips actually put into redis
        for ip in ip_list:
            # keep only proxies that both respond and are high-anonymity
            if self.ip_validation(ip) and ip[2] == '高匿名':
                IPPool().insert_ip(ip)
                cnt += 1
        logger.info("get {} ip, the effective rate is {:.2f}%", cnt, cnt / ip_cnt * 100)

    def gen_proxy_ip_pool(self, start_page=0, end_page=1):
        """Crawl listing pages (start_page, end_page] and fill the ip pool.

        Each page is fetched through a random proxy taken from the pool
        itself; the crawler sleeps 30s between pages to stay polite.

        :param start_page: 0-based index of the first page (exclusive offset)
        :param end_page: 1-based index of the last page (inclusive)
        :return: None; stops early if a page yields no proxies
        """
        urls = [self.__URL_PREFIX % (index + 1) for index in range(start_page, end_page)]
        url_cnt = len(urls)

        for index, url in enumerate(urls):
            logger.info("now parse the {} url，the overall progress is {}/{}, url:{}", index + 1, index + 1, url_cnt, url)

            headers = RandomFakeHeaders().random_headers_for_xici()
            ip = IPPool().get_random_key()
            proxies = {"http": "http://" + ip}

            response = self.get_html(url=url, headers=headers, proxies=proxies)
            ip_list = self.parse_html2ip_list(response)
            if not ip_list:
                logger.warning("the ip_list is empty, please check the url:{}", url)
                return None
            self.save_ip2redis(ip_list)
            logger.info("we already crawl {} url, overall progress is {:.2f}%", index + 1, (index + 1) / url_cnt * 100)
            logger.info("now sleep for 30s...")
            time.sleep(30)


if __name__ == "__main__":
    # 在控制台打印日志
    logger.remove()
    logger.add(
        sys.stderr, colorize=True,
        format="[<green>{time:YYYY-MM-DDTHH:mm:ss}</green>]<level>{message}</level>",
        level="DEBUG")
    logger.info("get the html of kuaidaili...")
    res = GetProxy().get_html("https://www.kuaidaili.com/free/inha/7", headers=RandomFakeHeaders().random_headers_for_xici())

    logger.info("parse the html to get ip list...")
    ips = GetProxy().parse_html2ip_list(res)
    ip = ips[5]

    logger.info("test the ip validation test of {}", ip)
    val_flag = GetProxy().ip_validation(ip)
    if val_flag:
        logger.info("this ip is available")
    else:
        logger.warning("please check other ip or modify the func defination")

    logger.info("Test: test the save_ip2redis func:")
    GetProxy().save_ip2redis(ips)
