import pickle
import threading
import time

import bs4
import requests

from Crawler import Crawler, tar_urls, Parser
from fake_useragent import UserAgent


class ProxiesIP:
    """Thread-safe pool of proxy addresses, persisted to disk via pickle.

    Proxies are kept in separate sets per scheme ('http' / 'https').
    When the pool runs low, waiting crawler threads are woken through
    ProxiesIpCrawler.condition so they can refill it.
    """

    # Minimum pool size required before a proxy is handed out.
    min_num = 1
    # Round-robin cursor used by get_random_proxies_ip.
    __rand_seed = 0
    # Serializes pickers against deletions so the pool cannot be drained
    # between the size check and the pick.
    __read_lock = threading.Lock()

    def __init__(self):
        """Load the persisted pool from 'proxies_IPs', or start empty."""
        self.__proxies_ip_lock = threading.RLock()
        pool = {'http': set(), 'https': set()}
        try:
            with open('proxies_IPs', 'rb') as src:
                pool = pickle.load(src)
        except FileNotFoundError:
            pass
        self.__proxies_ip = pool

    def add_ip(self, _tar_ip, _type='http'):
        """Register a verified proxy address under the given scheme."""
        with self.__proxies_ip_lock:
            self.__proxies_ip[_type].add(_tar_ip)

    def del_ip(self, _tar_ip, _type='http'):
        """Discard a dead proxy; wake the crawler if the pool runs low."""
        with self.__read_lock, self.__proxies_ip_lock:
            bucket = self.__proxies_ip[_type]
            if _tar_ip not in bucket:
                return
            bucket.discard(_tar_ip)
            if len(bucket) < ProxiesIP.min_num:
                with ProxiesIpCrawler.condition:
                    ProxiesIpCrawler.condition.notify_all()

    def persistence(self):
        """Snapshot the current pool to disk under 'proxies_IPs'."""
        with self.__proxies_ip_lock, open('proxies_IPs', 'wb') as sink:
            pickle.dump(self.__proxies_ip, sink)

    def get_random_proxies_ip(self, _type='http'):
        """Return {_type: proxy} chosen round-robin; blocks until one exists.

        Holding __read_lock for the whole call keeps del_ip from emptying
        the pool after the size check passes.
        """
        with ProxiesIP.__read_lock:
            while len(self.__proxies_ip[_type]) < ProxiesIP.min_num:
                # Ask the crawler thread to fetch proxies of this scheme.
                ProxiesIpCrawler.search_type = _type
                with ProxiesIpCrawler.condition:
                    ProxiesIpCrawler.condition.notify_all()

                time.sleep(5)

            # Throttle: the smaller the pool, the longer each caller waits.
            time.sleep(1 / max(len(self.__proxies_ip[_type]), 1))
            with self.__proxies_ip_lock:
                candidates = list(self.__proxies_ip[_type])
                seed = ProxiesIP.__rand_seed % len(candidates)
                ProxiesIP.__rand_seed = (ProxiesIP.__rand_seed + 1) % len(candidates)
            return {_type: candidates[seed]}

    def get_size(self, _type='http'):
        """Number of proxies currently pooled for the given scheme."""
        return len(self.__proxies_ip[_type])

    def __str__(self):
        return str(self.__proxies_ip)


class ProxiesIpCrawler(threading.Thread):
    """Background thread that refills the proxy pool when it runs low.

    Other threads wake it via `condition`; the scheme to search for is
    selected through the class attribute `search_type`.
    """

    condition = threading.Condition()
    search_type = 'http'

    def __init__(self):
        super().__init__()
        self.end_flag = False                  # set via end() to stop run()
        self.end_flag_lock = threading.Lock()  # guards end_flag
        self.begin_page = 1                    # next listing page to crawl

    @classmethod
    def crawl_proxies_ip(cls, begin_page=1, search_num=5):
        """Crawl free-proxy listing pages and persist the verified proxies.

        Source: kuaidaili free proxy list, https://www.kuaidaili.com/free/inha/1/

        :param begin_page: first listing page to fetch
        :param search_num: number of consecutive pages to fetch
        :return: None
        """
        syn = threading.Semaphore(0)
        end_page = begin_page + search_num
        for i in range(begin_page, end_page):
            crawler = Crawler(tar_urls['free_ip_proxies'] % i, Parser(ProxiesIpCrawler.parse_free_proxies_ip), syn)
            crawler.start()
            time.sleep(3)  # stagger requests to avoid hammering the site
        # BUG FIX: the original iterated over a (range, str) tuple, so the
        # semaphore was acquired exactly twice regardless of search_num and
        # the join on the page crawlers was broken. Acquire once per page.
        for _ in range(begin_page, end_page):
            syn.acquire()
        proxies_ip_pool.persistence()

    @classmethod
    def parse_free_proxies_ip(cls, b_s: "bs4.BeautifulSoup"):
        """Parse one kuaidaili listing page and test every advertised proxy.

        https://www.kuaidaili.com/free/inha/1/
        :param b_s: parsed HTML of the listing page
        :return: the shared proxy pool
        """
        ip_tags = b_s.find_all(attrs={'data-title': 'IP'})
        port_tags = b_s.find_all(attrs={'data-title': 'PORT'})
        syn = threading.Semaphore(0)
        for i, ip_tag in enumerate(ip_tags):
            tar_ip = ip_tag.contents[0]
            tar_port = port_tags[i].contents[0]
            tar_proxy = tar_ip + ':' + tar_port

            # Each candidate is verified concurrently before entering the pool.
            thread = ProxiesIpTestAndSet(tar_proxy, syn)
            thread.start()

        for _ in range(len(ip_tags)):
            syn.acquire()  # wait for every verification thread to finish
        return proxies_ip_pool

    def run(self) -> None:
        """Sleep while the pool is full enough; crawl when woken or low."""
        while True:
            if proxies_ip_pool.get_size(ProxiesIpCrawler.search_type) > ProxiesIP.min_num:
                with ProxiesIpCrawler.condition:
                    ProxiesIpCrawler.condition.wait(10)
                    continue
            if ProxiesIpCrawler.search_type == 'http':
                ProxiesIpCrawler.crawl_proxies_ip(begin_page=self.begin_page)
                self.begin_page += 5
                self.begin_page %= 4900  # wrap around the site's page count
            elif ProxiesIpCrawler.search_type == 'https':
                self.get_https_ip()
            with self.end_flag_lock:
                if self.end_flag:
                    return

    def get_https_ip(self):
        # TODO: implement https proxy sourcing (search_type == 'https' in run()).
        pass

    def end(self):
        """Request a graceful stop after the current crawl cycle."""
        with self.end_flag_lock:
            self.end_flag = True


class ProxiesIpTestAndSet(threading.Thread):
    """Verifies one candidate proxy and adds it to the pool if it works."""

    # At most 10 verification requests in flight at once.
    ip_crawler_thread_semaphore = threading.Semaphore(10)

    def __init__(self, tar_ip, syn=None, _type='http'):
        """
        :param tar_ip: candidate 'host:port' proxy address
        :param syn: semaphore released when verification finishes; a private
                    one is created when omitted
        :param _type: proxy scheme to verify, 'http' or 'https'
        """
        super().__init__()
        self.tar_ip = tar_ip
        # BUG FIX: the default used to be `syn=threading.Semaphore()` — a
        # single semaphore created once at def time and shared by every
        # instance constructed without an explicit one (mutable default).
        self.syn = threading.Semaphore() if syn is None else syn
        self.type = _type

    def proxies_ip_test(self):
        """Probe httpbin.org through the proxy; pool the proxy on success."""
        tar_ip = self.tar_ip
        # BUG FIX: the proxies dict was hard-coded to the 'http' key, so
        # https candidates were never actually routed through the proxy.
        proxy = {
            self.type: tar_ip
        }
        try:
            headers = {'User-Agent': ua.random}
            response = requests.get(f'{self.type}://httpbin.org/ip', proxies=proxy, headers=headers, timeout=8)
            if response.ok:
                proxies_ip_pool.add_ip(tar_ip, self.type)

        except requests.RequestException:
            # Dead or slow proxies are expected; drop them silently.
            pass

    def run(self) -> None:
        # The class semaphore caps concurrent probes; syn signals completion.
        with ProxiesIpTestAndSet.ip_crawler_thread_semaphore:
            self.proxies_ip_test()
            self.syn.release()



# Module-level singletons shared by the classes above.
ua = UserAgent()  # random User-Agent generator used by proxy probe requests
proxies_ip_pool = ProxiesIP()  # shared proxy pool (loaded from 'proxies_IPs' if present)
ip_crawler = ProxiesIpCrawler()  # refill thread; start() is not called in this chunk — presumably started by a caller elsewhere