#

import requests
from bs4 import BeautifulSoup as BS
import asyncio
import functools
import re
from proxydb import ProxyDB
from settings import config


# Pre-compiled validators for proxy fields scraped from the listing pages.
ip_pattern = re.compile(r'([0-9]{1,3}\.){3}[0-9]{1,3}')  # dotted-quad IPv4 (octet values not range-checked)
port_pattern = re.compile('[1-9][0-9]{0,4}')  # 1-5 digits, no leading zero (NOTE: also matches values > 65535)
scheme_pattern = re.compile('https?')  # only http/https are recognised; anything else falls back to http

# Browser-like headers sent with every request to avoid trivial bot blocking.
headers = {
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
    'Accept-Encoding': 'gzip, deflate, br',
    'Accept-Language': 'zh-CN,zh;q=0.9',
    'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.25 Safari/537.36 Core/1.70.3704.400 QQBrowser/10.4.3587.400'
}

# Shared proxy-database handle used by both Crawler and Tester below.
prdb = ProxyDB(db=config['db'], save_path=config['save_path'])


class CrawlMetaclass(type):
    """Metaclass that registers every ``crawl_*`` method of a class.

    Collects the method names into ``__CrawlFunc__`` (a list) and
    their count into ``__CrawlCount__`` so the class can enumerate
    its own crawl entry points at runtime.
    """

    def __new__(cls, name, bases, attrs):
        crawl_funcs = [attr for attr in attrs if attr.startswith('crawl_')]
        attrs['__CrawlFunc__'] = crawl_funcs
        attrs['__CrawlCount__'] = len(crawl_funcs)
        return type.__new__(cls, name, bases, attrs)


class Crawler(object, metaclass=CrawlMetaclass):
    """Crawl free-proxy listing pages and push discovered proxies into the DB.

    The metaclass registers every ``crawl_*`` coroutine in
    ``__CrawlFunc__``; internal helpers must therefore NOT use that
    prefix.
    """

    def __init__(self):
        # Shared module-level ProxyDB handle.
        self.db = prdb

    def flush(self, key, score=config['flush_score']):
        """Flush the proxy pool when it exceeds the configured threshold.

        Returns True when the pool is under the threshold afterwards,
        False when it is still saturated after flushing.

        NOTE(review): ``key`` and ``score`` are currently unused — the
        method always operates on config['proxy_key']; confirm whether
        callers rely on passing a different key before wiring them in.
        """
        if prdb.zcount(config['proxy_key']) > config['proxy_threshold']:
            print('代理池数量超出')
            prdb.zflush(config['proxy_key'])
        # Re-check after the flush: if the pool is still over the
        # threshold, report saturation to the caller.
        if prdb.zcount(config['proxy_key']) > config['proxy_threshold']:
            print('代理池饱和')
            return False
        return True

    async def get_one_page(self, url, proxies=None, timeout=15):
        """Fetch ``url`` and return its decoded text, or '' on any failure.

        ``requests`` is blocking, so the call runs on the default
        executor to keep the event loop responsive.
        """
        try:
            loop = asyncio.get_running_loop()
            r = await loop.run_in_executor(
                None, functools.partial(
                    requests.get, url, proxies=proxies, headers=headers, timeout=timeout
                )
            )
            if r.status_code == 200:
                # Fixed: the previous `r.encoding = r.apparent_encoding`
                # was dead code (immediately overwritten) and triggered an
                # expensive charset-detection pass for nothing.
                r.encoding = 'utf8'
                return r.text
        except Exception:
            # Best-effort fetch: any network/HTTP error yields an empty page.
            pass
        return ''

    def get_proxy(self, node_list):
        """Parse table rows into 'scheme:ip:port' entries and store them.

        Each row's text is whitespace-collapsed so the cells split into a
        flat list; by the source pages' layout, index 1 is the IP,
        index 2 the port and index 4 the scheme.
        """
        for node in node_list:
            fields = re.sub(r'\s+', ',', node.text).split(',')
            ip, port, scheme = fields[1], fields[2], fields[4].lower()
            if ip_pattern.match(ip) and port_pattern.match(port):
                if not scheme_pattern.match(scheme):
                    # Default to http when the scheme cell is not http/https.
                    scheme = 'http'
                proxy = f'{scheme}:{ip}:{port}'
                rv = self.db.zadd(
                    config['proxy_key'],
                    (proxy, config['initial_score']))
                if rv:
                    print('添加新代理', rv)

    async def _crawl_table(self, url, selector):
        """Shared helper: fetch ``url``, select its rows, feed get_proxy.

        Deliberately NOT prefixed with ``crawl_`` so the metaclass does
        not register it as a crawl entry point.
        """
        html = await self.get_one_page(url)
        if not html:
            return ''
        soup = BS(html, 'lxml')
        self.get_proxy(soup.select(selector))

    async def crawl_iphai(self):
        # Front page (note: no 'main-container' class on this one).
        return await self._crawl_table(
            'http://www.iphai.com/',
            'body > div.container > div.table-responsive.module > table > tr')

    async def crawl_iphaing(self):
        # Domestic anonymous proxy list.
        return await self._crawl_table(
            'http://www.iphai.com/free/ng',
            'body > div.container.main-container > div.table-responsive.module > table > tr')

    async def crawl_iphaiwg(self):
        # Foreign proxy list.
        return await self._crawl_table(
            'http://www.iphai.com/free/wg',
            'body > div.container.main-container > div.table-responsive.module > table > tr')

    async def run(self):
        """Run all crawlers concurrently, then persist the DB."""
        print('开始爬取代理')
        await asyncio.gather(
            self.crawl_iphai(),
            self.crawl_iphaing(),
            self.crawl_iphaiwg(),
        )
        self.db.save()
        print('代理爬取完毕')


class Tester(object):
    """Validate pooled proxies against config['test_url'] and rescore them."""

    def __init__(self):
        # Shared module-level ProxyDB handle.
        self.db = prdb

    async def test_one_proxy(self, proxy=None, timeout=15):
        """Try one proxy; promote it on success, demote it on failure.

        ``proxy`` is a (member, score) pair whose member is
        'scheme:ip:port' as stored by the crawler. Returns the DB's
        return value for the score update.
        """
        url = config['test_url']
        scheme, ip, port = proxy[0].split(':')
        proxies = {
            scheme: f'{ip}:{port}'
        }
        try:
            loop = asyncio.get_running_loop()
            r = await loop.run_in_executor(
                None, functools.partial(
                    requests.get, url, proxies=proxies, headers=headers, timeout=timeout
                )
            )
            if r.status_code == 200:
                # Working proxy: bump it to the maximum score.
                rv = self.db.zmax(config['proxy_key'], proxy)
                if rv:
                    print('可用代理', rv)
                    return rv
        except Exception:
            # Unreachable/slow proxies fall through to the demotion below.
            pass
        rv = self.db.zdecrease(config['proxy_key'], proxy)
        print('不可用代理', rv)
        return rv

    async def test_all_proxy(self, size=config['test_size']):
        """Test every proxy in the pool concurrently, in batches of ``size``.

        Polls once per second until the pool is non-empty.
        """
        all_proxy = self.db.zall(config['proxy_key'])
        while not all_proxy:
            await asyncio.sleep(1)
            all_proxy = self.db.zall(config['proxy_key'])
        count = len(all_proxy)
        # Fixed off-by-one: the old `range(0, count - 1, size)` with
        # `stop = min(i + size, count - 1)` always skipped the last proxy
        # (and tested nothing at all when the pool held a single proxy).
        for start in range(0, count, size):
            stop = min(start + size, count)
            print(f'test {start} --> {stop} proxy')
            await asyncio.gather(
                *[self.test_one_proxy(proxy=proxy, timeout=7)
                  for proxy in all_proxy[start:stop]]
            )

    async def run(self):
        """Test all proxies once, then persist the DB."""
        print('开始测试代理')
        await self.test_all_proxy()
        self.db.save()
        print('代理测试完毕')


async def main():
    """Crawl and test proxies concurrently, then print the whole pool."""
    await asyncio.gather(
        Crawler().run(),
        Tester().run(),
    )
    print(prdb.zall(config['proxy_key']))

# Script entry point: run crawler and tester together, then dump the pool.
if __name__ == "__main__":
    asyncio.run(main())
