# coding:utf8
"""
author:shadu@foxmail.com
"""
from bs4 import BeautifulSoup

from core.http_content import get_html_safely
from core.proxyqueue import ProxyList
from core.network import is_ipaddr, is_port
from zeander.zeander import ZLOG

__version__ = '1.0'

# Human-readable identifier of the proxy source site; used in log output.
SOURCE = 'www.proxydocker.com'
# Paginated HTTPS proxy listing; %d is replaced with the 1-based page number.
URL = 'https://www.proxydocker.com/en/proxylist/type/HTTPS?page=%d'


def save(proxy=None):
    """Push a non-empty ``ip:port`` string onto the shared proxy queue.

    Anything that is not a non-empty ``str`` is silently ignored.
    """
    if isinstance(proxy, str) and proxy:
        ProxyList.put(proxy)


def parse(html):
    """Extract ``ip:port`` pairs from proxy detail links in *html* and queue them.

    Only anchors whose href contains ``/en/proxy/`` are considered; each
    candidate is validated with ``is_ipaddr``/``is_port`` before being saved.
    Logs the number of proxies found.
    """
    found = 0
    soup = BeautifulSoup(html, 'html.parser')
    for anchor in soup.findAll('a', {'href': True}):
        href = anchor.get('href')
        if '/en/proxy/' not in href:
            continue
        # href looks like '/en/proxy/<ip>:<port>' — take the path segment
        # before the first colon for the IP, the tail after the last colon
        # for the port.
        ip = href.partition(':')[0].rsplit('/', 1)[-1]
        port = href.rsplit(':', 1)[-1]
        if is_ipaddr(ip) and is_port(port):
            save('%s:%s' % (ip, port))
            found += 1
    ZLOG.info('Got %d proxies from %s' % (found, SOURCE))


def collect():
    """Fetch listing pages 1-10 and parse proxies out of each one.

    Pages that fail to download (signalled by the '-' sentinel from
    ``get_html_safely``) are logged and skipped.
    """
    for page in range(1, 11):
        url = URL % page
        html = get_html_safely(url)
        if html == '-':
            # Bug fix: log the concrete page URL that failed, not the
            # URL template (which still contains the literal '%d').
            ZLOG.alert('Failed to get HTML from %s' % url)
        else:
            parse(html)


# Script entry point: scrape all listing pages when run directly.
if __name__ == '__main__':
    collect()
