# coding:utf8
"""
author:shadu@foxmail.com
"""
from bs4 import BeautifulSoup

from core.http_content import get_html_safely
from core.proxyqueue import ProxyList
from core.network import is_ipaddr, is_port
from zeander.zeander import ZLOG

# Human-readable name of the proxy source, used only in log messages.
SOURCE = 'list.proxylistplus.com'
# Paginated URL templates; %d is the 1-based page number.
URL1 = 'https://list.proxylistplus.com/ssl-List-%d'
URL2 = 'https://list.proxylistplus.com/Fresh-HTTP-Proxy-List-%d'

def save(proxy=None):
    """Push a proxy string ("ip:port") onto the shared proxy queue.

    Silently ignores anything that is not a non-empty string, so callers
    may pass None or malformed values without consequence.
    """
    if isinstance(proxy, str) and proxy:
        ProxyList.put(proxy)


def parse(html):
    """Extract proxies from a proxylistplus listing page and enqueue them.

    Scans the table with class "bg" for rows of class "cells"; column 1
    holds the IP and column 2 the port. Each candidate is validated with
    is_ipaddr/is_port before being saved. Logs the number of proxies found.

    :param html: raw HTML of one listing page.
    """
    proxy_count = 0
    soup = BeautifulSoup(html, 'html.parser')
    table = soup.find('table', {'class': 'bg'})
    # Guard against layout changes or error pages: the original code
    # raised AttributeError here when the table was missing.
    if table is None:
        ZLOG.alert('No proxy table found in HTML from %s' % SOURCE)
        return
    for row in table.findAll('tr', {'class': 'cells'}):
        cells = row.findAll('td')
        # Skip malformed rows that would otherwise raise IndexError.
        if len(cells) < 3:
            continue
        proxy_ip = cells[1].string
        proxy_port = cells[2].string
        if is_ipaddr(proxy_ip) and is_port(proxy_port):
            save('%s:%s' % (proxy_ip, proxy_port))
            proxy_count += 1
    ZLOG.info('Got %d proxies from %s' % (proxy_count, SOURCE))


def _collect_pages(url_template, page_count):
    """Fetch pages 1..page_count of url_template and parse each one.

    get_html_safely signals failure by returning '-'; failures are
    logged as alerts and the remaining pages are still attempted.
    """
    for page in range(1, page_count + 1):
        url = url_template % page
        html = get_html_safely(url)
        if html == '-':
            ZLOG.alert('Failed to get HTML from %s' % url)
        else:
            parse(html)


def collect1():
    """Collect proxies from the SSL list (pages 1-2)."""
    _collect_pages(URL1, 2)


def collect2():
    """Collect proxies from the fresh HTTP list (pages 1-5)."""
    _collect_pages(URL2, 5)


def collect():
    """Run all collectors for this source."""
    collect1()
    collect2()


# Allow running this collector standalone for a one-off scrape.
if __name__ == '__main__':
    collect()