# coding:utf8
"""
author:shadu@foxmail.com
"""
from bs4 import BeautifulSoup

from core.http_content import get_html_safely
from core.proxyqueue import ProxyList
from core.network import is_ipaddr, is_port
from zeander.zeander import ZLOG

__version__ = '1.0'

# Identifier for this scrape source; used in log messages.
SOURCE = 'proxydb.net'
# Listing-page URL template; %d is the pagination offset (20 rows per page).
URL = 'http://proxydb.net/?protocol=http&protocol=https&offset=%d'


def save(proxy=None):
    """Push one proxy string ("ip:port") onto the shared proxy queue.

    Falsy values and non-string arguments are silently ignored.
    """
    if isinstance(proxy, str) and proxy:
        ProxyList.put(proxy)


def parse(html):
    """Extract proxies from one proxydb.net listing page and save them.

    The site obfuscates each address inside a <script> tag: the first half
    of the IP (``var x``) is stored reversed, the second half (``var y``)
    is plain, and the port (``var p``) is a small arithmetic expression.

    :param html: raw HTML text of one listing page.
    """
    proxy_count = 0
    bs = BeautifulSoup(html, 'html.parser')
    table = bs.find('table', {'class': 'table-responsive'})
    if table is None:
        # Error page or changed layout: bail out instead of raising
        # AttributeError on NoneType.findAll.
        ZLOG.alert('No proxy table found in HTML from %s' % SOURCE)
        return
    for bs_script in table.findAll('script'):
        ip_part1 = '-'
        ip_part2 = '-'
        port = '-'
        if not bs_script.string:
            # <script> with no direct text content; .string would be None.
            continue
        for line in bs_script.string.split('\n'):
            try:
                if 'var x = ' in line:
                    # First IP half is stored reversed; undo with [::-1].
                    ip_part1 = line.split('\'')[1][::-1]
                    continue
                elif 'var y = ' in line:
                    ip_part2 = line.split('\'')[1]
                    continue
                elif 'var p = ' in line:
                    port = line.split('var p = ')[1].split(';')[0]
                    continue
            except Exception as e:  # was py2-only "except Exception, e"
                ZLOG.alert('Failed to parse line from %s: %s' % (SOURCE, e))
        if ip_part1 == '-' or ip_part2 == '-' or port == '-':
            # Incomplete triple; skip this <script>.
            continue
        proxy_ip = ip_part1 + ip_part2
        try:
            # SECURITY: the port is an arithmetic expression scraped from an
            # untrusted page. Strip builtins so page content cannot call
            # anything; a manual expression parser would be safer still.
            proxy_port = str(eval(port, {'__builtins__': {}}, {}))
        except Exception as e:
            ZLOG.alert('Bad port expression from %s: %s' % (SOURCE, e))
            continue
        if is_ipaddr(proxy_ip) and is_port(proxy_port):
            proxy = '%s:%s' % (proxy_ip, proxy_port)
            save(str(proxy))
            proxy_count += 1
    ZLOG.info('Got %d proxies from %s' % (proxy_count, SOURCE))


def collect():
    """Fetch the first 10 listing pages (20 proxies each) and parse them."""
    for offset in range(0, 10):
        url = URL % (20 * offset)
        html = get_html_safely(url)
        if html == '-':
            # Bug fix: log the concrete page URL that failed, not the
            # unformatted URL template constant.
            ZLOG.alert('Failed to get HTML from %s' % url)
        else:
            parse(html)


# Allow the module to be run directly as a standalone scraper script.
if __name__ == '__main__':
    collect()
