# coding:utf8
"""
author:shadu@foxmail.com
"""
import base64
from bs4 import BeautifulSoup

from core.http_content import get_html_safely
from core.proxyqueue import ProxyList
from core.network import is_ipaddr, is_port
from zeander.zeander import ZLOG

__version__ = '1.0'

SOURCE = 'www.cool-proxy.net'
URL = 'https://www.cool-proxy.net/proxies/http_proxy_list/sort:score/direction:desc/page:%d'


def save(proxy=None):
    """Push a proxy 'ip:port' string onto the shared queue.

    Anything that is not a non-empty ``str`` is silently ignored.
    """
    if isinstance(proxy, str) and proxy:
        ProxyList.put(proxy)


def decode_cool_proxy_ip(proxy_ip_encoded):
    """Decode cool-proxy.net's obfuscated IP address.

    The site embeds each IP as
    ``document.write(Base64.decode(str_rot13("...")))`` — i.e. the dotted-quad
    string is Base64-encoded, then ROT13-encoded.  This reverses both layers.

    :param proxy_ip_encoded: the ``str_rot13("...")`` fragment from the page
    :return: the decoded IP address as ``str``, or ``'-'`` on any failure
    """
    # document.write(Base64.decode(str_rot13("ZwNmYwRlZl4lZwxhZmt="))) 203.123.229.38
    original_ip_addr = '-'

    if 'str_rot13' not in proxy_ip_encoded:
        ZLOG.alert('str_rot13 not found in %s' % SOURCE)
        return original_ip_addr

    offset = 13
    rot_encoded_str = proxy_ip_encoded.split('"')[1]
    base64_encoded_char_list = []

    # Undo ROT13: rotate letters by 13 positions, leave everything else alone.
    for vchar in rot_encoded_str:
        vchar_lower = vchar.lower()
        if 'a' <= vchar_lower <= 'z':
            if vchar_lower < 'n':
                base64_encoded_char_list.append(chr(ord(vchar) + offset))
            else:
                base64_encoded_char_list.append(chr(ord(vchar) - offset))
        else:
            base64_encoded_char_list.append(vchar)

    base64_encoded_ipaddr = ''.join(base64_encoded_char_list)

    # base64.decodestring() was deprecated since Python 3.1 and removed in 3.9;
    # b64decode() returns bytes, so decode to str to keep the original (str)
    # return type that callers (is_ipaddr, '%s' formatting) expect.
    # binascii.Error is a ValueError subclass, so ValueError covers bad padding.
    try:
        original_ip_addr = base64.b64decode(base64_encoded_ipaddr).decode('ascii')
    except (ValueError, UnicodeDecodeError):
        ZLOG.alert('failed to base64-decode proxy IP from %s' % SOURCE)
        original_ip_addr = '-'

    return original_ip_addr


def parse(html):
    """Extract proxies from one cool-proxy.net listing page.

    Walks the rows of the main results table, decodes each obfuscated IP,
    validates ip/port, and queues every valid 'ip:port' pair via save().
    Logs the number of proxies found on this page.
    """
    found = 0
    soup = BeautifulSoup(html, 'html.parser')
    rows = soup.find(id='main').find('table').findAll('tr')

    for row in rows:
        cells = row.findAll('td')
        # Data rows on this site have exactly 10 cells; skip header/ad rows.
        if not cells or len(cells) != 10:
            continue

        script_str = cells[0].find('script').string
        if 'document.write(Base64.decode(' not in script_str:
            continue

        encoded = script_str.replace('document.write(Base64.decode(', '').replace('))', '')
        ip_addr = decode_cool_proxy_ip(encoded)
        port = cells[1].string

        if is_ipaddr(ip_addr) and is_port(port):
            save(str('%s:%s' % (str(ip_addr), str(port))))
            found += 1

    ZLOG.info('Got %d proxies from %s' % (found, SOURCE))


def collect():
    """Fetch and parse every listing page of %s (pages 0..20).

    get_html_safely() signals failure by returning '-'; such pages are
    logged and skipped rather than aborting the whole run.
    """
    for page in range(0, 21):
        url = URL % page
        html = get_html_safely(url)
        if html == '-':
            # Bug fix: log the concrete page URL that failed, not the
            # URL template (which still contains the literal '%d').
            ZLOG.alert('Failed to get HTML from %s' % url)
        else:
            parse(html)


# Script entry point: run a full collection pass when executed directly.
if __name__ == '__main__':
    collect()
