#! /usr/bin/env python
# -*- coding: utf-8 -*-

import requests
import lxml.html

import urllib

import re

import sys


def loop_for_parse(t):
    result = []
    support_scheme = ["http",
                      "socks4",
                      "socks5"]
    try:
        assert t in support_scheme
    except:
        raise NotImplementedError
    url = 'http://www.proxylists.net/cn_1_ext.html'
    print "[*] Retrieving from china_list"
    r = requests.get(url)
    root = lxml.html.fromstring(r.content)
    pages = root.xpath('//tr/td/b/a/text()')
    for page in pages:
        url = 'http://www.proxylists.net/cn_' + page + '_ext.html'
        try:
            print "[*] Parsing %s now" % url
            proxy = parse_proxy_list(url, t)
        except Exception, e:
            print e
            break
        result += proxy
    return result


def parse_proxy_list(url, t):
    """Fetch one proxylists.net page and extract proxies of scheme *t*.

    url: full page URL (e.g. http://www.proxylists.net/cn_2_ext.html).
    t: proxy scheme ("http", "socks4", "socks5").
    Returns a list of "ip:port" strings. The IP is hidden inside a
    URL-encoded <script> snippet; port and type come from adjacent cells.
    """
    r = requests.get(url)
    # Fail loudly on HTTP errors. `assert r.ok` was used before, but
    # asserts are stripped under -O; raise_for_status always fires.
    r.raise_for_status()
    #----------------------------------------
    if t == "http":
        include = ["Transparent", "Anonymous"]
    else:
        # Match the scheme's own type label for socks proxies. The old
        # code used include = [] here, which filtered out every row and
        # made socks4/socks5 runs always return an empty list.
        # TODO(review): confirm the exact type labels used on the page
        # (assumed "Socks4"/"Socks5").
        include = [t.capitalize()]
    root = lxml.html.fromstring(r.content)
    js_codes = root.xpath('//td/script/text()')
    ports = root.xpath('//table[2]/tr/td[2]/text()')
    typs = root.xpath('//table[2]/tr/td[3]/text()')
    # Compile once instead of re-matching a raw pattern per row.
    ip_pattern = re.compile(r'\d+\.\d+\.\d+\.\d+')
    proxy_list = []
    for js_code, port, typ in zip(js_codes, ports, typs):
        if typ not in include:
            continue
        # The IP is obfuscated as a URL-encoded script; unquote then
        # pull the first dotted-quad out of it.
        ip = ip_pattern.findall(urllib.unquote(js_code))[0]
        proxy_list.append(ip + ':' + port)
    return proxy_list

if __name__ == '__main__':
    # Usage: python <script> <http|socks4|socks5>
    t = sys.argv[1]
    proxy_list = loop_for_parse(t)
    # One proxy per line. Stream a generator into writelines instead of
    # mutating every list element in an index loop as before.
    # NOTE(review): assumes the proxy_list/ directory already exists.
    with open('proxy_list/' + t + '_proxy_proxylists.txt', 'wb') as f:
        f.writelines(p + '\n' for p in proxy_list)
