#!/usr/bin/env python
from __future__ import print_function
import sys, time, threading
import urllib2, httplib, zlib
from urlparse import urlparse
from bs4 import BeautifulSoup

class ProxyRequire:
    """Scrape free-proxy listing sites and append the results to proxy.csv.

    Scrape and write jobs are queued as dicts on a shared task list and
    drained concurrently by worker threads started in run(); file output
    is serialized with a lock.  (Python 2 code: urllib2/httplib/urlparse.)
    """

    # Number of worker threads (overwritten by run()).
    _threads = 1
    # Serializes appends to proxy.csv across worker threads.
    _file_lock = threading.Lock()
    # Shared FIFO of {'fun': callable, 'args': single-argument} jobs.
    # NOTE: class-level, so every instance shares one queue.
    _task_list = []

    def http_get(self, url):
        """Fetch *url* with httplib and return the raw response body.

        Raises ValueError for schemes other than http/https (previously
        this crashed with UnboundLocalError on `conn`).
        """
        headers = {
            "Content-type": "application/x-www-form-urlencoded",
            "Accept-Encoding": "gzip, deflate",
            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/48.0.2540.0 Safari/537.36"
            }
        ou = urlparse(url)
        if ou.scheme == 'http':
            conn = httplib.HTTPConnection(ou.netloc)
        elif ou.scheme == 'https':
            conn = httplib.HTTPSConnection(ou.netloc)
        else:
            raise ValueError("unsupported scheme: %s" % ou.scheme)
        try:
            # Fall back to '/' so bare-host URLs (empty path) still work.
            conn.request("GET", ou.path or '/', headers=headers)
            resp = conn.getresponse()
            return resp.read()
        finally:
            # Always release the socket, even when the request fails.
            conn.close()

    def get_web_data(self, url):
        """Download *url* with urllib2, transparently gunzipping the body.

        Returns the page bytes, or None when the server answers with an
        HTTP error status (logged to stderr).  Non-HTTP failures (DNS,
        connection refused) propagate to the caller, as before.
        """
        try:
            req = urllib2.Request(url)
            req.add_header('User-Agent', 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/48.0.2540.0 Safari/537.36')
            req.add_header('Accept-Encoding', 'gzip, deflate')
            r = urllib2.urlopen(req)
            respInfo = r.info()
            respHtml = r.read()
            if ("Content-Encoding" in respInfo) and (respInfo['Content-Encoding'] == "gzip"):
                # 16 + MAX_WBITS tells zlib to expect a gzip wrapper.
                respHtml = zlib.decompress(respHtml, 16 + zlib.MAX_WBITS)
        except urllib2.HTTPError as e:
            print("code %d, get %s failed" % (e.code, url), file=sys.stderr)
            respHtml = None
        return respHtml

    def parse_html_kuaidaili(self, url):
        """Parse one kuaidaili listing page and queue its proxies for writing.

        Extracts (ip, port, protocol) as plain text — consistent with
        parse_html_xicidaili; previously raw <td> Tag objects were stored
        and HTML markup ended up in the CSV.
        """
        data = self.get_web_data(url)
        if data is None:
            return
        soup = BeautifulSoup(data, 'lxml')
        proxys = []
        # Skip the first row (header), as the original sibling walk did.
        for row in soup.table.tbody.find_all('tr')[1:]:
            tds = row.find_all('td')
            if not tds:
                continue
            # Columns: 0 = ip, 1 = port, 3 = protocol.
            proxys.append((tds[0].text.strip(), tds[1].text.strip(), tds[3].text.strip()))
        self._task_list.append(dict(fun=self.write_to_file, args=proxys))

    def get_proxy_from_kuaidaili(self):
        """Queue a parse job for every kuaidaili listing page (1..803)."""
        for page in range(1, 804):
            url = "http://www.kuaidaili.com/free/intr/" + str(page) + "/"
            self._task_list.append(dict(fun=self.parse_html_kuaidaili, args=url))

    def parse_html_xicidaili(self, url):
        """Parse one xicidaili listing page and queue its proxies for writing."""
        html_doc = self.get_web_data(url)
        if html_doc is None:
            return
        soup = BeautifulSoup(html_doc, 'lxml')
        proxys = []
        trs = soup.find('table', id='ip_list').find_all('tr')
        for tr in trs[1:]:  # trs[0] is the header row
            tds = tr.find_all('td')
            # Columns: 2 = ip, 3 = port, 6 = protocol (HTTP/HTTPS).
            ip = tds[2].text.strip()
            port = tds[3].text.strip()
            protocol = tds[6].text.strip()
            proxys.append((ip, port, protocol))
        self._task_list.append(dict(fun=self.write_to_file, args=proxys))

    def get_proxy_from_xicidaili(self):
        """Queue a parse job for every xicidaili listing page (1..389)."""
        for page in range(1, 390):
            url = "http://www.xicidaili.com/nn/" + str(page) + "/"
            self._task_list.append(dict(fun=self.parse_html_xicidaili, args=url))

    def isVaildProxy(self, ip, port, type='http'):
        """Return True if ip:port answers an HTTP GET when used as a proxy.

        NOTE: `type` shadows the builtin but is kept for caller compatibility.
        """
        proxy_handler = urllib2.ProxyHandler({type: ip + ":" + str(port)})
        proxy_auth_handler = urllib2.ProxyBasicAuthHandler()
        try:
            opener = urllib2.build_opener(proxy_handler, proxy_auth_handler)
            req = opener.open('http://1111.ip138.com/ic.asp')
        except Exception:
            # Any failure (timeout, refused, DNS...) means the proxy is unusable.
            return False
        return req.getcode() == 200

    def task_runner(self):
        """Worker loop: pop and execute queued jobs until the queue is empty.

        pop(0) inside try/except closes the check-then-pop race the old
        `while len(...) > 0` test had when several workers drain the list.
        """
        while True:
            try:
                task = self._task_list.pop(0)
            except IndexError:
                break
            task['fun'](task['args'])

    def run(self, threads, site):
        """Scrape `site` ("1" = kuaidaili, "2" = xicidaili) with N threads."""
        self._threads = threads

        if site == "1":
            self.get_proxy_from_kuaidaili()
        elif site == "2":
            self.get_proxy_from_xicidaili()
        else:
            print("Input error", file=sys.stderr)
            return

        all_thread = []
        for i in range(threads):
            t = threading.Thread(target=self.task_runner)
            all_thread.append(t)
            t.start()

        for t in all_thread:
            t.join()

    def write_to_file(self, proxys):
        """Append proxies to proxy.csv, one 'v1,v2,v3,' line per proxy.

        Thread-safe: serialized by _file_lock; `with` guarantees the lock
        and the file are released even if a write raises.
        """
        with self._file_lock:
            with open('proxy.csv', 'a') as f:
                for proxy in proxys:
                    # Same layout as before: every value followed by a comma.
                    f.write(''.join(str(p) + ',' for p in proxy) + '\n')

if __name__ == "__main__":
    # Entry point: scrape xicidaili ("2") with 10 worker threads.
    scraper = ProxyRequire()
    scraper.run(10, '2')

