#coding:utf-8
from urllib import request
from fake_useragent import FakeUserAgent
from bs4 import BeautifulSoup
import re
import time
from thread_pool import thread_pool

def chekout_proxy(ip):
    """Probe one proxy by fetching a test page through it.

    ip: an "host:port" string.
    Returns the proxy mapping ({'http': ip}) when the test URL answers
    HTTP 200 through the proxy, otherwise None.
    """
    proxies = {'http': ip}
    handler = request.ProxyHandler(proxies)
    opener = request.build_opener(handler)
    ua = FakeUserAgent()
    url = 'http://movie.douban.com/'
    headinfo = {'User-Agent': ua.random}
    reqhd = request.Request(url, headers=headinfo)
    try:
        # Use a context manager so the socket is always closed
        # (the original leaked the response object).
        with opener.open(reqhd, timeout=3) as res:
            if res.code == 200:
                print('valid ip:', proxies)
                return proxies
    except Exception:
        # Dead/slow proxies are expected here; silently skip them.
        return None
    return None

class GetProxy(object):
    """Scrape http proxy candidates ("ip:port") from a paginated listing site."""

    def __init__(self, url=''):
        self.baseurl = url          # listing-site root, e.g. http://www.ip3366.net/
        self.ua = FakeUserAgent()   # supplies rotating User-Agent strings
        self.pools = []             # collected "ip:port" strings

    def getIps(self):
        """Return the list of proxies collected so far."""
        return self.pools

    def getCharset(self, content):
        """Best-effort charset sniff from the page's <meta> tag.

        content: raw page bytes (or str).
        Returns the declared charset, or 'utf-8' when none is found.
        """
        scon = str(content)
        meta = re.search(r'<meta(.*?)charset(.*?)>', scon, re.I)
        if meta:
            # Match both <meta charset="gb2312"> and the http-equiv form
            # content="text/html; charset=gb2312" (quotes optional — the
            # original required quotes and missed the second form).
            m = re.search(r'charset=["\']?([\w-]+)', meta.group(), re.I)
            if m:
                return m.group(1)
        return 'utf-8'

    def reqPage(self, url):
        """Fetch *url* and return its decoded text, or None on any failure."""
        time.sleep(2)  # be polite: throttle consecutive requests
        # BUG FIX: header key was 'UserAgent', so the UA was never sent.
        headinfo = {'User-Agent': self.ua.random}
        reqhd = request.Request(url, headers=headinfo)
        try:
            res = request.urlopen(reqhd)
        except Exception as e:
            print('Error:', e)
            # BUG FIX: missing return — original fell through and hit an
            # unbound 'res', raising UnboundLocalError.
            return None
        with res:  # always close the response socket
            if res.code != 200:
                return None
            con = res.read()

        charset = self.getCharset(con)
        print(charset)
        try:
            return con.decode(charset)
        except Exception as e:
            print('decode Error:', e)
            return None

    def parsePage(self, url):
        """Extract ip:port pairs from one listing page into self.pools."""
        con = self.reqPage(url)
        if con is None:
            return  # fetch failed; skip this page instead of crashing BeautifulSoup
        obj = BeautifulSoup(con, 'html5lib')
        table = obj.find('table', class_="table table-bordered table-striped")
        if table is None:
            return  # page layout changed or no results
        tbody = table.find('tbody')
        if tbody is None:
            return
        for tr in tbody.find_all('tr'):
            tds = list(tr.stripped_strings)
            # First two cells of each row are the IP and the port.
            ip = ':'.join(tds[:2])
            print(ip)
            self.pools.append(ip)

    def start(self):
        """Crawl the pagination nav of the base page, then parse every page."""
        con = self.reqPage(self.baseurl)
        if con is None:
            return  # base page unreachable; nothing to do
        obj = BeautifulSoup(con, 'html5lib')
        div = obj.find('div', id="listnav")

        urls = []
        if div:
            # The current page number is rendered in a red <font> tag,
            # wrapped in brackets — strip them with [1:-1].
            curPage = div.find('font', color="#FF0000")
            if curPage:
                page = curPage.getText()[1:-1]
                urls.append(f"?stype=1&page={page}")

            # Remaining pages are plain <a href="?stype=1&page=N"> links.
            for a in div.find_all('a'):
                step = a.get('href')
                if step:
                    urls.append(step)

        for url in urls:
            print(url)
            self.parsePage(self.baseurl + url)

if __name__ == '__main__':
    # 1) Scrape candidate proxies from the listing site.
    url = 'http://www.ip3366.net/'
    getProxy = GetProxy(url=url)
    getProxy.start()

    ips = getProxy.getIps()
    print(len(ips))

    # 2) Validate every candidate concurrently through a worker pool.
    #    (removed the unused 'validips' list from the original)
    tpools = thread_pool(50)
    for ip in ips:
        tpools.add_task(chekout_proxy, ip)

    stime = time.time()
    tpools.start()
    tpools.join()
    etime = time.time()

    rs = tpools.get_result()
    print('valid ips:', len(rs))
    for ip in rs:
        print(ip)
    # Report elapsed time instead of two raw epoch timestamps.
    print(f'checked {len(ips)} proxies in {etime - stime:.2f}s')

