#-*- coding:utf8 -*-
import urllib2
import redis
import time
from lxml import etree
import requests
# Redis connection settings; the script uses two lists in this db:
#   'proxys'   - raw scraped proxies waiting to be probed
#   'UIPProxy' - proxies that passed the liveness check
host = 'localhost'
port = '6379'
db = 0
r = redis.Redis(host=host, port=port, db=db)
# proxy is a string, example: 'HTTP://175.16.220.62:80'
def testWriteProxy(proxy):
    proxy_handler = urllib2.ProxyHandler({'http':proxy.split('//')[1], 'https':proxy.split('//')[1]})
    opener = urllib2.build_opener(proxy_handler)
    urllib2.install_opener(opener)
    # request website
    try:
        response = urllib2.urlopen("http://ip.chinaz.com/getip.aspx", timeout=3).read()
        rl = len(response)
        if rl>33 and rl<128:
          print proxy
          r.lpush('UIPProxy', proxy)
    except Exception as e:
        pass
        # print e
        # print "·········打开失败·········"
        # print "·······当前ip不可用·······"

def downloadPage(url):
    headers = {'User-Agent':'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.106 Safari/537.36'}
    req = urllib2.Request(url, headers=headers) #建立一个请求
    while True:
        try:
            pagecode = urllib2.urlopen(req).read()
            time.sleep(5)
            break
        except Exception as e:
            print e
            time.sleep(5)
    return pagecode
def processXiciPage(pagecode):
    html = etree.HTML(pagecode)
    trs = html.xpath('//tr')
    for tr in trs[1:-1]:
        proxy = tr.xpath('./td[6]/text()')[0]+'://'+tr.xpath('./td[2]/text()')[0]+':'+tr.xpath('./td[3]/text()')[0]
        r.lpush('proxys',proxy)
    pageurl = html.xpath('//*[@class="next_page"]/@href')[0]
    print pageurl
    if pageurl !='' and pageurl!='/nn/2':
        pagecode = downloadPage("http://www.xicidaili.com"+pageurl)
        processXiciPage(pagecode)

if __name__ == '__main__':
    pagecode = downloadPage("http://www.xicidaili.com/nn/")
    processXiciPage(pagecode)
    print "complete 1"

    while True:
        proxy = r.rpop('proxys')
        if proxy == None:
            break
        testWriteProxy(proxy)
    print "complete 1"
   # for proxy in r.lrange('UIPProxy', 1,100):
   #      if proxy == None:
   #          break
   #      print proxy
   #      try:
   #          print len(requests.get('http://www.q7g.top/ip.html', proxies={'https':proxy, 'http':proxy}).text)
   #      except Exception as e:
   #          continue
   #      #testWriteIp(proxy)
    # proxy = '218.72.108.238:18118'
    # headers = {'User-Agent':'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.106 Safari/537.36'}
    # print requests.get('https://www.q7g.top/ip.html' ,headers=headers, proxies={'https':proxy, 'http':proxy}).text