#coding:utf-8
import  settings
import requests
import random
from  bs4 import BeautifulSoup
class Proxies(object):
    """Scrape HTTP proxies from xicidaili.com listing pages and persist them.

    Side effects of :meth:`get_all`: overwrites ``xici.html`` with the raw
    HTML of the last fetched page and appends ``'ip:port',`` lines to
    ``ip.txt``.
    """

    # Echo-your-IP service used to verify that a proxy actually works.
    test_url = 'http://ip.chinaz.com/getip.aspx'
    pages = settings.PAGES
    # One listing URL per page: base URL + page number (1..pages).
    urls = [settings.XICI_HTTPS_PROXIES_URL + str(i) for i in range(1, pages + 1)]

    def __init__(self):
        # Collected 'ip:port' strings (populated by callers / manual runs).
        self.proxies = []
        # Browser-like headers; xicidaili blocks requests without a UA.
        self.headers = {
            'Host': 'www.xicidaili.com',
            'Connection': 'keep-alive',
            'Cache-Control': 'max-age=0',
            'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.100 Safari/537.36',
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
            'Accept-Encoding': 'gzip, deflate',
            'Accept-Language': 'zh-CN,zh;q=0.8'
        }

    def get_all(self):
        """Fetch every listing page through one randomly chosen proxy from
        settings.IP_POOLS, dump the raw HTML to xici.html and append the
        scraped 'ip:port' entries to ip.txt.
        """
        proxy_pool = ['http://' + ip for ip in settings.IP_POOLS]
        proxy_http = random.choice(proxy_pool)
        for url in self.urls:
            html_doc = requests.get(url, headers=self.headers,
                                    proxies={'https': proxy_http}).content
            # .content is bytes -> write in binary mode, and only once per
            # page (the original rewrote this file inside the row loop).
            with open('xici.html', 'wb') as f:
                f.write(html_doc)
            soup = BeautifulSoup(html_doc, 'lxml')
            # First <tr> is the table header; data rows follow.
            rows = soup.find(id='ip_list').findAll('tr')[1:]
            # Open the output once per page instead of once per row.
            with open('ip.txt', 'a') as f:
                for tr in rows:
                    tds = tr.findAll('td')
                    # Columns: tds[1] = IP address, tds[2] = port.
                    f.write("'" + tds[1].text + ':' + tds[2].text + "',\n")

    def get_all_available_proxy(self, proxies):
        """Return True if test_url responds OK through *proxies*.

        *proxies* is a requests-style mapping, e.g. {'http': 'http://ip:port'}.
        A timeout keeps a dead proxy from hanging the check forever.
        """
        return requests.get(url=self.test_url, proxies=proxies, timeout=10).ok




if __name__ == '__main__':
    # Scrape all configured listing pages; results land in xici.html / ip.txt.
    # (A commented-out ad-hoc proxy-liveness checker that used Python-2-only
    # syntax was removed here; use Proxies.get_all_available_proxy instead.)
    proxy = Proxies()
    proxy.get_all()


