import requests
import MySQLdb
# Load the Selector from the scrapy framework
from scrapy.selector import Selector

# Module-level MySQL connection and cursor shared by crawl_ips() and GetIp.
# NOTE(review): credentials are hard-coded and charset is "utf8" (3-byte);
# 4-byte characters (emoji etc.) would fail to store — confirm whether
# utf8mb4 is needed for this table.
conn = MySQLdb.connect(host="127.0.0.1",user="root",passwd="root",db="article_spider",charset="utf8")
cursor = conn.cursor()
def crawl_ips():
    """Crawl the xicidaili.com free-proxy list and upsert rows into proxy_ip.

    Scrapes ip, port, proxy type and speed from the '#ip_list' table on
    http://www.xicidaili.com/nn/ and writes them to MySQL with an
    INSERT ... ON DUPLICATE KEY UPDATE so re-runs refresh existing rows.

    Uses the module-level `cursor`/`conn`. Returns None.
    """
    headers = {"User-Agent": "Mozilla/5.0 (X11; U; Linux x86_64; zh-CN; rv:1.9.2.10) Gecko/20100922 Ubuntu/10.10 (maverick) Firefox/3.6.10"}
    # renamed from `re` to avoid shadowing the stdlib module name
    response = requests.get('http://www.xicidaili.com/nn/', headers=headers)
    selector = Selector(text=response.text)
    all_trs = selector.css('#ip_list tr')
    ip_list = []
    for tr in all_trs[1:]:  # skip the table header row
        # extract_first() returns None instead of raising IndexError when the
        # .bar element is missing from a row.
        speed_str = tr.css('.bar::attr(title)').extract_first()
        # BUG FIX: the original left `speed` unbound (NameError) when the
        # first row had no speed title, and silently reused the previous
        # row's speed for later such rows. Default to 0.0 instead.
        speed = float(speed_str.split("秒")[0]) if speed_str else 0.0
        all_texts = tr.css('td::text').extract()
        if len(all_texts) < 6:
            continue  # malformed row; original would raise IndexError here
        ip = all_texts[0]
        port = all_texts[1]
        proxy_type = all_texts[5]
        ip_list.append((ip, port, proxy_type, speed))
    sql = """
    insert into proxy_ip set ip=%s,port=%s,proxy_type=%s,speed=%s 
    ON DUPLICATE KEY UPDATE
    ip=VALUES(ip),port=VALUES(port),proxy_type=VALUES(proxy_type),speed=VALUES(speed) 
    """
    # Batch all rows in one executemany + single commit instead of a
    # per-row execute/commit round-trip.
    cursor.executemany(sql, ip_list)
    conn.commit()
    print(ip_list)
    # ON DUPLICATE KEY UPDATE tutorial (reference links):
    # http://www.jb51.net/article/39255.htm
    # http://www.cnblogs.com/brookin/archive/2013/03/19/2969844.html

class GetIp(object):
    """Pick a random proxy from the proxy_ip table and validate it before use.

    Uses the module-level `cursor`/`conn`. Dead proxies are deleted from the
    table as they are discovered.
    """

    def get_random_ip(self):
        """Return a working proxy URL string, e.g. "http://1.2.3.4:80".

        Recurses until a proxy passes judge_ip(). NOTE(review): if the
        proxy_ip table is empty, fetchone() returns None and this raises
        RuntimeError instead of the original's opaque TypeError.
        """
        sql = """
        SELECT ip,port,proxy_type FROM proxy_ip ORDER BY RAND() LIMIT 1
        """
        cursor.execute(sql)
        row = cursor.fetchone()
        if row is None:
            # Explicit failure instead of TypeError on row[0] below.
            raise RuntimeError("proxy_ip table is empty; run crawl_ips() first")
        ip, port, proxy_type = row
        if self.judge_ip(ip, port, proxy_type):
            scheme = "https" if proxy_type == "HTTPS" else "http"
            return "{0}://{1}:{2}".format(scheme, ip, port)
        # Dead proxy was removed by judge_ip(); try another one.
        return self.get_random_ip()

    def judge_ip(self, ip, port, proxy_type):
        """Check the proxy by fetching baidu through it; delete it on failure.

        Returns True when the request succeeds with a 2xx status, False
        otherwise (after removing the proxy from the table).
        """
        http_url = "https://www.baidu.com"
        scheme = "https" if proxy_type == "HTTPS" else "http"
        proxy_url = "{0}://{1}:{2}".format(scheme, ip, port)
        # BUG FIX: the test URL is https, so requests only consults the
        # "https" key of the proxies dict. The original set only {"http": ...}
        # for HTTP proxies, meaning the proxy was never actually used and the
        # check validated the local network instead. Map both schemes.
        proxy_dict = {"http": proxy_url, "https": proxy_url}
        try:
            # BUG FIX: added a timeout — the original could hang forever on a
            # dead proxy.
            response = requests.get(http_url, proxies=proxy_dict, timeout=10)
        except Exception:
            print("代理ip失效:%s" % proxy_url)
            self.detail_ip(ip)
            return False
        code = response.status_code
        if 200 <= code < 300:
            print("可用IP:%s,code:%s" % (proxy_url, code))
            return True
        print("代理ip失效:%s,code:%s" % (proxy_url, code))
        self.detail_ip(ip)
        return False

    def detail_ip(self, ip):
        """Delete a dead proxy row by ip.

        Name kept for backward compatibility — presumably a typo for
        "delete_ip"; callers may already reference it.
        """
        # SECURITY FIX: parameterized query instead of str.format —
        # the original was vulnerable to SQL injection / quoting breakage.
        sql = "delete from proxy_ip where ip=%s"
        cursor.execute(sql, (ip,))
        conn.commit()

if __name__ == "__main__":
    # Refresh the proxy table when run as a script.
    crawl_ips()
    # get_ip = GetIp()
    # get_ip.get_random_ip()