import requests
from scrapy.selector import Selector

from kkspider.kkspider.utils.mysqlUtil import MysqlUtil


class GetIp(object):
    """Pick and validate proxy IPs stored in the MySQL ``ip_proxy`` table."""

    def delete_ip(self, ip):
        """Remove an unusable proxy row from ip_proxy.

        Uses a parameterized statement (same MysqlUtil API crawl_ips uses
        for inserts) instead of str.format, so a hostile ``ip`` value
        cannot inject SQL.
        """
        delete_sql = "delete from ip_proxy WHERE ip=%s"
        mysqlutil = MysqlUtil()
        # NOTE(review): assumes MysqlUtil.execute_with_param binds params
        # server-side, as it does for the insert in crawl_ips — confirm.
        mysqlutil.execute_with_param(sql=delete_sql, params=(ip,))

    def judge_ip(self, ip, port):
        """Check whether a proxy works by fetching a known URL through it.

        Returns True on a 2xx response; on any request failure or non-2xx
        status the proxy is deleted from the database and False is returned.
        """
        http_url = "http://www.baidu.com"
        proxy_url = "http://{0}:{1}".format(ip, port)
        try:
            proxy_dict = {
                "http": proxy_url,
            }
            # timeout so a dead proxy cannot hang the caller forever;
            # catch only requests' errors, not every Exception
            response = requests.get(http_url, proxies=proxy_dict, timeout=10)
        except requests.RequestException:
            print("invalid ip and port")
            self.delete_ip(ip)
            return False
        else:
            code = response.status_code
            if 200 <= code < 300:
                print("effective ip")
                return True
            print("invalid ip and port")
            self.delete_ip(ip)
            return False

    def get_random_ip(self, max_retries=5):
        """Return a working proxy URL sampled at random from the database.

        Retries up to ``max_retries`` times (failed proxies are deleted by
        judge_ip, so each retry samples from a shrinking pool). Returns
        None when the table is empty or every attempt fails — the previous
        unbounded recursion would hit RecursionError once the table ran dry.
        """
        mysqlutil = MysqlUtil()
        query_sql = """
          select ip,port from ip_proxy
          ORDER BY RAND() limit 1
        """
        for _ in range(max_retries):
            result = mysqlutil.execute_without_param(query_sql)
            if not result:
                # table is empty — nothing left to validate
                break
            ip, port = result[0], result[1]
            if self.judge_ip(ip, port):
                proxy = "http://{0}:{1}".format(ip, port)
                print(proxy)
                return proxy
        return None


def crawl_ips():
    """Crawl xicidaili's free high-anonymity proxy pages into ip_proxy.

    Walks listing pages 0..99, parses each table row and inserts
    (ip, port, proxy_type, address, use_time) via a parameterized query.
    """
    headers = {
        "User-Agent": "Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.186 Mobile Safari/537.36"
    }
    mysqlutil = MysqlUtil()
    # national high-anonymity proxy listing (/nn/), pages 0..99
    for page in range(100):
        # `resp` instead of `re` — the original name shadowed the stdlib
        # re module; timeout keeps one dead page from stalling the crawl
        resp = requests.get(
            "http://www.xicidaili.com/nn/{0}".format(page),
            headers=headers,
            timeout=10,
        )
        selector = Selector(text=resp.text)
        all_trs = selector.css("#ip_list tr")
        # skip the table header row
        for tr in all_trs[1:]:
            all_texts = tr.css("td::text").extract()
            # malformed rows (ads, separators) lack the 11 expected cells;
            # the original indexed blindly and crashed with IndexError
            if len(all_texts) < 11:
                continue
            address = tr.css("td a::text").extract_first()
            ip = all_texts[0]
            port = all_texts[1]
            proxy_type = all_texts[5]
            use_time = all_texts[10]
            # (the original also parsed a `speed` value from the .bar title
            # attribute but never used it, and extract()[0] crashed on rows
            # without a speed bar — dropped)
            insert_sql = """
                insert into ip_proxy(ip,port,proxy_type,address,use_time)
                VALUES (%s,%s,%s,%s,%s)
            """
            params = (ip, port, proxy_type, address, use_time)
            mysqlutil.execute_with_param(sql=insert_sql, params=params)


# Script entry point: sample and validate one random proxy from the DB.
if __name__ == "__main__":
    ip_fetcher = GetIp()
    ip_fetcher.get_random_ip()
