import requests
from scrapy.selector import Selector
import MySQLdb

# Module-level MySQL connection and cursor, shared by crawl_ips() and GetIp.
# NOTE(review): credentials and host are hard-coded — consider loading from
# config/environment instead of source.
# NOTE(review): charset "utf8" is MySQL's 3-byte subset; confirm whether
# "utf8mb4" is required for the stored data.
conn = MySQLdb.connect(host="127.0.0.1", user="cai", passwd="cai", db="article_spider", charset="utf8")
cursor = conn.cursor()


def crawl_ips():
    """Crawl free proxy IPs from xicidaili.com and store them in MySQL.

    Walks the first 100 listing pages, extracts (ip, port, type, speed)
    from each table row, and inserts the rows into the `proxy_ip` table
    via the module-level `conn` / `cursor`.
    """
    # Plain browser User-Agent; the site rejects the default requests UA.
    headers = {
        "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.110 Safari/537.36"
    }

    for i in range(100):
        response = requests.get("https://www.xicidaili.com/nn/{0}".format(i + 1), headers=headers)

        # Use scrapy's Selector to extract data from the raw HTML.
        selector = Selector(text=response.text)
        # position()>1 skips the table header row.
        all_trs = selector.xpath("//table[@id='ip_list']/tr[position()>1]")
        ip_list = []
        for tr in all_trs:
            # BUG FIX: `speed` was previously assigned only when speed_str
            # was truthy, so the first row without a speed bar raised
            # NameError and later rows silently reused a stale value.
            speed = 0.0
            speed_str = tr.css(".bar::attr(title)").extract_first()
            if speed_str:
                speed = float(speed_str.split("秒")[0])
            all_texts = tr.css("td::text").extract()
            # Skip malformed rows that lack the expected number of cells
            # (previously an IndexError).
            if len(all_texts) < 6:
                continue
            ip = all_texts[0]
            port = all_texts[1]
            proxy_type = all_texts[5]

            ip_list.append((ip, port, proxy_type, speed))

        for ip_info in ip_list:
            # Parameterized INSERT: the previous string-formatted SQL was
            # vulnerable to injection from scraped page content, and used
            # the nonstandard "insert proxy_ip" (missing INTO).
            cursor.execute(
                "insert into proxy_ip (ip, port, speed, proxy_type) values (%s, %s, %s, %s)",
                (ip_info[0], ip_info[1], ip_info[3], ip_info[2]),
            )
            conn.commit()


class GetIp(object):
    """Fetch random proxy IPs from MySQL and validate them with a live request.

    Uses the module-level `conn` / `cursor`; proxies that fail validation
    are deleted from the `proxy_ip` table.
    """

    def delete_ip(self, ip):
        """Delete the given ip from the proxy_ip table.

        Always returns True (kept for caller compatibility).
        """
        # Parameterized DELETE: the previous string-formatted SQL was open
        # to injection through the ip value.
        sql = "delete from proxy_ip where ip = %s"
        print("delete sql: {0}".format(sql))
        cursor.execute(sql, (ip,))
        conn.commit()
        return True

    def judge_ip(self, ip, port):
        """Probe baidu.com through the proxy.

        Returns True when the proxied request gets a 2xx response;
        otherwise deletes the proxy from the table and returns False.
        """
        http_url = "http://www.baidu.com"
        proxy_url = "http://{0}:{1}".format(ip, port)
        proxy_dict = {
            "http": proxy_url
        }
        try:
            response = requests.get(http_url, proxies=proxy_dict, timeout=3)
            print("response: ", end="")
            print(response)
        except requests.RequestException:
            # Narrowed from bare `Exception`: only network/HTTP errors mean
            # the proxy is dead; programming errors should still surface.
            print("该代理ip无效，ip: {0}, port: {1}".format(ip, port))
            self.delete_ip(ip)
            return False
        else:
            code = response.status_code
            # Chained comparison is the idiomatic 2xx check.
            if 200 <= code < 300:
                print(response)
                print("该代理有效，ip: {0}, port: {1}".format(ip, port))
                return True
            else:
                print(response)
                print("该代理ip无效，ip: {0}, port: {1}".format(ip, port))
                self.delete_ip(ip)
                return False

    def get_random_ip(self):
        """Pick one random HTTP proxy from the table and validate it."""
        sql = "select `ip`,`port` from proxy_ip where proxy_type = 'HTTP' order by rand() limit 1"
        # execute()'s return value (row count) was bound to an unused local.
        cursor.execute(sql)
        for ip_info in cursor.fetchall():
            ip = ip_info[0]
            port = ip_info[1]
            print("ip: {0}, port: {1}".format(ip, port))
            # Validate the proxy; judge_ip deletes it if it is dead.
            self.judge_ip(ip, port)


# print(crawl_ips())

if __name__ == '__main__':
    # Reuse one GetIp instance: the class holds no per-instance state, so
    # constructing a fresh object on each of the 400 iterations was waste.
    ip_getter = GetIp()
    for _ in range(400):
        ip_getter.get_random_ip()