# coding:utf-8
#!/usr/bin/env python
# Time    : 2021-04-11
# Author  : FightingForWhat
# FileName: proxy_pool.py

import time
import config as cfg
import requests
from lxml import etree
import datetime


class IPFactory:
    """Scrape free-proxy listing sites, validate the proxies against a live
    URL, and persist the working ones to ``proxy_record.txt``."""

    def __init__(self):
        # Crawl/validation settings come from the project-level config module.
        self.page_num = cfg.page_num      # pages to fetch per listing site
        self.round = cfg.examine_round    # validation passes
        self.timeout = cfg.timeout        # per-request timeout (seconds)
        # Every ip:port ever crawled; used to de-duplicate across crawls.
        self.all_ip = set()

    def get_content(self, url, url_xpath, port_xpath, timeout=4):
        """Fetch one listing page and extract fresh ``ip:port`` strings.

        :param url: page URL to fetch.
        :param url_xpath: XPath selecting the IP column text nodes.
        :param port_xpath: XPath selecting the port column text nodes.
        :param timeout: request timeout in seconds (default 4, as before).
        :return: list of proxies not previously seen in ``self.all_ip``;
                 empty list on any network/parse error.
        """
        ip_list = []
        headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko'}

        try:
            response = requests.get(url, headers=headers, timeout=timeout)
            tree = etree.HTML(response.text)

            hosts = [h.strip() for h in tree.xpath(url_xpath)]
            ports = [p.strip() for p in tree.xpath(port_xpath)]

            # Only pair the columns up when both extractions agree in length;
            # otherwise the page layout changed and pairing would be wrong.
            if len(hosts) == len(ports):
                for host, port in zip(hosts, ports):
                    full_ip = host + ":" + port
                    # Skip anything already recorded by an earlier crawl.
                    if full_ip not in self.all_ip:
                        ip_list.append(full_ip)
        except Exception as e:
            print('get proxies error: ', e)

        return ip_list

    def _crawl_site(self, site_name, page_urls, url_xpath, port_xpath, sink):
        """Crawl every page URL of one site, adding results to ``sink`` and
        to the lifetime ``self.all_ip`` record."""
        print('[+] Crawling from ' + site_name)
        for url in page_urls:
            results = self.get_content(url, url_xpath, port_xpath)
            self.all_ip.update(results)
            sink.update(results)
            # Be polite: pause 0.5 s between page fetches.
            time.sleep(0.5)

    def get_all_ip(self):
        """Aggregate proxies crawled from all supported listing sites.

        ``self.all_ip`` keeps every proxy ever seen across calls; the
        returned set holds only the proxies found during this crawl.
        """
        current_all_ip = set()

        # www.66ip.cn — first page is 'index.html', then '2.html', '3.html'…
        pages = ['index' if i == 0 else str(i + 1) for i in range(self.page_num)]
        self._crawl_site(
            'www.66ip.cn',
            ['http://www.66ip.cn/' + page + '.html' for page in pages],
            '/html/body/div[last()]//table//tr[position()>1]/td[1]/text()',
            '/html/body/div[last()]//table//tr[position()>1]/td[2]/text()',
            current_all_ip,
        )

        # www.89ip.cn — IP and port share the same row path, columns 1 and 2.
        row_89 = ('//html/body/div[@class="layui-row layui-col-space15"]'
                  '/div[@class="layui-col-md8"]/div[@class="fly-panel"]'
                  '/div[@class="layui-form"]/table[@class="layui-table"]/tbody/tr/')
        self._crawl_site(
            'www.89ip.cn',
            ['https://www.89ip.cn/index_' + str(i + 1) + '.html' for i in range(self.page_num)],
            row_89 + 'td[1]/text()',
            row_89 + 'td[2]/text()',
            current_all_ip,
        )

        # ip.jiangxianli.com
        row_jxl = ('//html/body/div[@class="layui-layout layui-layout-admin"]'
                   '/div[@class="layui-row"]/div[@class="layui-col-md9 ip-tables"]'
                   '/div[@class="layui-form"]/table[@class="layui-table"]/tbody/tr/')
        self._crawl_site(
            'ip.jiangxianli.com',
            ['https://ip.jiangxianli.com/?page=' + str(i + 1) for i in range(self.page_num)],
            row_jxl + 'td[1]/text()',
            row_jxl + 'td[2]/text()',
            current_all_ip,
        )

        # www.kuaidaili.com
        self._crawl_site(
            'www.kuaidaili.com',
            ['http://www.kuaidaili.com/free/inha/' + str(i + 1) + '/' for i in range(self.page_num)],
            '//td[@data-title="IP"]/text()',
            '//td[@data-title="PORT"]/text()',
            current_all_ip,
        )

        return current_all_ip

    def get_valid_ip(self, ip_set, timeout):
        """Probe each proxy and return the subset that works.

        :param ip_set: iterable of ``ip:port`` strings to test.
        :param timeout: per-probe timeout in seconds.
        :return: set of proxies that answered with HTTP 200 in time.
        """
        # Probe target.
        url = 'https://www.baidu.com'

        results = set()

        for p in ip_set:
            proxy = {'http': 'http://' + p}
            try:
                start = time.time()
                r = requests.get(url, proxies=proxy, timeout=timeout)
                elapsed = time.time() - start
                # BUGFIX: Response.text is never None, so the old
                # ``r.text is not None`` check accepted every response.
                # Require a 200 status instead.
                if r.status_code == 200:
                    print('succeed: ' + p + '\t' + " in " + format(elapsed, '0.2f') + 's')
                    results.add(p)
            except OSError:
                # requests.RequestException subclasses OSError/IOError, so
                # this covers timeouts, refused connections and bad proxies.
                print('timeout:', p)

        return results

    def get_the_best(self, valid_ip, timeout, round):
        """Re-validate ``valid_ip`` over several passes, 30 s apart, to weed
        out proxies that only work briefly ("the glorious 15 minutes").

        :param valid_ip: initial candidate set.
        :param timeout: per-probe timeout (seconds).
        :param round: number of validation passes (note: shadows the
                      ``round`` builtin; name kept for caller compatibility).
        :return: proxies that survived every pass.
        """
        for i in range(round):
            print("\n>>>>>>>\tRound\t" + str(i + 1) + "\t<<<<<<<<<<")
            # Each pass keeps only the proxies that still respond.
            valid_ip = self.get_valid_ip(valid_ip, timeout)
            # Wait between passes, but not after the last one.
            if i < round - 1:
                time.sleep(30)

        return valid_ip

    def save_to_text(self, valid_ips):
        """Persist validated proxies to ``proxy_record.txt``, overwriting
        the previous contents and de-duplicating as it writes."""
        if len(valid_ips) == 0:
            print("本次没有抓到可用ip。")
            return

        print("\n>>>>>>>>>>>>>>>>>>>> 代理数据入库处理 Start  <<<<<<<<<<<<<<<<<<<<<<\n")

        proxy_record_list = []

        try:
            # BUGFIX: 'with' guarantees the file is closed even if a write
            # raises (the old code leaked the handle on error).
            with open('proxy_record.txt', 'w') as doc_result:
                for item in valid_ips:
                    if item not in proxy_record_list:
                        proxy_record_list.append(item)
                        print(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") + " " + item + " 添加成功。")
                        doc_result.write(item + '\n')
                    else:
                        print(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") + " " + item + " 已存在。")
        except Exception as e:
            print("添加失败：" + str(e))

        print("\n>>>>>>>>>>>>>>>>>>>> 代理数据入库处理 End  <<<<<<<<<<<<<<<<<<<<<<\n")

    def get_proxies(self):
        """Load previously saved proxies from ``proxy_record.txt``.

        :return: list of ``ip:port`` strings (whitespace stripped); empty
                 list when the file is missing, empty, or unreadable.
        """
        ip_list = []

        try:
            # BUGFIX: the old code leaked the file handle on the empty-file
            # path (close() sat inside the else branch) and returned lines
            # with their trailing newline still attached.
            with open('proxy_record.txt', 'r') as proxy_record_open:
                ip_list = [line.strip() for line in proxy_record_open if line.strip()]
        except Exception as e:
            print("从文件获取ip失败！", e)

        return ip_list


def logo():
    """Print the start-up ASCII-art banner and return None."""
    banner = r'''
                                                        .__   
_____________  _______  ______.__. ______   ____   ____ |  |  
\____ \_  __ \/  _ \  \/  <   |  | \____ \ /  _ \ /  _ \|  |  
|  |_> >  | \(  <_> >    < \___  | |  |_> >  <_> |  <_> )  |__
|   __/|__|   \____/__/\_ \/ ____| |   __/ \____/ \____/|____/
|__|                     \/\/      |__|                               
        
        '''
    print(banner)
    return

def main():
    """Entry point: crawl the listing sites, validate the proxies over
    several rounds, and persist the survivors to disk."""
    pool = IPFactory()
    logo()
    # Proxies discovered during this crawl only.
    fresh_ips = pool.get_all_ip()
    # Keep only proxies that survive repeated validation rounds.
    survivors = pool.get_the_best(fresh_ips, cfg.timeout, cfg.examine_round)
    pool.save_to_text(survivors)


if __name__ == '__main__':
    main()
