# -*-coding: Utf-8 -*-
# @File : spider_ip.py
# author: Chimengmeng
# blog_url : https://www.cnblogs.com/dream-ze/
# Time：2023/10/10
import json
import os
import random
import time

from settings import db_settings
import requests
from lxml import etree
from fake_useragent import UserAgent
from database.db_handler import MysqlController

mysql_conn = MysqlController()


class SpiderIp():
    """Scrape free HTTP proxies from www.89ip.cn and persist them.

    Results can be saved as JSON (``save_to_json``), inserted into MySQL
    (``save_to_mysql``), or appended to a text file (``save_to_text``).
    """

    def __init__(self):
        # Randomize the User-Agent per instance to reduce the chance of
        # the listing site blocking repeated requests.
        self.headers = {
            'User-Agent': UserAgent().random
        }

    def pre_tag_url(self, pages=9):
        """Build the list of listing-page URLs to crawl.

        :param pages: number of listing pages to visit (default 9, which
                      matches the original hard-coded ``range(1, 10)``).
        :return: list of URLs; page 1 is the site root, the rest follow
                 the ``/index_{i}.html`` pattern.
        """
        tag_url_list = ['https://www.89ip.cn/']
        tag_url_list.extend(
            f'https://www.89ip.cn/index_{i}.html' for i in range(2, pages + 1)
        )
        return tag_url_list

    def spider_ip_port_data(self):
        """Crawl every listing page and collect proxies.

        :return: list of dicts shaped ``{'http': 'http://<ip>:<port>'}``.
                 Pages that fail to download or parse are skipped
                 (best-effort crawl).
        """
        proxies_list = []
        start_time = time.time()
        for tag_url in self.pre_tag_url():
            try:
                print(f'当前正在爬取 :>>> {tag_url}')
                print(f'上分一页耗时 :>>> {time.time() - start_time} s')
                # Bug fix: reset the timer each iteration so the message
                # above reports per-page elapsed time, not the cumulative
                # time since the crawl started.
                start_time = time.time()
                # Bug fix: without a timeout one hung page stalls the
                # entire crawl forever.
                response = requests.get(url=tag_url, headers=self.headers,
                                        timeout=10)
                tree = etree.HTML(response.text)
                # XPath matches the site's proxy table rows; ip/port live
                # in the first two cells.
                tr_list = tree.xpath('//div[3]/div[1]/div/div[1]/table/tbody/tr')
                for tr in tr_list:
                    ip = tr.xpath('./td[1]/text()')[0].strip()
                    port = tr.xpath('./td[2]/text()')[0].strip()
                    proxies_list.append({'http': f'http://{ip}:{port}'})
            except Exception as e:
                # Best-effort: log the failing page and continue.
                print(f'当前正在爬取 :>>> {tag_url} , 问题 :>>> {e}')
                continue
        return proxies_list

    def save_proxy(self, data):
        """Overwrite ``proxies.json`` under DATABASE_DIR with *data*."""
        file_path = os.path.join(db_settings.DATABASE_DIR, 'proxies.json')
        with open(file_path, 'w', encoding='utf8') as fp:
            json.dump(data, fp)

    def save_to_json(self):
        """Crawl and dump all proxies to JSON, keyed 1..n.

        Bug fix: the original loop started at index 1 and silently
        dropped the first scraped proxy; ``enumerate(..., start=1)``
        keeps the 1-based keys while including every entry.
        """
        proxies_list = self.spider_ip_port_data()
        data = {i: proxy for i, proxy in enumerate(proxies_list, start=1)}
        self.save_proxy(data)

    def save_to_mysql(self):
        """Crawl and insert each proxy's ``str()`` repr into MySQL.

        Closes the module-level connection when done.
        """
        proxies_list = self.spider_ip_port_data()
        for proxy in proxies_list:
            # NOTE(review): string-built SQL is injection-prone and the
            # values come from an external site. Switch to a parameterized
            # query if MysqlController.exec supports placeholders — its
            # signature is not visible here, so the query is kept as-is.
            sql = f'insert into proxy_data(proxy) values ("{proxy}")'
            mysql_conn.exec(sql)
        mysql_conn.close()

    def save_proxy_text(self, data):
        """Append *data* (a string) to ``proxies.txt`` under DATABASE_DIR."""
        file_path = os.path.join(db_settings.DATABASE_DIR, 'proxies.txt')
        with open(file_path, 'a', encoding='utf8') as fp:
            fp.write(data)

    def save_to_text(self):
        """Crawl and append the whole proxy list's repr to the text file."""
        self.save_proxy_text(str(self.spider_ip_port_data()))


if __name__ == '__main__':
    s = SpiderIp()
    # res = s.spider_ip_port_data()
    s.save_to_json()
