import time
from threading import Thread

import MySQLdb
import redis
import requests
from lxml import etree
import re


def requests_get_with_retry(url, max_retries=10, timeout=30):
    """GET *url* through the paid HTTP proxy, retrying on failure.

    Makes up to *max_retries* attempts, sleeping 3 s before every attempt
    (crude rate limiting across the worker threads) plus another 3 s after
    each failure.  The target site answers HTTP 200 even when it blocks
    the client, so the body is inspected for anti-bot / busy markers and
    those pages are treated as failures too.

    Returns the ``requests.Response`` on success, or ``None`` once every
    attempt has failed.  Never raises: every exception is appended to
    ``error.txt`` and converted into a retry.
    """
    # Request configuration is loop-invariant — build it once, not on
    # every retry iteration.
    headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/137.0.0.0 Safari/537.36",
    }
    # SECURITY NOTE(review): proxy credentials are hard-coded in source;
    # move them to environment variables or a config file.
    proxyAddr = "tun-zfgvuf.qg.net:13519"
    authKey = "4A714292"
    password = "69973C84ED1D"
    proxyUrl = "http://%(user)s:%(password)s@%(server)s" % {
        "user": authKey,
        "password": password,
        "server": proxyAddr,
    }
    proxies = {
        "http": proxyUrl,
        "https": proxyUrl,
    }

    for _ in range(max_retries):
        time.sleep(3)  # pace requests so the target site is not hammered
        try:
            # `timeout` is essential: without it a stalled proxy would
            # hang this worker thread indefinitely.
            response = requests.get(
                url, headers=headers, proxies=proxies, timeout=timeout
            )
            html_str = response.content.decode()
            if "本次访问已触发人机验证，请按指示操作" in html_str:
                raise Exception("触发人机验证")
            if "当前系统正忙，请登录后再试" in html_str:
                raise Exception("系统繁忙")
            return response
        except Exception as e:
            time.sleep(3)  # extra back-off after a failed attempt
            with open("error.txt", "a", encoding="utf-8") as f:
                f.write(f"{url}请求失败，正在重试：{type(e)} {e}\n")
    return None


# 断点续爬
def parse():
    global redis_conn, mysql_conn, mysql_cursor, cache_data_list
    # 从左边取数据
    while redis_conn.llen('lianjia_task_queue') > 0:
        task_url = redis_conn.lpop('lianjia_task_queue')
        print("获取任务：", task_url)
        response = requests_get_with_retry(task_url)
        if not response:
            # 出错的链接单独存储记录
            redis_conn.rpush('lianjia_task_queue_error', task_url)
            continue
        # 获取html界面
        html_str = response.content.decode()
        root = etree.HTML(html_str)
        item_list = root.xpath("//div[@class='content__list--item']")
        # 获取需要的标题，价格，位置等信息
        for item in item_list:
            try:
                title = "".join(item.xpath("../a[@class='content__list--item--aside']/@title")).strip()
                price = "".join(item.xpath(".//span[@class='content__list--item-price']/em/text()")) + "元/月"
                des = "".join(item.xpath(".//p[@class='content__list--item--des']//text()")).strip()
                # 替换所有的（空格、制表符、换行符）
                des = re.sub(r"\ ", "", des)
                print(("杭州", title, price, des))
                # 添加到缓存列表
                cache_data_list.append(("杭州", title, des, price))
            except Exception as e:
                print("解析异常：", task_url)

        # 编写sql语句
        sql = """insert into tb_lianjia(city, title, address, price) values (%s, %s, %s, %s)"""
        # 批量添加到数据库
        mysql_cursor.executemany(sql, cache_data_list)
        mysql_conn.commit()
        # 每次提交到数据库中，清空缓存
        cache_data_list.clear()


if __name__ == '__main__':
    # MySQL connection settings.
    db_config = {
        'host': 'localhost',
        'user': 'root',          # replace with your username
        'password': '',          # replace with your password
        'database': 'bilibili',  # replace with your database name
        'port': 3307,
        'autocommit': True,      # commit statements automatically
        'connect_timeout': 60,   # allow up to 60 s to establish the link
    }
    # Open the MySQL connection and grab a cursor for the workers.
    mysql_conn = MySQLdb.connect(**db_config)
    mysql_cursor = mysql_conn.cursor()
    print("数据库连接成功")

    # Shared row buffer consumed by the worker threads.
    cache_data_list = []

    # Redis connection (db 1); decode_responses makes it return str.
    redis_conn = redis.Redis(decode_responses=True, db=1)

    # Spin up 14 worker threads running parse() and wait for them all.
    workers = [Thread(target=parse) for _ in range(14)]
    for worker in workers:
        worker.start()
    for worker in workers:
        worker.join()

    # Release the MySQL resources once every worker has finished.
    mysql_cursor.close()
    mysql_conn.close()