"""
    @Author  ：思念 
    @File    ：3.使用多线程和代理爬取当当网.py
    @Date    ：2025/4/10 9:57 
"""

import pymongo
import requests
import threading
from lxml import etree
from retrying import retry
from queue import Queue, Empty


class DangDangShop:
    """Multi-threaded scraper for DangDang "python" search results.

    Pipeline (each stage runs in daemon worker threads, joined by queues):

        url_queue -> get_goods_list() -> response_queue
                  -> parse_info()     -> detail_queue
                  -> save_data()      -> MongoDB

    Listing pages are fetched through a paid rotating proxy whose address
    is pulled on demand from an extraction API (see ``get_ip``).
    """

    # Shared Mongo handle. NOTE(review): created at class-definition time,
    # i.e. an import-time side effect shared by all instances.
    mogo_client = pymongo.MongoClient()
    db = mogo_client['py_spider']['DangDang_shop']

    def __init__(self):
        # First search-result page; later pages append &page_index=N.
        self.url = 'https://search.dangdang.com/?key=python&act=input'
        self.headers = {
            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36'
        }
        # Proxy-extraction API: returns one "ip:port" line as plain text.
        self.ip_url = 'https://dps.kdlapi.com/api/getdps/?secret_id=oryqtk04nf6tmcqfmc9i&signature=orv8focd0ej2ccviunlw4xlcrw&num=1&pt=1&format=text&sep=1'
        self.ip_queue = Queue()        # usable proxy addresses ("ip:port")
        self.url_queue = Queue()       # listing-page URLs waiting to be fetched
        self.response_queue = Queue()  # fetched Response objects waiting to be parsed
        self.detail_queue = Queue()    # parsed goods dicts waiting to be saved

    def get_ip(self):
        """Fetch one proxy address from the extraction API into ``ip_queue``.

        Best-effort: on a network error nothing is enqueued, so callers must
        not block indefinitely on ``ip_queue`` (see ``get_goods_list``).
        """
        try:
            proxy_ip = requests.get(self.ip_url, timeout=3).text
            self.ip_queue.put(proxy_ip)
            print("获取的IP为：", proxy_ip)
        except requests.RequestException as e:
            # Report the cause instead of swallowing it silently.
            print("代理IP获取失败:", e)

    def get_max_page_num(self):
        """Discover the page count and enqueue every listing-page URL.

        Falls back to enqueueing just the first page when pagination cannot
        be read (request failure, layout change), so ``url_queue`` is never
        left empty and the worker threads always have something to do.
        """
        try:
            response = requests.get(self.url, headers=self.headers, timeout=3)
            tree = etree.HTML(response.text)
            # Pagination <ul name="Fy">: the third-from-last <li> holds the
            # last page number (the "..." and "next" items follow it).
            max_page_num = tree.xpath("//ul[@name='Fy']/li[last()-2]/a/text()")[0]
            for page in range(1, int(max_page_num) + 1):
                url = f'https://search.dangdang.com/?key=python&act=input&page_index={page}'
                self.url_queue.put(url)
                print(f"正在采集第{page}页数据")
        except Exception as e:
            print("暂无翻页数据", e)
            # The URL queue must not be empty; default to the first page.
            self.url_queue.put(self.url)

    def get_goods_list(self):
        """Worker: fetch listing pages through a proxy until url_queue drains.

        Fixes over the previous revision:
          * dropped the ``@retry`` decorator — it wrapped an infinite worker
            loop that caught every exception internally, so it could never
            fire and was dead code;
          * ``ip_queue.get()`` is now bounded by a timeout, so a failed
            ``get_ip()`` call no longer blocks this thread forever;
          * ``task_done()`` runs exactly once per dequeued URL (``finally``),
            so ``url_queue.join()`` in ``main()`` cannot hang.
        """
        # NOTE(security): proxy credentials are hard-coded; move them to an
        # environment variable or config file.
        username = "d2229184921"
        password = "miss1234"
        while True:
            try:
                url = self.url_queue.get(timeout=3)
            except Empty:
                print("URL队列已空，退出线程")
                break
            try:
                if self.ip_queue.empty():
                    print("正在获取代理IP....")
                    self.get_ip()
                # Bounded wait: another worker may still return a good proxy;
                # if none arrives, give up on this URL instead of deadlocking.
                proxy_ip = self.ip_queue.get(timeout=10)
                proxy_auth = f"http://{username}:{password}@{proxy_ip}/"
                proxies = {"http": proxy_auth, "https": proxy_auth}
                response = requests.get(url, headers=self.headers, proxies=proxies, timeout=3)
                self.response_queue.put(response)
                if response.status_code == 200:
                    # Proxy proved usable -> return it to the pool for reuse.
                    self.ip_queue.put(proxy_ip)
                    print("可用代理IP:", proxy_ip)
            except Empty:
                print("代理IP获取失败")
            except Exception as e:
                print("获取商品列表失败:", e)
            finally:
                self.url_queue.task_done()

    def parse_info(self):
        """Worker: extract title/price pairs from fetched listing pages.

        Missing fields default to '空' so every stored item carries both
        keys. ``task_done()`` runs exactly once per dequeued response.
        """
        while True:
            try:
                response = self.response_queue.get(timeout=5)
            except Empty:
                print("响应队列已空，退出线程")
                break
            try:
                tree = etree.HTML(response.text)
                goods_list = tree.xpath("//ul[@class='bigimg']/li")
                for goods in goods_list:
                    # Evaluate each xpath once instead of twice per field.
                    title = goods.xpath("./a/@title")
                    price = goods.xpath("./p[@class='price']/span[1]/text()")
                    goods_detail = {
                        "title": title[0] if title else '空',
                        "price": price[0] if price else '空',
                    }
                    self.detail_queue.put(goods_detail)
                    print("数据解析成功：", goods_detail)
            except Exception as e:
                print("解析失败", e)
            finally:
                self.response_queue.task_done()

    def save_data(self):
        """Worker: persist parsed goods dicts into MongoDB one by one."""
        while True:
            try:
                goods_detail = self.detail_queue.get(timeout=5)
            except Empty:
                print("详情队列已空，退出线程")
                break
            try:
                self.db.insert_one(goods_detail)
                print("数据保存成功：", goods_detail)
            except Exception as e:
                print("保存失败:", e)
            finally:
                self.detail_queue.task_done()

    def main(self):
        """Seed the URL queue, start the workers, block until all work drains.

        Workers run as daemon threads, so anything still blocked after the
        three ``join()`` calls return dies with the main thread.
        """
        self.get_max_page_num()
        thread_list = []
        for _ in range(5):
            thread_list.append(threading.Thread(target=self.get_goods_list))
            thread_list.append(threading.Thread(target=self.parse_info))
        thread_list.append(threading.Thread(target=self.save_data))
        for thread in thread_list:
            thread.daemon = True
            thread.start()
        # Block until every queued item has been matched by a task_done().
        self.url_queue.join()
        self.response_queue.join()
        self.detail_queue.join()
        print("数据爬取结束。。。。")


if __name__ == '__main__':
    # Script entry point: build the scraper and run the whole pipeline.
    spider = DangDangShop()
    spider.main()
