# Feasibility unknown; this approach is too slow.

from selenium import webdriver
from selenium.webdriver.chrome.options import Options
import time
import datetime
import json
from queue import Queue
import threading


class DouyuTVSpider:
    """Scrape live-room listings from Douyu's "all rooms" directory page.

    Drives a headless Chrome via Selenium, pages through the directory by
    clicking the "next page" control, and appends the scraped records to
    ``douyuTV.json`` as one ``{"data": [...]}`` document.
    """

    def __init__(self):
        self.start_url = "https://www.douyu.com/directory/all"
        chrome_options = Options()
        chrome_options.add_argument('--headless')
        chrome_options.add_argument('--disable-gpu')
        # BUG FIX: chrome_options was created but never handed to the driver,
        # so headless mode silently never took effect.
        self.driver = webdriver.Chrome(
            r'E:\chromedriver_win32 存放目录\chromedriver.exe',
            options=chrome_options)
        self.write_queue = Queue()  # scraped page payloads awaiting write
        self.thread_list = []       # writer threads (filled in run())
        self.count = 1              # page counter; bounds the crawl below

    def get_content_list(self):
        """Scrape every room card on the current page, enqueue the records,
        and click through to the next page; stops after ~50 pages.

        Originally recursive (one stack frame per page); rewritten as a
        loop with identical per-page behavior.
        """
        while True:
            self.count += 1
            if self.count > 50:
                return
            time.sleep(2)  # crude wait for the lazy-loaded cards to render
            li_list = self.driver.find_elements_by_class_name('layout-Cover-item')
            content = []
            for li in li_list:
                item = {}
                item['room_img'] = li.find_element_by_css_selector(
                    "img.DyImg-content.is-normal ").get_attribute('src')
                item['room_title'] = li.find_element_by_css_selector(
                    "h3.DyListCover-intro").text
                item['room_category'] = li.find_element_by_css_selector(
                    "span.DyListCover-zone").text
                item['anchor'] = li.find_element_by_css_selector(
                    "h2.DyListCover-user").text
                item['hot'] = li.find_element_by_css_selector(
                    "span.DyListCover-hot").text
                item['spider_time'] = datetime.datetime.now().strftime(
                    '%Y-%m-%d %H:%M:%S')
                content.append(item)
            self.write_queue.put(content)
            print(1)  # crude progress indicator (one "1" per page)
            # Advance to the next page; the loop then scrapes it.
            self.driver.find_element_by_css_selector("li.dy-Pagination-next").click()

    def save_content_list(self):
        """Writer-thread loop: drain the queue and append each chunk as a
        comma-terminated JSON array fragment (finalized by run())."""
        while True:
            dict_data = self.write_queue.get()
            with open("douyuTV.json", "a", encoding="utf-8") as f:
                f.write(json.dumps(dict_data, ensure_ascii=False, indent=4) + ",")
            self.write_queue.task_done()

    def run(self):
        """Entry point: open the directory, scrape all pages, finalize JSON."""
        self.driver.get(self.start_url)
        time.sleep(2)
        # Dismiss the zoom-tip overlay that would otherwise intercept clicks.
        self.driver.find_element_by_css_selector("span.ZoomTip-tipHide").click()
        # NOTE(review): opening in append mode means a file left over from a
        # previous run yields concatenated, invalid JSON — assumes fresh file.
        with open("douyuTV.json", "a", encoding="utf-8") as f:
            f.write('{"data":[')
        # Scraping is fully synchronous; the writer threads started below
        # only drain what get_content_list() has already queued.
        self.get_content_list()
        for _ in range(5):
            t_save_news = threading.Thread(target=self.save_content_list)
            t_save_news.daemon = True  # setDaemon() is deprecated
            self.thread_list.append(t_save_news)
        for t in self.thread_list:
            t.start()
        self.write_queue.join()
        # BUG FIX: every queued chunk ends with a trailing comma, which made
        # the final file invalid JSON ("...},]}" ). Strip the last comma
        # before closing the array.
        with open("douyuTV.json", "rb+") as f:
            f.seek(0, 2)
            if f.tell() > 0:
                f.seek(-1, 2)
                if f.read(1) == b",":
                    f.seek(-1, 2)
                    f.truncate()
        with open("douyuTV.json", "a", encoding="utf-8") as f:
            f.write("]}")
        self.driver.quit()



if __name__ == "__main__":
    # Run a full crawl when executed as a script.
    DouyuTVSpider().run()
