#!/usr/bin/python
# -*- coding:utf-8 -*-
# @author  : micah
# @time    :  
# @function: 
# @version :


import threading
import time
import pymongo
from queue import Queue
import requests


class Aqiyi():
    """Multi-threaded crawler for iQiyi movie listings.

    Pipeline of daemon worker threads connected by queues:
        get_url -> url_queue -> get_data -> json_queue
        -> parse_data -> content_queue -> save_data (MongoDB)

    ``main`` waits for the pipeline to drain via ``Queue.join()``; each
    worker therefore calls ``task_done()`` only AFTER its unit of work is
    fully processed, otherwise the joins could return early and daemon
    threads would be killed mid-flight, silently dropping data.
    """

    def __init__(self):
        # MongoDB connection used to persist the scraped items.
        self.client = pymongo.MongoClient(host='127.0.0.1', port=27017)
        self.collection = self.client['spider10']['aqy2']
        # Paginated list API; {} is filled with the page number.
        self.url = 'https://pcw-api.iqiyi.com/search/recommend/list?channel_id=2&data_type=1&mode=11&page_id={}&ret_num=48&session=9a3d98f727f2e8ad32ea92feb644ff2b&three_category_id=15;must'
        self.headers = {
            'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36'
        }
        # Queue of page URLs waiting to be fetched.
        self.url_queue = Queue()
        # Queue of raw JSON responses waiting to be parsed.
        self.json_queue = Queue()
        # Queue of extracted items waiting to be saved.
        self.content_queue = Queue()

    def get_url(self):
        """Producer: enqueue the URLs for result pages 1-9."""
        for page in range(1, 10):
            self.url_queue.put(self.url.format(page))

    def get_data(self):
        """Worker: fetch a URL from url_queue, push its JSON body to json_queue."""
        while True:
            url = self.url_queue.get()
            try:
                response = requests.get(url, headers=self.headers)
                self.json_queue.put(response.json())
            finally:
                # Mark the task done only after the response has been
                # enqueued, so url_queue.join() cannot return while
                # requests are still in flight.
                self.url_queue.task_done()

    def parse_data(self):
        """Worker: extract title/playUrl/description from each API response."""
        while True:
            data = self.json_queue.get()
            try:
                for entry in data['data']['list']:
                    item = {
                        'title': entry['title'],
                        'playUrl': entry['playUrl'],
                        'description': entry['description'],
                    }
                    self.content_queue.put(item)
            finally:
                # task_done() after the parse completes (see get_data).
                self.json_queue.task_done()

    def save_data(self):
        """Worker: persist each extracted item into MongoDB."""
        while True:
            item = self.content_queue.get()
            try:
                print(item)
                self.collection.insert_one(item)
            finally:
                # task_done() after the insert completes (see get_data).
                self.content_queue.task_done()

    def main(self):
        """Start all worker threads and block until the pipeline drains."""
        t_list = []
        # 1. URL-producer thread.
        t_url = threading.Thread(target=self.get_url)
        t_list.append(t_url)
        # 2. Request workers.
        for _ in range(20):
            t_list.append(threading.Thread(target=self.get_data))
        # 3. Parser workers.
        for _ in range(50):
            t_list.append(threading.Thread(target=self.parse_data))
        # 4. Saver worker.
        t_list.append(threading.Thread(target=self.save_data))

        for t in t_list:
            # Daemon threads die with the main thread once the joins below
            # confirm all queued work is finished. (setDaemon() is
            # deprecated since Python 3.10; assign the attribute instead.)
            t.daemon = True
            t.start()

        # Wait for the producer so url_queue is fully populated before
        # joining it (replaces the racy time.sleep(1) approach).
        t_url.join()
        for q in (self.url_queue, self.json_queue, self.content_queue):
            q.join()


if __name__ == '__main__':
    # Time the full crawl end to end.
    start = time.time()
    Aqiyi().main()
    print('总花费时间：', time.time() - start)
