import json
import random
import time
from queue import Empty, Queue
from threading import Thread

import requests
from lxml import etree


class CSDN(object):
    """Multi-threaded crawler for a CSDN blog.

    One producer thread enqueues article-list page URLs, a pool of parser
    threads extracts article links from them, and a pool of detail threads
    fetches each article and prints its title and view count.
    """

    def __init__(self, name=None):
        """Prepare HTTP headers and the two work queues.

        :param name: CSDN blog account name used to build list-page URLs.
            Defaults to the module-level ``name`` global so the original
            parameterless ``CSDN()`` call keeps working.
        """
        self.base_url = 'https://blog.csdn.net/'
        self.headers = {
            'user-agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36',
            'upgrade-insecure-requests': '1',
        }
        # Backward-compatible fallback: the original code read an implicit
        # global set by the __main__ block; keep that path when no explicit
        # name is supplied.
        self.name = name if name is not None else globals().get('name')
        # Queue of list-page URLs waiting to be parsed.
        self.page_url_queue = Queue()
        # Queue of article detail-page URLs waiting to be scraped.
        self.detail_url_queue = Queue()
        print('爬虫启动')

    def get_response(self, url):
        """Send a GET request with the crawler headers; return the response."""
        response = requests.get(url=url, headers=self.headers)
        return response

    def get_page_url(self):
        """Producer: enqueue list-page URLs for the parser threads.

        Walks pages 1-4; after page 4 it restarts from page 1, repeating the
        whole sweep 11 times (until ``n > 10``) to keep feeding the consumer
        queues.  Stops early as soon as a page contains no article nodes.
        """
        i = 1   # current list-page number (1..4)
        n = 0   # completed sweeps over pages 1-4
        while i < 5:
            page_url = self.base_url + self.name + '/article/list/' + str(i)
            response = self.get_response(page_url).content.decode()
            html = etree.HTML(response)
            node_list = html.xpath('//*[@id="mainBox"]/main/div[2]/div[@class="article-item-box csdn-tracking-statistics"]')
            if not node_list:
                # An empty page means we ran past the last list page.
                print('翻页结束，一共{}页'.format(i - 1))
                break
            self.page_url_queue.put(page_url)
            print('第{}页，放入page队列----{}-----条'.format(i, len(node_list)))
            i += 1
            if i > 4:
                n += 1
                i = 1
            if n > 10:
                break

    def parse_data(self):
        """Consumer/producer: pull list pages, push article detail URLs.

        Exits when the page queue stays empty for one second; errors on a
        single page are logged and skipped instead of killing the thread.
        """
        print('打开列表页')
        while True:
            try:
                # NOTE: Queue.not_empty (used originally) is a Condition
                # object and always truthy; the get() timeout is the real
                # termination signal.
                page_url = self.page_url_queue.get(timeout=1)
            except Empty:
                break
            try:
                response = self.get_response(page_url).content.decode()
                html = etree.HTML(response)
                node_list = html.xpath('//*[@id="mainBox"]/main/div[2]/div[@class="article-item-box csdn-tracking-statistics"]')
                for node in node_list:
                    detail_url = node.xpath('./h4/a/@href')[0]
                    self.detail_url_queue.put(detail_url)
            except Exception as e:
                # Log and keep consuming; one bad page should not stop the worker.
                print(e, '1' * 20)

    def detail_page(self):
        """Consumer: fetch each article page and print its title and views.

        Exits when the detail queue stays empty for one second; per-article
        errors (network or missing xpath node) are logged and skipped.
        """
        while True:
            try:
                detail_url = self.detail_url_queue.get(timeout=1)
            except Empty:
                break
            try:
                response = self.get_response(detail_url).content.decode()
                html = etree.HTML(response)
                title = html.xpath('//*[@id="mainBox"]/main/div[1]/div[1]/h1/text()')[0]
                view = html.xpath('//*[@id="mainBox"]/main/div[1]/div[2]/div/div/span/text()')[0]
                print('当前文章标题：', title)
                print('当前浏览量：', view)
            except Exception as e:
                print(e, '2' * 20)

    def run(self):
        """Launch 1 producer, 5 list parsers and 10 detail scrapers, then wait.

        The original started and joined each thread inside the same loop,
        which ran the whole pipeline serially; start everything first, then
        join, so the threads actually run concurrently.
        """
        thread_list = [Thread(target=self.get_page_url)]
        for _ in range(5):
            thread_list.append(Thread(target=self.parse_data))
        for _ in range(10):
            thread_list.append(Thread(target=self.detail_page))
        for t in thread_list:
            t.start()
        for t in thread_list:
            t.join()


if __name__ == '__main__':
    name = input('请输入你的博客名称:')

    # Re-run the crawler forever, pausing a random 1-4 seconds between rounds
    # and reporting how long each round took.
    while True:
        started = time.time()
        spider = CSDN()
        spider.run()
        elapsed = time.time() - started
        print('本次耗时:{} 秒'.format(elapsed))
        time.sleep(random.randint(1, 4))

