import requests
from bs4 import BeautifulSoup
import re
import time
import multiprocessing as mp
from queue import Queue

# Browser-like User-Agent sent with every request so the site does not reject
# us as an obvious bot. NOTE(review): named `h` because parse_page reads it as
# a module global — renaming would require touching every caller.
h ={'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.71 Safari/537.36'}

def parse_page(url):
    """Fetch one listing page and extract the comics on it.

    Parameters
    ----------
    url : str
        Full URL of a kuaikanmanhua listing page.

    Returns
    -------
    list[dict]
        One dict per comic with keys ``title`` (name), ``author`` and
        ``dianzan`` (like count). Empty list if the expected container
        div is not present on the page.
    """
    # Timeout so a stalled server cannot hang a pool worker forever.
    content = requests.get(url, headers=h, timeout=10).content
    soup = BeautifulSoup(content, 'html5lib')
    tag_content = soup.find('div', class_='tagContent cls')
    if tag_content is None:
        # Page layout changed or an error page was served — nothing to parse.
        return []

    # Comic cards carry classes like "ItemSpecial f0", "ItemSpecial f1", ...
    pattern = re.compile(r'ItemSpecial f(.)')
    page_manhuas = []
    for item in tag_content.find_all('div', class_=pattern):
        page_manhuas.append({
            # comic title
            'title': item.find('span', class_='itemTitle').text,
            # author name
            'author': item.find('span', class_='author fl').text,
            # like (点赞) count, nested inside the zanNumber span
            'dianzan': item.find('span', class_='zanNumber fr').find('span', class_='fl').text,
        })
    return page_manhuas

def main(pages):
    """Scrape *pages* listing pages in parallel and print a summary.

    Parameters
    ----------
    pages : int
        Number of listing pages to fetch (pages 1..pages inclusive).

    Prints the total number of comics found and the last one, or a
    notice when nothing was scraped.
    """
    base_url = 'https://www.kuaikanmanhua.com/tag/0?state=1&sort=1&page={}'
    url_list = [base_url.format(i) for i in range(1, pages + 1)]

    # Context manager guarantees the worker processes are terminated and
    # joined even if a task raises — the original pool was never closed.
    with mp.Pool() as pool:
        async_results = [pool.apply_async(parse_page, (url,)) for url in url_list]
        page_results = [res.get() for res in async_results]

    # Flatten the per-page lists into one list of comic dicts.
    manhuas = [manhua for page in page_results for manhua in page]

    if manhuas:
        print(len(manhuas), manhuas[-1])
    else:
        # Guard: indexing manhuas[-1] on an empty result would raise IndexError.
        print(0)

if __name__=="__main__":
    # Ask how many listing pages to scrape, then time the whole run.
    page_count = int(input('请输入一个页数:'))
    started = time.time()
    main(page_count)
    elapsed = time.time() - started
    print(elapsed)
 