import asyncio
import playwright.async_api
from playwright.async_api import async_playwright
import os
import time
from utils.pivixitem import PivixItem
from utils.utils import save_data, get_start_url, make_start_url
from utils.settings import DOMAIN_PREFIX, CONCURRENT_LEVEL
import openpyxl
import json
import numpy as np


# Read artwork ids / artist ids from the preflight spreadsheet.
# Column 0 holds the artwork id, column 1 the artist id; row 1 is a header.
print(os.getcwd())
wb = openpyxl.load_workbook('preflight.xlsx')
ws = wb.active
artwork_id = []
artist_id = []
# iter_rows(min_row=2) skips the header row, replacing the manual counter.
for row in ws.iter_rows(min_row=2):
    artwork_id.append(str(row[0].value))
    artist_id.append(str(row[1].value))


async def pipeline(item:PivixItem) -> None:
    # Persist one scraped item; thin async wrapper around utils.save_data
    # (kept as a hook so post-processing can be added in one place).
    await save_data(item)

async def extract_followers(content):
    start_time = time.time()
    followers_list = await content.query_selector_all('//ul[@class="bookmark-items"]/li')

    extract_content = []
    for f in followers_list:
        name = await f.query_selector('//span[@class="user-name"]')
        date = await f.query_selector('//span[@class="date"]')
        item = {}
        item['follower'] = await name.inner_html()
        item['date'] = await date.inner_html()
        extract_content.append(item)

    end_time = time.time()
    print("page extract time = ", end_time - start_time)
    return extract_content

async def fetch(context, aid):
    """Scrape all bookmark/follower pages for one artwork id and persist them.

    Opens a fresh page in *context*, loads the start URL built from *aid*,
    walks every detail link it finds, follows ".next" pagination on each,
    and pipelines a PivixItem per detail link.

    Fixes over the original:
      * ``content`` was unbound (NameError) if the first detail ``goto``
        timed out before assignment.
      * the page was closed inside the detail loop, so a second detail link
        operated on a closed page.
      * data was only pipelined on the TimeoutError path — the success path
        silently discarded everything it scraped.
      * the page leaked when the initial ``goto`` timed out.
    """
    url = make_start_url(aid)
    print("begin fetch : ", url)
    page = await context.new_page()
    # Abort image requests to speed up page loads.
    await page.route(
        "**/*",
        lambda route: route.abort()
        if route.request.resource_type == "image"
        else route.continue_(),
    )
    first_page_fetch_start_time = time.time()
    try:
        try:
            # Load the first (listing) page.
            await page.goto(url, wait_until="load")
        except playwright.async_api.TimeoutError:
            print(aid, ' fetch load fail')
            return

        detail = await page.query_selector_all('//ul[@class="sc-1qvk3ka-0 dpDffd"]//li/a')
        first_page_fetch_end_time = time.time()
        print("first page fetch time : ", first_page_fetch_end_time - first_page_fetch_start_time)

        # Visit each detail page and collect its (paginated) follower list.
        for i in detail:
            ref = await i.get_attribute('href')
            detail_url = DOMAIN_PREFIX + ref
            content = []  # defined up-front so a timeout still saves partial data
            try:
                await page.goto(detail_url, wait_until="domcontentloaded")
                content = await extract_followers(page)
                # Follow ".next" pagination until the control disappears.
                while await page.locator('.next').count() > 0:
                    await page.click('.next')
                    await page.wait_for_selector('//ul[@class="bookmark-items"]/li')
                    content.extend(await extract_followers(page))
            except playwright.async_api.TimeoutError as e:
                print(e)
            # Persist whatever was collected — full on success, partial on timeout.
            item = PivixItem(p_aid=aid, p_followers=content, p_date="0000")
            pipeline_start_time = time.time()
            await pipeline(item)
            pipeline_end_time = time.time()
            print(aid, ' page finish, pipeline time = ', pipeline_end_time - pipeline_start_time)
        print(aid, ' over')
    finally:
        # Close exactly once, on every exit path.
        await page.close()

async def job(queue:asyncio.Queue, context):
    """Worker loop: pull artwork ids from *queue* forever and fetch each one.

    Runs until cancelled by the driver. ``task_done()`` is guaranteed via
    ``finally`` — in the original, any exception escaping ``fetch`` killed
    the worker without marking the item done, hanging ``queue.join()``.
    """
    while True:
        aid = await queue.get()
        try:
            await fetch(context=context, aid=aid)
        except Exception as e:
            # Log and keep the worker alive; one bad id must not stall the run.
            print(aid, " task failed: ", e)
        finally:
            queue.task_done()
        print(aid, " task done.")

async def pixiv_main_task() -> None:
    """Drive the scrape: enqueue every artwork id and run CONCURRENT_LEVEL workers.

    Launches a headed Chromium with the saved login state ("auth.json"),
    waits for the queue to drain, then cancels and awaits the workers
    before closing the browser (the original cancelled after closing and
    never awaited the cancelled tasks, leaving "Task was destroyed"
    warnings).
    """
    async with async_playwright() as p:
        start_time = time.time()
        queue = asyncio.Queue()
        browser = await p.chromium.launch(headless=False)
        # Reuse the stored authentication state so pages are logged in.
        context = await browser.new_context(storage_state="auth.json")
        for aid in artwork_id:
            queue.put_nowait(aid)
        tasks = [asyncio.create_task(job(queue, context)) for _ in range(CONCURRENT_LEVEL)]
        print("nums of jobs:", len(artwork_id))
        print("concurrent level = ", CONCURRENT_LEVEL)
        await queue.join()
        # Workers are idle now; stop them and let cancellation complete
        # before tearing the browser down.
        for task in tasks:
            task.cancel()
        await asyncio.gather(*tasks, return_exceptions=True)
        await browser.close()
        end_time = time.time()
        print("total fetch time ", end_time - start_time)

if __name__ == "__main__":
    # Script entry point: run the async scrape driver to completion.
    asyncio.run(pixiv_main_task())
