import logging
import logging.config  # fileConfig() below needs the submodule imported explicitly
import os
import subprocess
import sys
import time
import uuid
from asyncio import InvalidStateError

# Relative paths (logging.conf, state files) are resolved next to this script;
# chdir before the local imports so dbmapper/entity see the same cwd as before.
os.chdir(os.path.dirname(__file__))

from playwright.sync_api import Playwright, sync_playwright

import confhelper
import dbmapper
import logs
from entity import LinkMonResult, MonLinkBatch, MonLinkVersion, MonLink

# Logging is configured from the shared project config; the relative path
# works because the cwd was set to this script's directory above.
logging.config.fileConfig("../logging.conf", disable_existing_loggers=False)
logger = logging.getLogger("tb")

# Browser viewport size.  NOTE(review): "wiew" is a typo kept for
# compatibility; it is only referenced by commented-out launch code below.
wiew_size = {"width": 1080, "height": 3240}

work_dir = confhelper.confdata().get("work_dir")  # output root directory
linkpath_prefix = confhelper.confdata().get("linkpath_prefix")+"tb/"  # screenshot target dir
picList = []   # screenshot file paths collected during this run
monLinks = []  # MonLink rows to be crawled (loaded from DB at the bottom)

logger.info("sys.argv: {}".format(sys.argv))
# Crawl version / batch id, passed as the first command-line argument.
version = sys.argv[1]

# Path to the local Chrome executable; it is launched with a remote
# debugging port so Playwright can attach over CDP.
chrome_path = r'"C:\Program Files\Google\Chrome\Application\chrome.exe"'
debugging_port = "--remote-debugging-port=9222"

def run(playwright: Playwright, link: MonLink) -> None:
    """Crawl one monitored shop link.

    Attaches to a locally launched Chrome over CDP, navigates to the shop's
    full product list, screenshots every product on the current page (via
    view_page) and records batch/progress state in the database.

    NOTE(review): relies on module globals (version, picList, chrome_path,
    debugging_port) and presumably on an already-logged-in Chrome profile —
    confirm before reusing elsewhere.
    """
    # Link 1904 is skipped unconditionally — presumably a known-bad shop;
    # TODO: confirm and move this id into configuration.
    if link.id == 1904:
        return

    # Start (or reuse) a local Chrome with the CDP debugging port open.
    command = f"{chrome_path} {debugging_port}"
    subprocess.Popen(command, shell=True)

    # browser = playwright.firefox.launch(headless=False,slow_mo=2000)
    # context = browser.new_context(screen=wiew_size, viewport=wiew_size, storage_state="tb_state.json")
    # browser = playwright.chromium.launch(channel="chrome",headless=False, slow_mo=2000)
    # Attach to the externally launched Chrome instead of launching our own,
    # so its existing profile (cookies / login state) is reused.
    browser = playwright.chromium.connect_over_cdp("http://localhost:9222", slow_mo=2000)
    # context = browser.new_context(storage_state="tb_state.json")
    context = browser.contexts[0]

    # Reuse the first already-open tab.
    page = context.pages[0]

    i = 0  # running product counter, threaded through to view_page
    url = link.url
    linkId = link.id

    # Resume from the page after the last one recorded for this link.
    currentPage = link.currentPage + 1
    logger.info("linkId:{}, 当前页：{}，链接：{}".format(linkId, currentPage, url))

    logger.info("开始遍历 {}".format(url))
    page.goto(url)

    check(page,1)

    # Shop no longer exists -> mark the link invalid and stop.
    if page.locator("text=没有找到相应的店铺信息").count() > 0:
        logger.error("无效店铺: {}".format(url))
        dbmapper.MonLinkMapper.updateInvalid(MonLink(id=link.id))
        return

    # Shop announced termination of business -> mark invalid and stop.
    if page.locator("text=店铺终止经营公告").count() > 0:
        logger.error("无效店铺: {}".format(url))
        dbmapper.MonLinkMapper.updateInvalid(MonLink(id=link.id))
        return

    check(page, 1)

    # Navigate to the full product list; shop layouts differ, so the known
    # variants are tried in order.
    if page.locator(".J_SearchForm").locator("text=搜本店").count() > 0 :
        page.locator(".J_SearchForm").locator("text=搜本店").click()
    elif page.locator(".all-cats.popup-container").locator("text=所有分类").count() > 0 :
        page.locator(".all-cats.popup-container").locator("text=所有分类").click()
    elif page.locator("#searchHeader").locator("text=搜本店").count() > 0:
        page.locator('#q').fill('s')
        page.locator("#searchHeader").locator("text=搜本店").click()
        if page.locator(".all-cats.popup-container").locator("text=所有分类").count() > 0:
            page.locator(".all-cats.popup-container").locator("text=所有分类").click()
    elif page.locator("text=全部宝贝").count() > 0 :
        page.locator("text=全部宝贝").click()

    check(page, 1)

    # Pager text looks like "current/total"; the total is the second part.
    pageTotal = page.locator(".pagination.pagination-mini").locator(".page-info").text_content()
    pageNum = int(pageTotal.split("/")[1])
    logger.info("url {} pageTotal {} pagenum: {} ".format(url,pageTotal,pageNum))

    # Jump straight to the resume page if the pager is not already there.
    tempCurrentPage = int(pageTotal.split("/")[0])
    if currentPage > tempCurrentPage:
        page.locator('.pagination input[name="pageNo"]').fill(str(currentPage))
        page.locator('.pagination button').click()

    check(page,1)

    # Total goods count shown by the shop; upsert the per-link batch row.
    goodSum = page.locator('xpath=//*[@id="shop-search-list"]/div/div[2]/span').text_content().replace(" ", "")
    monLinkBatch = MonLinkBatch(linkId=linkId)
    linkBatchCount = dbmapper.MonLinkBatchMapper.count(monLinkBatch)
    if int(linkBatchCount) > 0:
        dbmapper.MonLinkBatchMapper.update(MonLinkBatch(goodSum=goodSum, linkId=linkId))
    else:
        monLinkBatchInsert = MonLinkBatch(goodSum=goodSum, linkId=linkId, status='0', linkUrl=url)
        dbmapper.MonLinkBatchMapper.insert(monLinkBatchInsert)

    logger.info("第{}页".format(currentPage))

    # Collect the product tiles; old and new layouts use different classes.
    pageLinks = page.locator(".item3line1").locator(".item").all()
    if len(pageLinks) == 0:
        pageLinks = page.locator(".item30line1").locator(".item").all()

    logger.info("当前页面商品链接集合大小：{}".format(len(pageLinks)))

    if len(pageLinks) > 0:
        view_page(currentPage, i, page, pageLinks, picList, url, linkId)
    logger.info("结束遍历 {}".format(url))

    # Progress bookkeeping: on the last page the link is stamped with the
    # current version; otherwise version-1 keeps it eligible for re-selection
    # on the next pass so the remaining pages get crawled.
    if currentPage == pageNum:
        monLinkUpdate = MonLink(version=version, id=linkId, currentPage=currentPage, totalPage=pageNum)
        dbmapper.MonLinkMapper.update_by_id(monLinkUpdate)
    else:
        monLinkUpdate = MonLink(version=(int(version) - 1), id=linkId, currentPage=currentPage, totalPage=pageNum)
        dbmapper.MonLinkMapper.update_by_id(monLinkUpdate)
    # Close page
    page.close()
    # ---------------------
    context.close()
    browser.close()
    time.sleep(2)


def view_page(p_num: int, i: int, page, pageLinks, picList, linkUrl, linkId):
    """Open every product tile in *pageLinks* in a popup, screenshot the
    product detail page, and persist a monitoring record for it.

    Args:
        p_num: current shop-list page number (used in the screenshot name).
        i: running product counter carried over from the caller.
        page: Playwright page holding the shop's product list.
        pageLinks: locators for the product tiles to click through.
        picList: accumulator list the screenshot paths are appended to.
        linkUrl: URL of the monitored shop link.
        linkId: DB id of the monitored shop link.
    """
    for p in pageLinks:
        i = i + 1
        j = 0
        # BUG FIX: bind page1 before the try block.  Previously, if
        # expect_popup or the click itself raised, the generic handler below
        # crashed with UnboundLocalError when formatting page1.url.
        page1 = None
        logger.info("网页爬取进度:{}/{}".format(i, len(pageLinks)))
        try:
            with page.expect_popup(timeout=60000) as popup_info:
                check(page, 1)
                p.first.click()
            page1 = popup_info.value

            check(page1, 1)

            if page1.locator("text=宝贝详情").count() > 0:

                if page1.locator("text=Sorry").count() == 0:
                    # Screenshot name: page-counter-subcounter + title with
                    # filename-hostile characters stripped, capped at 30 chars.
                    pic = linkpath_prefix + str(p_num) + "-" + str(i) + "-" + str(j) + page1.title().replace("*", "").replace("/", "#").replace(" ", "")[0:30] + ".png"
                    page1.wait_for_timeout(2000)

                    check(page1, 1)

                    page1.screenshot(path=pic, full_page=True)
                    picList.append(pic)

                    # Upsert the monitoring result; the stored picture path is
                    # rewritten relative to the served /profile prefix.
                    linkMonResult = LinkMonResult(str(uuid.uuid4().int), linkUrl, page1.url,
                                                  "/profile" + pic.replace(work_dir, ""), linkId=linkId)
                    count = dbmapper.LinkMonResultMapper.count(linkMonResult)
                    if int(count) > 0:
                        linkMonResult.status = 0
                        linkMonResult.ocrStr = ""
                        dbmapper.LinkMonResultMapper.update(linkMonResult)
                    else:
                        dbmapper.LinkMonResultMapper.insert(linkMonResult)

                    # Record that this good was seen in the current version
                    # (insert-if-absent; never updated).
                    linkVersion = MonLinkVersion(linkId=linkId, linkUrl=linkUrl, goodUrl=page1.url, version=version)
                    lvCount = dbmapper.MonLinkVersionMapper.count(linkVersion)
                    if int(lvCount) <= 0:
                        dbmapper.MonLinkVersionMapper.insert(linkVersion)
                else:
                    logger.info("出现图形验证进入60秒睡眠")
                    page1.wait_for_timeout(60000)
            else:
                # A captcha overlay hides the detail page; poll until it clears.
                while page1.locator("text=宝贝详情").count() == 0:
                    logger.info("出现图形验证小弹框等待5秒")
                    page1.wait_for_timeout(5000)

            page1.close()
        except UnboundLocalError as e:
            logger.exception("UnboundLocalError traceback is:{}".format(e))
        except InvalidStateError as e:
            logger.exception("InvalidStateError traceback is:{}".format(e))
        except TimeoutError as e:
            # NOTE(review): Playwright timeouts raise playwright's own
            # TimeoutError, not the builtin caught here — those fall through
            # to the generic handler below.  Confirm before changing.
            logger.exception("TimeoutError traceback is:{}".format(e))
        except Exception as e:
            url_for_log = page1.url if page1 is not None else linkUrl
            logger.info("exception {} url:{}".format(i, url_for_log))
            logger.exception("exception traceback is:{}".format(e))
            if page1 is not None:
                page1.close()

def check(page, checkI: int):
    """Block until no known anti-bot / captcha overlay is visible on *page*.

    The full battery of checks is repeated for rounds checkI..3 inclusive
    (three passes when called with checkI=1); each detected overlay is
    polled every 5 seconds until it disappears.
    """
    for round_no in range(checkI, 4):
        logger.info("检测第{}次".format(round_no))
        time.sleep(0.1)

        # Redirect-based verification page.
        while 'redirectURL' in page.url:
            logger.info("0全部宝贝 出现图形验证等待5秒")
            page.wait_for_timeout(5000)
        while page.locator("#container").count() > 0:
            logger.info("7全部宝贝 出现图形验证等待5秒")
            page.wait_for_timeout(5000)
        # Various captcha dialog containers.
        while page.locator('.baxia-dialog').is_visible():
            logger.info("1出现图形验证等待5秒")
            page.wait_for_timeout(5000)
        while page.locator('.sufei-dialog').is_visible():
            logger.info("2出现图形验证等待5秒")
            page.wait_for_timeout(5000)
        while page.locator('.sufei-dialog-content').is_visible():
            logger.info("3出现图形验证等待5秒")
            page.wait_for_timeout(5000)
        while page.locator('.sufei-dialog-kissy').is_visible():
            logger.info("4出现图形验证等待5秒")
            page.wait_for_timeout(5000)
        while page.locator('.bannar').is_visible():
            logger.info("8出现图形验证等待5秒")
            page.wait_for_timeout(5000)
        # Checked twice on purpose-preserving grounds: the page state can
        # change between polls, so both loops are kept as in the original.
        while page.locator(".captcha-tips").count() > 0:
            logger.info("5出现图形验证等待5秒")
            page.wait_for_timeout(5000)
        while page.locator(".captcha-tips").count() > 0:
            logger.info("6全部宝贝 出现图形验证等待5秒")
            page.wait_for_timeout(5000)
        # An entirely empty body usually means the page has not rendered yet.
        while page.locator("body").text_content() == '':
            logger.info("9全部宝贝 出现图形验证等待5秒")
            page.wait_for_timeout(5000)

os.chdir(os.path.dirname(__file__))
# Load the monitored shop links for this crawl version from the database.
monLink = dbmapper.MonLinkMapper.list_by_typea('tb', version)
if monLink:
    monLinks = monLink

logger.info("待监测的url %s", [(link.id, link.url) for link in monLinks])
currentI = 0
for link in monLinks:
    currentI = currentI + 1
    logger.info("整体进度：{}/{}".format(currentI, len(monLinks)))
    # Use a fresh Playwright lifecycle per link so a failure on one shop
    # cannot poison the browser connection for the next one.
    with sync_playwright() as playwright:
        run(playwright, link)