from pyppeteer import launch
from pyppeteer.browser import Browser
from pyppeteer.network_manager import Response
from lxml import etree
from mini_tools.cjjtool import *
from mini_tools.myexcel import *
from sql import sqlseverDB
import asyncio,re

# Captures the "<shopid>.<itemid>" pair from a Shopee product URL of the
# form "...-i.<shopid>.<itemid>?...".  Not referenced in this chunk —
# presumably consumed elsewhere in the project; verify before removing.
pat_pid=re.compile(r'-i[.](\d+?[.]\d+)[?]?')


async def crawlDp(browser:Browser,w,h):
    """Worker coroutine that drains the shared ``dcj`` queue.

    For each (pid, url, shopid) item it loads the product page, extracts the
    shop link and the shop's product count, inserts the result into
    [shopstore], and updates the row state in [searchUrl]
    (0 = retry, 1 = in progress, 2 = done).

    Args:
        browser: shared pyppeteer Browser the worker opens its own page in.
        w: viewport width in pixels.
        h: viewport height in pixels.
    """
    global dcj
    page = await browser.newPage()

    await page.setViewport({'width':w , 'height': h})
    # Hide navigator.webdriver so the site's bot detection is less likely to fire.
    await page.evaluateOnNewDocument('''() =>{ Object.defineProperties(navigator,{ webdriver:{ get: () => false } }) }''')

    while True:
        if not dcj:
            print('采集店铺完毕')
            break
        print(f'任务队列剩余:{len(dcj)}')
        pid,url,shopid=dcj.pop(0)
        try:
            await repeat_goto(page,url)
            await page.waitFor(2000)

            noexceit=await page.xpath('//div[@role="main" and @class="product-not-exist"]')
            if noexceit:
                print(f'url:{url},产品不存在')
                # BUG FIX: this was `return`, which killed the whole worker,
                # left every remaining queue item stuck in state 1, and never
                # closed the page.  Mark the dead product as processed and
                # keep draining the queue instead.
                ms.ExecNoQuery('update [searchUrl] set [XiangQingYeZhuangTai]=2 where [id]=?',(pid,))
                continue

            # Slider-captcha detection; ghkkkk() is expected to solve it.
            hkkkk=await page.xpath('//img[@class="Yj2cCM"]')
            if hkkkk:
                print('出现滑块')
                await ghkkkk(page)

            oo=await myawait(page,'//div[@class="_44qnta"]/span/text()')
            if not oo:
                # Product title never rendered: release the row for a retry.
                print(f'pid-{pid},等待过久退出')
                ms.ExecNoQuery('update [searchUrl] set [XiangQingYeZhuangTai]=0 where [id]=?',(pid,))
                continue

            cot=await page.content()
            tree=etree.HTML(cot)  # parse once, reuse for both xpath queries
            dpurl=''
            procount=0
            href=tree.xpath('//a[@class="btn btn-light btn--s btn--inline btn-light--link Vf+pt4"]/@href')
            if href:
                href=href[0]
                dpurl=href if href.startswith('http') else f'https://shopee.co.id{href}'
                # BUG FIX: the original overwrote procount with the raw xpath
                # result, so a missing counter inserted an empty list into the
                # DB instead of the default 0.
                found=tree.xpath('//a[@class="R7Q8ES p48aHT"]/span[1]/text()')
                if found:
                    procount=found[0]
                    # "RB" is Indonesian "ribu" (thousand): e.g. "1,2RB" -> 1200.0
                    if 'RB' in procount:
                        procount=float(procount.replace('RB','').replace(',','.'))*1000

                aff=ms.ExecNoQuery('insert into [shopstore]([SouSuoYeChanPinWangZhi],[DianPuLianJie],[ChanPinShuLiang],[shopid]) values(?,?,?,?)',(url,dpurl,procount,shopid))
                if aff:
                    aff2=ms.ExecNoQuery('update [searchUrl] set [XiangQingYeZhuangTai]=2 where [id]=?',(pid,))
                    print(f'pid-{pid},成功采集店铺链接,更新状态:{aff2}')
                else:
                    print(f'pid-{pid},采集失败')
                    ms.ExecNoQuery('update [searchUrl] set [XiangQingYeZhuangTai]=0 where [id]=?',(pid,))

        except Exception as e:
            # Any page-level failure: log it and release the row for a retry.
            print(f'url:{url}-页面出错 => {e}')
            ms.ExecNoQuery('update [searchUrl] set [XiangQingYeZhuangTai]=0 where [id]=?',(pid,))

    await page.close()

token=''    # auth token read from config inside begin_cj_dpurl
col_ind=3   # not referenced in this chunk — presumably used elsewhere; verify
dcj=[]      # shared work queue of (id, search-page url, shopid) rows
ms=None     # sqlseverDB connection, created inside begin_cj_dpurl
async def begin_cj_dpurl(w,h):
    """Main driver loop.

    Repeatedly claims up to 1000 pending rows from [searchUrl] (state 0),
    marks them in progress (state 1), launches a browser, and runs crawlDp
    worker tasks over the batch until no pending rows remain.

    Args:
        w: viewport width passed through to crawlDp.
        h: viewport height passed through to crawlDp.

    Returns:
        1 once the database has no more pending work.
    """
    global dcj,token,ms
    # SECURITY NOTE(review): DB credentials are hard-coded in source; move
    # them to the config file / environment rather than shipping them here.
    ms = sqlseverDB('{SQL Server}', '199.33.126.84,1433', 'shopeecoid', 'sa', 'Y3GwLHaP8zQ0seZG')

    while True:
        # BUG FIX: claim the batch BEFORE launching a browser, so the
        # empty-queue break no longer leaks a freshly launched Chromium.
        dcj=ms.ExecQuery('select top 1000 [id],[SouSuoYeChanPinWangZhi],[shopid] from searchUrl where [XiangQingYeZhuangTai]=0')
        if not dcj:
            print('数据库暂无待采集数据,请稍后重试')
            break

        print(f'成功从数据库获取{len(dcj)}个待采集产品链接')

        # Mark the batch as "in progress" so parallel instances don't grab it.
        # (The ids come from the DB as integers, so the f-string IN clause is
        # not attacker-controlled.)
        idstr=','.join(str(item[0]) for item in dcj)
        aff=ms.ExecNoQuery(f'update [searchUrl] set [XiangQingYeZhuangTai]=1 where [id] in ({idstr})')
        print(f'成功将{aff}个数据采集状态标注为1,避免多开重复获取')

        kchrome()
        objcig=get_config()
        token=objcig['token']
        bargs=['--disable-infobars','--start-maximized']
        if objcig['proxy']:
            bargs.append(f"--proxy-server={objcig['proxy']}")
        browser = await launch(
                handleSIGINT=False,
                handleSIGTERM=False,
                handleSIGHUP=False,
                headless=objcig['headless'],
                userDataDir=objcig['userDataDir'],
                executablePath=objcig['executablePath'],
                args=bargs
            )
        try:
            tasks=[asyncio.create_task(crawlDp(browser,w,h)) for j in range(1)]
            await asyncio.gather(*tasks)
            print(f'本次采集完毕')
        finally:
            # BUG FIX: the close call was commented out, leaking one
            # Chromium process per loop iteration.
            await browser.close()

        r0=ms.ExecQuery('select count(id) from searchUrl where XiangQingYeZhuangTai=0')
        r1=ms.ExecQuery('select count(id) from searchUrl where XiangQingYeZhuangTai=1')
        r2=ms.ExecQuery('select count(id) from searchUrl where XiangQingYeZhuangTai=2')
        r4=ms.ExecQuery('select count(id) from shopstore')
        print(f'本次采集完毕,数据库当前状态 => 未采集网址总数:{r0[0][0]},已采店铺:{r4[0][0]},正在采集网址:{r1[0][0]},已采集网址:{r2[0][0]}')
    return 1

