from queue import Queue
from collections import defaultdict,Counter
from io import BytesIO
from playwright.async_api import async_playwright, Playwright,Browser,Page
from playwright.async_api import TimeoutError as PlaywrightTimeout
from urllib.parse import urlencode
from api import fix_excel_xml,get_yn_0timestamp
from config import *
from county_config import *
from tool import tms,get_shopee_timestamp,ask_deepseek,close_SunBrowser,getTimeStr,timestamp_to_sql_datetime,try_to_float
from tool import get_xpllbb_value,deepseek_session_singleton,remove_chinese
import requests,subprocess,asyncio,os,re,json,openpyxl,time,datetime
import concurrent.futures,csv,copy,math,logging,traceback,difflib
import random,socket,psutil,urllib.parse,string,xlrd,dirtyjson,uuid


class LoginError(Exception):
    pass


from playwright.async_api import Page

async def plw_bring_to_front_if_needed(page: Page):
    """只在页面不在前台时 bringToFront，后台循环检测"""
    while True:
        try:
            is_focused = await page.evaluate("document.hasFocus()")
            if not is_focused:
                await page.bring_to_front()
        except Exception:
            pass  # 可以加日志记录错误
        await asyncio.sleep(random.randint(30, 60))

async def connect_browser_pw(basic_info,task_id) ->tuple[Playwright,Browser]:
    rw_name=Task_Name_dict[task_id]
    bid=basic_info['BrowserID']
    bname=basic_info['DpName']
    gname=basic_info['GroupName']
    ping_tai=basic_info['PingTai']
    params={
        'profile_id':bid,
        'last_opened_tabs':'1',
        'proxy_detection':'0'
    }

    while True:
        now_time=datetime.datetime.now()
        err_zds=['iscl','task_id','task_name','BID','loginTime','pintai','bname','gname','des']
        err_params=[0,task_id,rw_name,bid,now_time,ping_tai,bname,gname]
        playwright=None
        try:    
            res = requests.post(f'{ADSPower_API_URL}/api/v2/browser-profile/start',json=params,timeout=120)
            res_data = res.json()
            if res_data['code']==0:
                browser=None
                try:
                    ws_url = res_data['data']['ws']['puppeteer']
                    playwright = await async_playwright().start()
                    browser = await playwright.chromium.connect_over_cdp(ws_url)
                    aff=0
                    start_zds=['bid','task_id','task_name','bname','gname','is_on','state']
                    start_params=[bid,task_id,rw_name,bname,gname,1,0]
                    aff=tms.merge_into('taskStatus',start_zds,start_params,['bid','task_id'],True)
                    logging.info(f'店铺:《{bname}》,{bid},启动指纹浏览器成功,登记任务:{aff}')
                    rs= tms.ExecQuery('select count(*) from taskstatus where bid=? and task_id!=? and is_on=1',(bid,task_id))

                    if rs[0][0]==0:
                        ctx=browser.contexts[0]
                        ps=ctx.pages
                        logging.info(f'店铺:《{bname}》,{bid},无其他任务执行,关闭无关页面{len(ps)}个')
                        for pp in ps[1:]:
                            await pp.close()
                    return playwright,browser
                except Exception as e:
                    logging.info(f'店铺:《{bname}》,{bid},接管浏览器错误 => {e}')
                    if playwright:
                        await playwright.stop()
                    close_SunBrowser(bid,task_id)
                    return              

            else:
                err_msg=res_data.get('msg')
                if  err_msg=='Too many request per second, please check':
                    wait_sleep_second=random.randint(3,8)
                    logging.info(f'店铺:《{bname}》,{bid},等待 {wait_sleep_second} 秒,重试...')
                    await asyncio.sleep(wait_sleep_second)
                else:
                    err_params.append(err_msg)
                    aff1=tms.merge_into('ErrBid',err_zds,err_params,['bid'])
                    start_zds=['bid','task_id','task_name','bname','gname','is_on','state']
                    start_params=[bid,task_id,rw_name,bname,gname,0,0]
                    aff2=tms.merge_into('taskStatus',start_zds,start_params,['bid','task_id'],True)
                    logging.error(f"店铺:《{bname}》,{bid},启动指纹浏览器错误:{err_msg},错误状态:{aff1},任务状态:{aff2}")
                    close_SunBrowser(bid,task_id)
                    return
            time.sleep(1)
        except Exception as e:
            traceback.print_exc()
            logging.error(f'店铺:《{bname}》,{bid},接口请求错误 => {e}')
            if playwright:
                await playwright.stop()
            close_SunBrowser(bid,task_id)
            return

async def plw_check_pass(page:Page,basic_info,is_only_one=False):
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    userName=basic_info['UserName']
    password=basic_info['Password']
    wait_count=1
    while True:
        try:
            el_pass_ipt=page.locator('xpath=//div[@class="modal-verify-password__title"]/..//input[@type="password"]')
            el_pass_ipt2=page.locator('xpath=//div[@class="verify-container"]//input[@type="password"]')
            el_pass_ipt3=page.locator('xpath=//div[@class="shopee-security-session__title"]/..//input[@type="password"]')
            if (await el_pass_ipt.count())>0:
                logging.info(f'店铺:《{dpName}》,{bid},需要二次验证密码(第一种),开始验证...')
                await el_pass_ipt.fill(password)
                await asyncio.sleep(3)
                el_btn_verify=page.locator('xpath=//div[@class="modal-verify-password__title"]/..//span[text()="Verify" or text()="进行验证"]')
                await el_btn_verify.click(delay=20)
                await asyncio.sleep(3)
                if is_only_one:
                    return
            if (await el_pass_ipt2.count())>0:
                logging.info(f'店铺:《{dpName}》,{bid},需要二次验证密码(第二种),开始验证...')
                await el_pass_ipt2.fill(password)
                await asyncio.sleep(3)
                el_btn_verify=page.locator('xpath=//div[@class="verify-container"]//button[2]')
                await el_btn_verify.click(delay=20)
                await asyncio.sleep(3)
                if is_only_one:
                    return
            if (await el_pass_ipt3.count())>0:
                logging.info(f'店铺:《{dpName}》,{bid},需要二次验证密码(第三种),开始验证...')
                await el_pass_ipt3.fill(password)
                await asyncio.sleep(3)
                el_btn_verify=page.locator('xpath=//div[@class="shopee-security-session__title"]/..//button[2]')
                await el_btn_verify.click(delay=20)
                await asyncio.sleep(3)
                if is_only_one:
                    return

            
        except Exception as e:
            print(e)
        if is_only_one:
            await asyncio.sleep(1)
            wait_count+=1
            if wait_count>5:
                return
        else:
            await asyncio.sleep(10)


async def plw_check_login(page:Page,basic_info,acc_xpath='xpath=//div[@class="subaccount-info"]'):
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    userName=basic_info['UserName']
    password=basic_info['Password']
    is_kj=basic_info['is_KJ']
    wait_count=0
    no_login_count=0
    no_login_xpath='xpath=//input[@name="loginKey"]'
    if is_kj==1:
        acc_xpath='xpath=//div[@class="account-name"]'
        no_login_xpath='xpath=//div[@class="signin-form-template"]'
    while True:

        if MAX_WAIT and wait_count>MAX_WAIT:
            return
        try:
            acc= page.locator(acc_xpath)
            no_login=page.locator(no_login_xpath)
                
            if await(no_login.count())>0:
                if no_login_count>=3:
                    return
                
                logging.info(f'店铺:《{dpName}》,{bid},账号未登录,进行登录')

                if not userName or not password:
                    return
                if is_kj==0:
                    el_zh=page.locator('input[name="loginKey"]')
                    el_mb=page.locator('input[name="password"]')
                else:
                    el_zh=page.locator('xpath=//form//input').nth(0)
                    el_mb=page.locator('xpath=//form//input').nth(1)
                await el_zh.click()                      
                await el_zh.fill("")                   
                await el_zh.type(userName,delay=80)
                await asyncio.sleep(1.7)
                await el_mb.click()                      
                await el_mb.fill("")                   
                await el_mb.type(password,delay=80) 
                try:
                    async with page.expect_navigation():  # 同步 API 使用上下文管理器
                        btn_login=page.locator('xpath=//form/button[1]')
                        try:
                            await btn_login.click(delay=200)
                        except Exception as e:
                            return
                except TimeoutError:
                    acc0=page.locator(acc_xpath)
                    if (await acc0.count())>0:
                        logging.info(f'店铺:《{dpName}》,{bid},已登录acc0')
                        return page
                    else:
                        logging.info(f'店铺:《{dpName}》,{bid},登录超时错误')
                        return
                except PlaywrightTimeout:
                    acc0=page.locator(acc_xpath)
                    if (await acc0.count())>0:
                        logging.info(f'店铺:《{dpName}》,{bid},已登录acc0')
                        return page
                    else:
                        logging.info(f'店铺:《{dpName}》,{bid},登录超时错误')
                        return
                    
                #logging.info(f"店铺:{dpName},{bid},当前URL:{page.url}")
                if 'verify/captcha' in page.url or 'verify/traffic/error' in page.url:
                    logging.info(f'店铺:{dpName},{bid},导航到错误页面退出')
                    sj_sce=1+random.random()*4
                    await asyncio.sleep(sj_sce)
                    raise LoginError
                    
                no_login_count+=1
                
            if (await acc.count())>0:
                logging.info(f'店铺:《{dpName}》,{bid},已登录acc')
                return page
            
            if 'verify/captcha' in page.url or 'verify/traffic/error' in page.url:
                logging.info(f'店铺:{dpName},{bid},301到了错误页面')
                sj_sce=1+random.random()*4
                await asyncio.sleep(sj_sce)
                raise LoginError
                
            wait_count+=1
        except LoginError:
            logging.info(f"店铺:{dpName},{bid},301到了错误页面")
            return
        except Exception as e:
            #traceback.print_exc()
            logging.info(f'店铺:《{dpName}》,{bid},出错 => {e}')
            wait_count+=1
        await asyncio.sleep(1)

async def plw_repeat_goto(page:Page,url):

    err_count=0
    while True:
        try:
            await page.goto(url,timeout=120000)
            break
        except Exception as e:
            err_count+=1
    if err_count>=3:
        raise

async def plw_wait_xpath(page:Page,xpath,host='seller.shopee.co.id'):
    err_count=0
    wait_count=0
    ss=page.locator(f'xpath={xpath}')
    while True:
        try:
            if (await ss.count())>0:
                return ss
        except TimeoutError:
            err_count+=1
            await page.reload(timeout=120000)
        
        if wait_count>=120:
            err_count+=1
            wait_count=0
        if err_count>3:
            raise
        if 'verify/captcha' in page.url:
            url=f'https://{host}/'
            await plw_repeat_goto(page,url) 
        await asyncio.sleep(1)
        wait_count+=1

async def plw_make_request(page: Page, url, params=None, data=None):
    if params:
        url = f"{url}?{urlencode(params)}"

    for _ in range(3):
        try:
            if data is None:
                request_script = """
                async (url) => {
                    const response = await fetch(url, {
                        method: 'GET',
                        headers: {
                            'Content-Type': 'application/json'
                        }
                    });
                    return await response.json(); 
                }
                """
                response_data = await page.evaluate(request_script, url)
            else:
                request_script = """
                async ({ purl, postData }) => {
                    const response = await fetch(purl, {
                        method: 'POST',
                        headers: {
                            'Content-Type': 'application/json'
                        },
                        body: JSON.stringify(postData)
                    });
                    return await response.json(); 
                }
                """
                response_data = await page.evaluate(request_script, {
                    "purl": url,
                    "postData": data
                })
            
            return response_data
        except Exception as e:
            logging.error(f'js异步抓包错误 => 《{url}》, {e}')
            #print(f'js异步抓包错误 => 《{url}》, {e}')
            await asyncio.sleep(3)

    return None

async def plw_get_cot(page:Page, url, params=None):

    if params:
        url = f"{url}?{urlencode(params)}"

    for _ in range(3):
        try:
   
            request_script = """
            async function(url) {
                const response = await fetch(url, {
                    method: 'GET',
                    headers: {
                        'Accept': 'application/json, application/force-download, text/plain, */*',
                        'Content-Type': 'application/json'
                    }
                });
                const arrayBuffer = await response.arrayBuffer();
                return Array.from(new Uint8Array(arrayBuffer));
            }
            """
            response_data = await page.evaluate(request_script, url)
            return response_data
        except Exception as e:
            logging.info(f'js异步抓包错误 => {e}')
        await asyncio.sleep(3)

async def plw_get_cmm_list(page:Page,basic_info):
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    userName=basic_info['UserName']
    password=basic_info['Password']
    host=basic_info['host']
    commissionId_list=set()
    comid_kol_num_map={}
    p=1
    while True:
        api_url1=f'https://{host}/api/v3/affiliateplatform/gql?q=SearchTargetCampaignQuery'

        data={
                "operationName":"SearchTargetCampaignQuery",
                "query":"\n      query SearchTargetCampaignQuery($pageNum: Int, $pageSize: Int, $commissionId: Long, $commissionIdList: [Long!], $commissionName: String, $periodStartTime: Long, $periodEndTime: Long, $status: CommissionStatus, $affiliateTargetingList: [CommissionAffiliateScope!], $productId: Long, $productName: String, $scene: SearchTargetCampaignScene, $shopName: String, $shopType: CommissionSettingShopType, $affiliateId: Long, $affiliateScope: CommissionAffiliateScope, $commissionType: CommissionType, $commissionStatus: [CommissionStatus!], $needFreeSampleCount: Boolean) {\n        SearchTargetCampaign(\n          pageNum: $pageNum\n          pageSize: $pageSize\n          commissionId: $commissionId\n          commissionIdList: $commissionIdList\n          commissionName: $commissionName\n          periodStartTime: $periodStartTime\n          periodEndTime: $periodEndTime\n          status: $status\n          affiliateTargetingList: $affiliateTargetingList\n          productId: $productId\n          productName: $productName\n          scene: $scene\n          shopName: $shopName\n          shopType: $shopType\n          affiliateId: $affiliateId\n          affiliateScope: $affiliateScope\n          commissionType: $commissionType\n          commissionStatus: $commissionStatus\n          needFreeSampleCount: $needFreeSampleCount\n        ) {\n          pagination {\n            pageNum\n            pageSize\n            totalCount\n            needTotalCount\n          }\n          list {\n            commissionId\n            commissionName\n            periodStartTime\n            periodEndTime\n            status\n            commissionType\n            description\n            lastEditor\n            lastEditTime\n            shopId\n            shopName\n            isSetBudget\n            budget\n            budgetCost\n            commissionSource\n            affiliateTargeting\n            items {\n              name\n              id\n              image\n              rate\n            }\n            left\n            minRate\n            maxRate\n            setAllCategory\n            affiliateNum\n            itemList {\n              name\n              id\n              image\n              rate\n            }\n            itemCount\n            pendingTerminatedTime\n            terminatedTime\n            commissionFreeSample {\n              commissionId\n              freeSampleItemCount\n            }\n          }\n        }\n      }\n      ",
                "variables":{"pageNum":p,"pageSize":20,"scene":"Default","needFreeSampleCount":True,"commissionStatus":["CommissionStatusOngoing","CommissionStatusUpcoming"]}
            }

        json_data=await plw_make_request(page,api_url1,data=data)
        if json_data:
            clist=json_data['data']['SearchTargetCampaign']['list']
            for citem in clist:
                commissionId_list.add(citem['commissionId'])
                comid_kol_num_map[citem['commissionId']]=citem['affiliateNum']
            logging.info(f'店铺:《{dpName}》,{bid},第 {p} 页,获取 {len(clist)} 个达人计划,当前总数:{len(commissionId_list)}')
            if p==1:
                totalCount=json_data['data']['SearchTargetCampaign']['pagination']['totalCount']
                pageSize=json_data['data']['SearchTargetCampaign']['pagination']['pageSize']
                max_p=math.ceil(totalCount/pageSize)
                logging.info(f'店铺:《{dpName}》,{bid},共 {max_p} 页计划,总数量 {totalCount} 个')
            
            if p>=max_p:
                break

            p+=1


    return comid_kol_num_map

async def plw_xp_spc_cds(page:Page):

    cookies = await page.context.cookies([page.url])
    SPC_CDS=None
    cksstrlist=[]
    for cookie in cookies:
        cksstrlist.append(f"{cookie['name']}={cookie['value']}")
        if cookie['name']=='SPC_CDS':
            SPC_CDS=cookie['value']
            break
    return SPC_CDS

async def plw_get_params(page,basic_info):
    spdc=await plw_xp_spc_cds(page)
    country=basic_info['Country']
    shopid=basic_info['shopid']
    country_region_map={
        '新加坡':'sg',
        '泰国':'th',
        '印尼':'id'
    }
    if basic_info['is_KJ']==1:
   
        return {
        'SPC_CDS': spdc,
        'SPC_CDS_VER': 2,
        'cnsc_shop_id': shopid,
        'cbsc_shop_region': country_region_map.get(country)
        }

    else:
        return {
            'SPC_CDS': spdc,
            'SPC_CDS_VER': 2,
        }

async def plw_xp_get_shopid(page:Page,basic_info):
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    shopid=basic_info['shopid']
    host=basic_info['host']
    if shopid:
        return shopid
    else:
        spdc=await plw_xp_spc_cds(page)
        api_url=f'https://{host}/api/selleraccount/shop_info/'
        params={
            'SPC_CDS': spdc,
            'SPC_CDS_VER': 2,
        }
        json_data=await plw_make_request(page,api_url,params)
        if json_data:
            shopid=str(json_data['data']['shop_id'])
            aff=tms.ExecNoQuery('update houtai set shopid=? where BrowserID=?',(shopid,bid))
            logging.info(f'店铺:《{dpName}》,{bid},获取shopid:{shopid},更新后台表:{aff}')
            return shopid
        else:
            logging.info(f'店铺:《{dpName}》,{bid},获取shopid失败')

async def plw_xp_params(page,basic_info):
    spdc=await plw_xp_spc_cds(page)
    country=basic_info['Country']
    if basic_info['is_KJ']==1:
        shopid=await plw_xp_get_shopid(page,basic_info)
        return {
        'SPC_CDS': spdc,
        'SPC_CDS_VER': 2,
        'cnsc_shop_id': int(shopid),
        'cbsc_shop_region': REGION_MAP[country]
        }

    else:
        return {
            'SPC_CDS': spdc,
            'SPC_CDS_VER': 2,
        }

async def plw_xp_get_pv(page:Page,basic_info):
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    country=basic_info['Country']
    api_url=f'https://{host}/api/mydata/v3/product/performance/'
    p=1
    pz=50
    spdc=await plw_xp_spc_cds(page)
    if not spdc:
        return
    st_time,ed_time=get_shopee_performance_timestamp('m')

    err_count=0
    itemids_has_pv=[]
    itemids_tow_atc=[]
    all_items=[]
    max_p=20
    while True:
        params={
            'SPC_CDS': spdc,
            'SPC_CDS_VER': 2,
            'start_time': st_time,
            'end_time': ed_time,
            'period': 'past30days',
            'metric_ids': 'all',
            'order_by': 'confirmed_buyers.desc',
            'page_size': pz,
            'page_num': p,
            'category_type': 'shopee',
            'category_id': -1
        }
        json_data= await plw_make_request(page,api_url,params)
        if json_data:
            if json_data['code']==0:
                pitems=json_data['result']['items']
                all_items.extend(pitems)
                for pitem in pitems:
                    if pitem['add_to_cart_buyers']>0:
                        if country=='印尼':
                            if pitem['add_to_cart_buyers']>1:
                                itemids_tow_atc.append(str(pitem['id']))
                        elif country=='马来西亚':
                            itemids_tow_atc.append(str(pitem['id']))
                    elif pitem['pv']:
                        itemids_has_pv.append(str(pitem['id']))
                    
                if p==1:
                    total=json_data['result']['total']
                    max_p=math.ceil(total/pz)
                p+=1
            else:
                err_count+=1
        else:
            err_count+=1
        if p>max_p:
            break
        if err_count>10:
            return
    logging.info(f'店铺:《{dpName}》,{bid},共 {len(all_items)}个报表数据,total:{total},最近90天有流量产品共:{len(itemids_has_pv)},去重后:{len(set(itemids_has_pv))}')
    return set(itemids_has_pv),set(itemids_tow_atc)
        
async def plw_xp_get_prolist(page:Page,basic_info,list_type='live_all'):

    '''虾皮获取所有在售产品列表'''
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    req_p=Queue(-1)
    req_p.put(1)
    err_count=0
    spdc=await plw_xp_spc_cds(page)


    api_url=f'https://{host}/api/v3/opt/mpsku/list/v2/get_product_list'
    params={
        'SPC_CDS': spdc,
        'SPC_CDS_VER': 2,
        'page_number': 1,
        'page_size': 48,
        'list_type': list_type,
        'need_ads': True
    }
    pro_list=[]
    max_p=None
    while True:
        if req_p.empty():
            break
        if err_count>3:
            logging.info(f'店铺:《{dpName}》,{bid},获取产品错误请求超过三次弹出错误')
            return
        p=req_p.get()
        params['page_number']=p
        js_data=await plw_make_request(page,api_url,params)
        if js_data is None or (js_data and js_data.get('code')!=0):
            print(f'店铺:《{dpName}》,{bid},第 {p} 页出错 => {js_data}')
            err_count+=1
            await asyncio.sleep(3)
            #await plw_repeat_goto(page,f'https://{host}/')
            req_p.put(p)
            continue

        page_info=js_data['data']['page_info']
        if page_info['total']==0:
            return []
        
        pros=js_data['data']['products']
        
        if p==1:
            pz=page_info['page_size']
            ptoal=page_info['total']
            max_p=math.ceil(ptoal/pz)
            for ppp in range(2,max_p+1):
                req_p.put(ppp)
        
        pro_list.extend(pros)
        #logging.info(f'店铺:《{dpName}》,{bid},{list_type}第 {p} 页,获取 {len(pros)} 个产品')
    
    logging.info(f'店铺:《{dpName}》,{bid},{list_type},共 {max_p} 页,获取 {len(pro_list)} 个产品')
    return pro_list

async def plw_xp_search_prolist(page:Page,basic_info,list_type='all'):

    '''虾皮获取所有产品'''
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    spdc=await plw_xp_spc_cds(page)

    api_url=f'https://{host}/api/v3/opt/mpsku/list/v2/search_product_list'
    params={
        'SPC_CDS': spdc,
        'SPC_CDS_VER': 2,
        'page_size': 48,
        'list_type': list_type,
        'operation_sort_by': 'recommend_v2',
        'need_ads': True
    }
    pro_list=[]
    err_count=0
    max_p=None
    p=1
    while True:

        if err_count>4:
            logging.info(f'店铺:《{dpName}》,{bid},获取产品错误请求超过4次弹出错误')
            return
        if p>1:
            params['cursor']=next_cursor
        js_data=await plw_make_request(page,api_url,params)
        if js_data is None or (js_data and js_data.get('code')!=0):
            print(js_data)
            err_count+=1
            await asyncio.sleep(5)
            await plw_repeat_goto(page,f'https://{host}/')
            continue

        page_info=js_data['data']['page_info']
        if page_info['total']==0:
            return []
        
        pros=js_data['data']['products']
        
        if p==1:
            pz=48
            ptoal=page_info['total']
            max_p=math.ceil(ptoal/pz)
        pro_list.extend(pros)
        next_cursor=page_info['cursor']
        if next_cursor=='':
            break
        p+=1
    
    logging.info(f'店铺:《{dpName}》,{bid},{list_type},共 {max_p} 页,获取 {len(pro_list)} 个产品')
    return pro_list

async def plw_xp_get_deletedlist(page:Page,basic_info):

    '''虾皮获取草稿箱所有产品'''
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    req_p=Queue(-1)
    req_p.put(1)
    err_count=0
    spdc=await plw_xp_spc_cds(page)


    api_url=f'https://{host}/api/v3/mpsku/list/v2/get_banned_product_list'
    params={
        'SPC_CDS': spdc,
        'SPC_CDS_VER': 2,
        'page_number': 1,
        'page_size': 48,
        'list_type': 'deleted',
    }
    pro_list=[]
    max_p=None
    while True:
        if req_p.empty():
            break
        if err_count>3:
            logging.info(f'店铺:《{dpName}》,{bid},获取产品错误请求超过三次弹出错误')
            return
        p=req_p.get()
        params['page_number']=p
        js_data=await plw_make_request(page,api_url,params)
        if js_data is None:
            err_count+=1
            await asyncio.sleep(5)
            await plw_repeat_goto(page,f'https://{host}/')
            req_p.put(p)
            continue

        if not js_data.get('data'):
            print(js_data)
            await asyncio.sleep(5)
            req_p.put(p)
            continue
        if not js_data['data'].get('products'):

            if not js_data['data'].get('page_info'):
                await asyncio.sleep(5)
                req_p.put(p)
                continue
            else:
                if js_data['data']['page_info']['total']==0:
                    break
            
            
        pros=js_data['data']['products']
        
        logging.info(f'店铺:《{dpName}》,{bid},第 {p} 页被虾皮删除产品 {len(pros)} 个')
        if p==1:
            page_info=js_data['data']['page_info']
            if page_info['total']==0:
                return []
            pz=page_info['page_size']
            ptoal=page_info['total']
            max_p=math.ceil(ptoal/pz)
            for ppp in range(2,max_p+1):
                req_p.put(ppp)
        
        await asyncio.sleep(5)
        pro_list.extend(pros)
    
    print(f'店铺:《{dpName}》,{bid},共 {max_p} 页,获取 {len(pro_list)} 个被虾皮删除产品')
    return pro_list

async def plw_xp_get_draftlist(page:Page,basic_info,target_count=0):

    '''虾皮获取草稿箱所有产品'''
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    req_p=Queue(-1)
    req_p.put(1)
    err_count=0
    spdc=await plw_xp_spc_cds(page)


    api_url=f'https://{host}/api/v3/mpsku/list/v2/get_draft_product_list'
    params={
        'SPC_CDS': spdc,
        'SPC_CDS_VER': 2,
        'page_number': 1,
        'page_size': 100,
        'qc_status': 'all',
    }
    pro_list=[]
    max_p=None
    while True:
        if req_p.empty():
            break
        if err_count>3:
            logging.info(f'店铺:《{dpName}》,{bid},获取产品错误请求超过三次弹出错误')
            return
        p=req_p.get()
        params['page_number']=p
        js_data=await plw_make_request(page,api_url,params)
        if js_data is None:
            err_count+=1
            await asyncio.sleep(5)
            await plw_repeat_goto(page,f'https://{host}/')
            req_p.put(p)
            continue

        if not js_data.get('data'):
            print(js_data)
            await asyncio.sleep(5)
            req_p.put(p)
            continue
        if not js_data['data'].get('products'):

            if not js_data['data'].get('page_info'):
                print(js_data)
                await asyncio.sleep(5)
                req_p.put(p)
                continue
            else:
                if js_data['data']['page_info']['total']==0:
                    break
        
        
            
            
        pros=js_data['data']['products']
        
        logging.info(f'店铺:《{dpName}》,{bid},第 {p} 页草稿箱获取产品 {len(pros)} 个')
        if p==1:
            page_info=js_data['data']['page_info']
            if page_info['total']==0:
                return []
            pz=100
            ptoal=page_info['total']
            max_p=math.ceil(ptoal/pz)
            for ppp in range(2,max_p+1):
                req_p.put(ppp)
        
        await asyncio.sleep(5)
        
        
        pro_list.extend(pros)
        if target_count!=0 and len(pro_list)>=target_count:
            break
    
    print(f'店铺:《{dpName}》,{bid},共 {max_p} 页,获取 {len(pro_list)} 个草稿箱产品')
    return pro_list

async def plw_xp_del_pro(page:Page,basic_info,itemid_list):

    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    params=await plw_xp_params(page,basic_info)
    params['version']='3.1.0'
    api_url=f'https://{host}/api/v3/product/delete_product/'
    data={"product_id_list":itemid_list}

    json_data=await plw_make_request(page,api_url,params,data)

    return json_data

async def plw_xp_del_pros(page:Page,basic_info,itemid_list):

    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    api_url=f'https://{host}/api/v3/product/delete_product/'
    params=await plw_xp_params(page,basic_info)
    params['version']='3.1.0'
    itemids_suc_del=[]
    for j in range(math.ceil(len(itemid_list)/48)):
        cur_itemid_list=itemid_list[j*48:(j+1)*48]
        data={"product_id_list":[int(cur_itemid_str) for cur_itemid_str in cur_itemid_list]}
        res_del=await plw_make_request(page,api_url,params,data)
        if res_del:
            if res_del['code']==0:
                del_list=res_del['data']['result']
                for del_data in del_list:
                    if del_data['code']==0:
                        itemids_suc_del.append(str(del_data['id']))
        
                logging.info(f'店铺:《{dpName}》,{bid},第 {j+1} 页,成功删除:{len(del_list)} 个产品')
            else:
                logging.info(f'店铺:《{dpName}》,{bid},第 {j+1} 页,删除失败 => {res_del}')
                await asyncio.sleep(2)

    logging.info(f'店铺:《{dpName}》,{bid},删除完毕,需删除:{len(itemid_list)},成功删除:{len(itemids_suc_del)}')
    return itemids_suc_del

async def plw_xp_del_draftpro(page:Page,basic_info,itemid_list):

    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    params=await plw_xp_params(page,basic_info)
    api_url=f'https://{host}/api/tool/mass_product/delete_product/'
    data={"unpublished_ids":itemid_list}

    json_data=await plw_make_request(page,api_url,params,data)

    return json_data

async def plw_get_list_count(page:Page,basic_info):

    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    spdc=await plw_xp_spc_cds(page)
    api_url=f'https://{host}/api/v3/mpsku/list/v2/get_list_count'
    params={
        'SPC_CDS': spdc,
        'SPC_CDS_VER': 2,
        'list_types': 'live_all,delisted,draft',
        'list_type_for_quick_filter': 'live_all'
    }
    
    json_data=await plw_make_request(page,api_url,params)
    count_infos=json_data['data']['count_infos']
    print(count_infos)
    unpublish_count=0
    for count_info in count_infos:
        if count_info['list_type']=='live_all':
            live_count=count_info['count']
        elif count_info['list_type'] in ['delisted','draft']:
            unpublish_count+=count_info['count']
    if host=='seller.shopee.co.th':
        max_publish=1000-live_count-unpublish_count
    else:
        max_publish=2000-live_count-unpublish_count
    need_publish_count=40-unpublish_count
    if need_publish_count>max_publish:
        need_publish_count=max_publish
    logging.info(f'店铺:《{dpName}》,bid:{bid},已上架:{live_count},已下架:{unpublish_count},需上架:{need_publish_count}')
    return need_publish_count

async def plw_get_can_publish(page:Page,basic_info,st=1):

    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']

    api_url2=f'https://{host}/api/v3/opt/mpsku/list/get_product_count_limit'
    params2=await plw_get_params(page,basic_info)
    json_data2=await plw_make_request(page,api_url2,params2)
    
    if json_data2:
        current_published=json_data2['data']['current_published']
        publish_limit=json_data2['data']['publish_limit']
        unlisted_count=json_data2['data']['unlisted_count']
        if st==2:
            publish_limit=1000 if publish_limit>1000 else publish_limit
        can_count=publish_limit-current_published-unlisted_count

         
        logging.info(f'店铺:《{dpName}》,bid:{bid},发布总量:{publish_limit},已发布:{current_published},剩余额度:{can_count},可发布:{can_count}')
        return can_count

async def plw_xp_get_product_count_limit(page:Page,basic_info):

    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']

    api_url2=f'https://{host}/api/v3/opt/mpsku/list/get_product_count_limit'
    params2=await plw_get_params(page,basic_info)
    json_data2=await plw_make_request(page,api_url2,params2)
    
    if json_data2:
        publish_limit=json_data2['data']['publish_limit']
        logging.info(f'店铺:《{dpName}》,bid:{bid},产品额度:{publish_limit}')
        return publish_limit

async def plw_xp_get_list_count(page:Page,basic_info):
    '''虾皮获取产品概况'''
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    spdc=await plw_xp_spc_cds(page)
    api_url=f'https://{host}/api/v3/mpsku/list/v2/get_list_count'
    params={
        'SPC_CDS': spdc,
        'SPC_CDS_VER': 2,
        'list_types': 'all,live_all,restock,review_listing_detail,improve_new_product,banned,deboosted,deleted,reviewing,delisted,draft',
        'list_type_for_quick_filter': 'live_all'
    }
    list_info={}
    json_data=await plw_make_request(page,api_url,params)
    count_infos=json_data['data']['count_infos']
    #print(count_infos)
    count_info_dict={count_info['list_type']:count_info['count'] for count_info in count_infos}
    list_info['live']=count_info_dict.get('live_all')
    list_info['banned']=count_info_dict.get('banned')
    list_info['deboosted']=count_info_dict.get('deboosted')
    list_info['shopee_deleted']=count_info_dict.get('deleted')
    list_info['under_shopee_review']=count_info_dict.get('reviewing')
    list_info['delisted']=count_info_dict.get('delisted')
    list_info['draft']=count_info_dict.get('draft')
    list_info['all']=count_info_dict.get('all')
    return list_info


async def plw_xp_downlond_basic_mb(page:Page,basic_info):

    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    spdc=await plw_xp_spc_cds(page)
    params={
        'timestamp':int(time.time()*1000),
        'SPC_CDS':spdc,
        'SPC_CDS_VER':2
        }
    api_down_url=f'https://{host}/api/tool/mass_product/download_basic_template/'
    cot_byte_list= await plw_get_cot(page,api_down_url,params)
    if cot_byte_list:
        logging.info(f'店铺:《{dpName}》,{bid},通用模版,下载长度{len(cot_byte_list)}')
        excel_content =bytes(cot_byte_list)
        # 用 pandas 读取再写出干净的 Excel 文件
        execel_path=f'官方模版表/{bid}_通用模版.xlsx'
        with open(execel_path,'wb') as f:
            f.write(excel_content)
        
        fix_excel_xml(execel_path,execel_path)
        return execel_path

def is_valid_name(pname: str, infringing_words: list[str]) -> bool:
    pname_low = pname.lower()
    return not any(word.lower() in pname_low for word in infringing_words)

def alter_1688sku(rp,basic_info):

    country=basic_info['Country']
    exchange_rate=float(basic_info['exchange_rate'])
    price_ratio=5
    min_price=int(69*exchange_rate)
    for item in rp:
        if country in ['泰国','马来西亚','越南']:
            if len(item['yntitle'])>120:
                ctit=item['yntitle']
                sjm_part=ctit[-11:]
                bt_part=ctit[:-11]
                item['yntitle']=bt_part[:109]+sjm_part
        if not item['pdec']:
            item['pdec'] = item['yntitle']
        item['pdec']=item['pdec'].replace('Amazon','').replace('AliExpress','')
        if item['pdec'] and len(item['pdec']) > 2500:
            item['pdec'] = item['pdec'][:2500]
        if item['sku_price']<min_price:
            item['sku_price']=min_price

    # 处理 sku1 和 sku2 重复的情况
    rp_dict = {}
    for r in rp:
        item_id = r['itemid']
        if rp_dict.get(item_id):
            rp_dict[item_id].append(r)
        else:
            rp_dict[item_id] = [r]
        
    
    for itemid,pskulist in rp_dict.items():
        cn_sku1_to_ynsku1={}
        cn_sku2_to_ynsku2={}

        for pitem in pskulist:
            if pitem['cn_sku1']:
                ynsku = pitem['sku1']
                cn_sku1_to_ynsku1[pitem['cn_sku1']] = ynsku[:17]
            if pitem['cn_sku2']:
                ynsku = pitem['sku2']
                cn_sku2_to_ynsku2[pitem['cn_sku2']] = ynsku[:17]

        for zddd in [cn_sku1_to_ynsku1,cn_sku2_to_ynsku2]:

            ynsku_dict=defaultdict(list)
            for cnsku,ynsku in zddd.items():
                ynsku_dict[ynsku].append((cnsku,ynsku))
            
            for rp_ynsku,skulll in ynsku_dict.items():

                if len(skulll)>1:
                    for jjj in range(len(skulll)):
                        if jjj==0:
                            continue
                        zw_sku=skulll[jjj][0]
                        zddd[zw_sku]=f'{rp_ynsku}_{jjj}'

        piii=0
        min_p=min(pskulist, key=lambda x: x["sku_price"])['sku_price']
        max_p=max(pskulist, key=lambda x: x["sku_price"])['sku_price']
        if max_p/min_p>price_ratio:
            min_p=math.ceil(max_p/price_ratio)
        for pitem in pskulist:
            if pitem['cn_sku1']:
                pitem['sku1']=cn_sku1_to_ynsku1[pitem['cn_sku1']]
            if pitem['cn_sku2']:
                pitem['sku2']=cn_sku2_to_ynsku2[pitem['cn_sku2']]


            if not pitem['sku1_img']:
                for pkj in range(len(pskulist)):
                    if pskulist[pkj]['sku1_img']:
                        pitem['sku1_img']=pskulist[pkj]['sku1_img']
                        break
            if pitem['sku_price']<min_p:
                pitem['sku_price']=min_p
            piii+=1


    final_result = []
    for item_id, records in rp_dict.items():
        grouped_by_sku1 = defaultdict(list)
        cur_sku2s=defaultdict(set)
        for record in records:
            grouped_by_sku1[record['sku1']].append(record)
            if not record['sku2']:
                record['sku2']=''
            cur_sku2s[record['sku1']].add(record['sku2'])
        current_count = 0
        item_group = []
        #logging.info(cur_sku2s.values())
        set_counts = Counter(tuple(sorted(s)) for s in cur_sku2s.values())
        most_common_set, count = set_counts.most_common(1)[0]
        for sku1, sku_records in grouped_by_sku1.items():
            if cur_sku2s[sku1]==set(most_common_set):
                if current_count + len(sku_records) <= 80:
                    item_group.extend(sku_records)
                    current_count += len(sku_records)
                else:
                    break
        final_result.append(item_group)
    #final_result=list(rp_dict.values())
    return final_result

async def plw_get_excel_pro(page:Page,basic_info,total_pcount):

    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    yybm=basic_info['YYBM']
    country=basic_info['Country']
    country_wystr=''
    shopid=await plw_xp_get_shopid(page,basic_info)
    exchange_rate=float(basic_info['exchange_rate'])
    price_times=2.2
    if basic_info['price_times']:
        price_times=float(basic_info['price_times'])
    if country=='印尼':
        country_wystr=''' AND emarket_json like '%"shopee印尼"%' '''
        zd_ptit='yntitle'
        zd_pdes='yndes'
        zd_is_fy_sku='is_fy_sku'
        zd_is_fy_tit='is_yntit'
        zd_sku1='ynsku1'
        zd_sku2='ynsku2'
        zd_dc_bid='distribution_export_bid'
        zd_dc_shopid='distribution_export_shopid'
        zd_price=f'sk.sku_price * {price_times} * {exchange_rate} as int'

    elif country=='泰国':
        country_wystr=''' AND emarket_json like '%"shopee泰国"%' '''
        zd_ptit='thtitle'
        zd_pdes='thdes'
        zd_is_fy_sku='is_fy_ensku'
        zd_is_fy_tit='is_thtit'
        zd_sku1='ensku1'
        zd_sku2='ensku2'
        zd_dc_bid='th_dc_bid'
        zd_dc_shopid='th_dc_shopid'
        zd_price=f'sk.sku_price * {price_times} * {exchange_rate} as float'

    elif country=='马来西亚':
        country_wystr='''AND NOT EXISTS (
                        SELECT 1 FROM ShopeeAdsMove m
                        WHERE m.to_country='马来西亚' and m.random_code=p.random_code
                    ) '''
        zd_ptit='mltitle'
        zd_pdes='mldes'
        zd_is_fy_sku='is_fy_ensku'
        zd_is_fy_tit='is_mltit'
        zd_sku1='ensku1'
        zd_sku2='ensku2'
        zd_dc_bid='ml_dc_bid'
        zd_dc_shopid='ml_dc_shopid'
        zd_price=f'sk.sku_price * {price_times} * {exchange_rate} as float'


    lname_s1688_dict={
        'Product Name':'yntitle',
        'Product Description':'pdec',
        'Parent SKU':'itemid',
        'Variation Integration No.':'itemid',
        'Variation Name1':'sku1_name',
        'Variation Name2':'sku2_name',
        'Option for Variation 1':'sku1',
        'Option for Variation 2':'sku2',
        'Price':'sku_price',
        'Image per Variation':'sku1_img',
        'SKU':'skuid',
        'Cover image':'image',
        'Item Image 1':('images',0),
        'Item Image 2':('images',1),
        'Item Image 3':('images',2),
        'Item Image 4':('images',3),
        'Item Image 5':('images',4),
        'Item Image 6':('images',5),
        'Item Image 7':('images',6),
        'Item Image 8':('images',7)
        
    }

    lname_gd_dict={
        'Stock':'100',
        'Weight':'10',
        'Reguler (Cashless)':'On'
    }
    yybm_filter_str=''
    rs_yybm_filter= tms.ExecQuerydict('select * from ShopeeADDistribution where YYBM=?',(yybm,))

    if len(rs_yybm_filter)>0:

        price_export=rs_yybm_filter[0]['price_export']
        if price_export:
            min_price,max_price=price_export.split('-')
            yybm_filter_str+=f'AND (price between {min_price} and {max_price}) '

        historical_sold_export=rs_yybm_filter[0]['historical_sold_export']
        if historical_sold_export:
            min_sold,max_sold=historical_sold_export.split('-')
            yybm_filter_str+=f'AND (historical_sold between {min_sold} and {max_sold}) '
    kwgender_str=''
    if yybm=='id9999':
        logging.info(f'店铺:《{dpName}》,{bid},运营编码:{yybm},《印尼大数据》发布ATC优秀,CR优秀,CPC优秀')
        kwgender_str='AND p.keword_gender<4'
    elif yybm=='mylxh777':
        logging.info(f'店铺:《{dpName}》,{bid},运营编码:{yybm},《马来大数据》发布ATC优秀,CR优秀')
        kwgender_str='AND p.keword_gender<3'
    
    sqlstr=f'''
            WITH Base AS (
                SELECT 
                    itemid,keword_gender
                FROM S1688Pro p
                    WHERE {zd_dc_bid} is null
                    AND {zd_is_fy_sku} = 1 
                    AND {zd_is_fy_tit} = 1
                    AND (shop_name like '%公司%' or shop_name like '%厂%')
                    AND NOT EXISTS (
                        SELECT 1 FROM Blacklist b
                        WHERE b.word_language = '中文'
                        AND p.pname LIKE '%' + b.word_blacklist + '%'
                    )
                    AND NOT EXISTS (
                        SELECT 1 FROM Blacklist b
                        WHERE b.word_language = '印尼语'
                        AND p.yntitle LIKE '%' + b.word_blacklist + '%'
                    )
                    {yybm_filter_str}
                    {country_wystr}
                    {kwgender_str}
            )
            SELECT 
                    sp.itemid,
                    sp.{zd_ptit} + ' |' + sp.random_code + '|' AS yntitle,
                    sp.sku1_name,
                    sp.sku2_name,
                    sp.image,
                    sp.images,
                    sk.sku1_img,
                    sk.sku1 AS cn_sku1,
                    sk.sku2 AS cn_sku2,
                    sk.{zd_sku1} AS sku1,
                    sk.{zd_sku2} AS sku2,
                    sk.skuid,
                    sku_price AS cn_price,
                    cast({zd_price}) AS sku_price,
                    sp.{zd_pdes} AS pdec
                FROM 
                    (SELECT TOP {total_pcount} itemid from Base order by keword_gender) as temp
                JOIN 
                    S1688Pro sp ON sp.itemid = temp.itemid
                JOIN 
                    S1688ProSKU sk ON sp.itemid = sk.itemid;
        '''

    while True:

        
        with open('状态锁/s1688大数据导出.json','r',encoding='utf-8') as f:
            status_json=json.load(f)
        if status_json['status']==1:
            await asyncio.sleep(3)
            continue
        else:
            cur_hz=getTimeStr()
            status_json['status']=1
            with open('状态锁/s1688大数据导出.json','w',encoding='utf-8') as f:
                json.dump(status_json,f,ensure_ascii=False,indent=4)
            rp=tms.ExecQuerydict(sqlstr)
            if len(rp)>0:
                dc_itemids=set([r['itemid'] for r in rp])
                itemids_wstr=','.join([f"'{itemid}'" for itemid in dc_itemids])
                aff_mark=tms.ExecNoQuery(f"update S1688Pro set {zd_dc_bid}='{bid}',{zd_dc_shopid}='{shopid}' where itemid in ({itemids_wstr})")
                logging.info(f'店铺:《{dpName}》,{bid},标记 {aff_mark} 个产品')

            status_json['status']=0
            with open('状态锁/s1688大数据导出.json','w',encoding='utf-8') as f:
                json.dump(status_json,f,ensure_ascii=False,indent=4)
            break

    if len(rp)==0:
        logging.info(f'店铺:《{dpName}》,{bid},当前无产品可导出')
        return 0


    smt_rp=alter_1688sku(rp,basic_info)
    for _ in range(3):
        mbpath = await plw_xp_downlond_basic_mb(page,basic_info)
        if mbpath:
            break
    
    if mbpath is None:
        logging.info(f'店铺:《{dpName}》,{bid},下载通用模版失败')
        return
    
    dcmr=f'大数据导出/{country}/{bid}/1688pro_{cur_hz}'
    os.makedirs(dcmr,exist_ok=True)
    cpcount=len(smt_rp)
    pc=200
    for fj in range(math.ceil(cpcount/pc)):
        cur_smt_rp=smt_rp[fj*pc:(fj+1)*pc]
        if len(cur_smt_rp)==0:
            break
        wb=openpyxl.load_workbook(mbpath)
        ws=wb['Template']
        mb_tits=[ws.cell(3,col).value for col in range(1,ws.max_column+1)]
        row=7
        pskudatas=[]
        for pii_lsit in cur_smt_rp:
            pskudatas.extend(pii_lsit)
        allimgs=[]
        pskudatas.sort(key=lambda x: (x['itemid'],x['sku1'], x['sku2']))
        for pro in pskudatas:
            if pro['images']:
                try:
                    allimgs=json.loads(pro['images'])
                except Exception as e:
                    pass
            try:
                for tit_col_ind in range(len(mb_tits)):
                    rol=tit_col_ind+1
                    lname=mb_tits[tit_col_ind]


                    if lname in lname_s1688_dict:
                        czddd=lname_s1688_dict[lname]
                        if isinstance(czddd,str):
                            ws.cell(row=row,column=rol).value=pro[czddd]
                        else:
                            ccc_ind=czddd[1]
                            if ccc_ind<len(allimgs):
                                ws.cell(row=row,column=rol).value=allimgs[ccc_ind]

                    elif lname in lname_gd_dict:
                        if lname=='Stock':
                            if pro['cn_price']<15 or pro['cn_price']>60:
                                ws.cell(row=row,column=rol).value=0
                            else:
                                ws.cell(row=row,column=rol).value=lname_gd_dict[lname]
                        
                        else:
                            ws.cell(row=row,column=rol).value=lname_gd_dict[lname]

                row+=1
            except Exception as e:
                logging.info(f'第{row}行,第{rol}列填写错误 =>{e}')
        ex_path=f'{dcmr}/{yybm}_{fj+1}_{len(cur_smt_rp)}_{cur_hz}.xlsx'
        wb.save(ex_path)
        logging.info(f'店铺:《{dpName}》,{bid},第 {fj+1} 份文件,{len(cur_smt_rp)} 个产品 => 《{ex_path}》,成功导出')

    return dcmr

async def plw_get_excel_thpro(page:Page,basic_info,total_pcount):

    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    yybm=basic_info['YYBM']
    country=basic_info['Country']
    country_wystr=''
    shopid=await plw_xp_get_shopid(page,basic_info)
    exchange_rate=float(basic_info['exchange_rate'])
    price_times=2.2
    if basic_info['price_times']:
        price_times=float(basic_info['price_times'])
    if country=='印尼':
        country_wystr=''' AND emarket_json like '%"shopee印尼"%' '''
        zd_ptit='yntitle'
        zd_pdes='yndes'
        zd_is_fy_sku='is_fy_sku'
        zd_is_fy_tit='is_yntit'
        zd_sku1='ynsku1'
        zd_sku2='ynsku2'
        zd_dc_bid='distribution_export_bid'
        zd_dc_shopid='distribution_export_shopid'
        zd_price=f'sk.sku_price * {price_times} * {exchange_rate} as int'

    elif country=='泰国':
        country_wystr=''' AND emarket_json like '%"shopee泰国"%' '''
        zd_ptit='thtitle'
        zd_pdes='thdes'
        zd_is_fy_sku='is_fy_ensku'
        zd_is_fy_tit='is_thtit'
        zd_sku1='ensku1'
        zd_sku2='ensku2'
        zd_dc_bid='th_dc_bid'
        zd_dc_shopid='th_dc_shopid'
        zd_price=f'sk.sku_price * {price_times} * {exchange_rate} as float'




    lname_s1688_dict={
        'Product Name':'yntitle',
        'Product Description':'pdec',
        'Parent SKU':'itemid',
        'Variation Integration No.':'itemid',
        'Variation Name1':'sku1_name',
        'Variation Name2':'sku2_name',
        'Option for Variation 1':'sku1',
        'Option for Variation 2':'sku2',
        'Price':'sku_price',
        'Image per Variation':'sku1_img',
        'SKU':'skuid',
        'Cover image':'image',
        'Item Image 1':('images',0),
        'Item Image 2':('images',1),
        'Item Image 3':('images',2),
        'Item Image 4':('images',3),
        'Item Image 5':('images',4),
        'Item Image 6':('images',5),
        'Item Image 7':('images',6),
        'Item Image 8':('images',7)
        
    }

    lname_gd_dict={
        'Stock':'100',
        'Weight':'10',
        'Reguler (Cashless)':'On'
    }
 
    tomm_str="AND ship_tag like '%明天达%'"
    sqlstr=f'''
            WITH Base AS (
                SELECT 
                    itemid,keword_gender
                FROM S1688Pro p
                    WHERE {zd_dc_bid} is null
                    AND {zd_is_fy_sku} = 1 
                    AND {zd_is_fy_tit} = 1
                    AND copyright_th is null
                    AND historical_sold>=100
                    AND price between 20 and 100
                    AND (shop_name like '%公司%' or shop_name like '%厂%')
                    AND NOT EXISTS (
                        SELECT 1 FROM Blacklist b
                        WHERE b.word_language = '中文'
                            AND p.pname LIKE '%' + b.word_blacklist + '%'
                    )
                    AND NOT EXISTS (
                        SELECT 1 FROM Blacklist b
                        WHERE b.word_language = '印尼语'
                            AND p.yntitle LIKE '%' + b.word_blacklist + '%'
                    )
                    AND NOT EXISTS (
                        SELECT 1 FROM Blacklist b
                        WHERE b.word_language = '泰语'
                            AND p.thtitle LIKE '%' + b.word_blacklist + '%'
                    )
            )
            SELECT 
                    sp.itemid,
                    sp.{zd_ptit} + ' |' + sp.random_code + '|' AS yntitle,
                    sp.sku1_name,
                    sp.sku2_name,
                    sp.image,
                    sp.images,
                    sk.sku1_img,
                    sk.sku1 AS cn_sku1,
                    sk.sku2 AS cn_sku2,
                    sk.{zd_sku1} AS sku1,
                    sk.{zd_sku2} AS sku2,
                    sk.skuid,
                    sku_price AS cn_price,
                    cast({zd_price}) AS sku_price,
                    sp.{zd_pdes} AS pdec
                FROM 
                    (SELECT TOP {total_pcount} itemid from Base order by keword_gender) as temp
                JOIN 
                    S1688Pro sp ON sp.itemid = temp.itemid
                JOIN 
                    S1688ProSKU sk ON sp.itemid = sk.itemid;
        '''

    while True:

        
        with open('状态锁/s1688泰国侵权测试导出.json','r',encoding='utf-8') as f:
            status_json=json.load(f)
        if status_json['status']==1:
            await asyncio.sleep(3)
            continue
        else:
            cur_hz=getTimeStr()
            status_json['status']=1
            with open('状态锁/s1688泰国侵权测试导出.json','w',encoding='utf-8') as f:
                json.dump(status_json,f,ensure_ascii=False,indent=4)
            rp=tms.ExecQuerydict(sqlstr)
            if len(rp)==0:
                rp=tms.ExecQuerydict(sqlstr.replace(tomm_str,''))
            if len(rp)>0:
                dc_itemids=set([r['itemid'] for r in rp])
                itemids_wstr=','.join([f"'{itemid}'" for itemid in dc_itemids])
                aff_mark=tms.ExecNoQuery(f"update S1688Pro set {zd_dc_bid}='{bid}',{zd_dc_shopid}='{shopid}' where itemid in ({itemids_wstr})")
                logging.info(f'店铺:《{dpName}》,{bid},标记 {aff_mark} 个产品')

            status_json['status']=0
            with open('状态锁/s1688泰国侵权测试导出.json','w',encoding='utf-8') as f:
                json.dump(status_json,f,ensure_ascii=False,indent=4)
            break

    if len(rp)==0:
        logging.info(f'店铺:《{dpName}》,{bid},当前无产品可导出')
        return 0

    smt_rp=alter_1688sku(rp,basic_info)
    for _ in range(3):
        mbpath = await plw_xp_downlond_basic_mb(page,basic_info)
        if mbpath:
            break
    
    if mbpath is None:
        logging.info(f'店铺:《{dpName}》,{bid},下载通用模版失败')
        return
    
    dcmr=f'泰国侵权测试导出/{country}/{bid}/1688pro_{cur_hz}'
    os.makedirs(dcmr,exist_ok=True)
    cpcount=len(smt_rp)
    pc=200
    for fj in range(math.ceil(cpcount/pc)):
        cur_smt_rp=smt_rp[fj*pc:(fj+1)*pc]
        if len(cur_smt_rp)==0:
            break
        wb=openpyxl.load_workbook(mbpath)
        ws=wb['Template']
        mb_tits=[ws.cell(3,col).value for col in range(1,ws.max_column+1)]
        row=7
        pskudatas=[]
        for pii_lsit in cur_smt_rp:
            pskudatas.extend(pii_lsit)
        allimgs=[]
        pskudatas.sort(key=lambda x: (x['itemid'],x['sku1'], x['sku2']))
        for pro in pskudatas:
            if pro['images']:
                try:
                    allimgs=json.loads(pro['images'])
                except Exception as e:
                    pass
            try:
                for tit_col_ind in range(len(mb_tits)):
                    rol=tit_col_ind+1
                    lname=mb_tits[tit_col_ind]


                    if lname in lname_s1688_dict:
                        czddd=lname_s1688_dict[lname]
                        if isinstance(czddd,str):
                            ws.cell(row=row,column=rol).value=pro[czddd]
                        else:
                            ccc_ind=czddd[1]
                            if ccc_ind<len(allimgs):
                                ws.cell(row=row,column=rol).value=allimgs[ccc_ind]

                    elif lname in lname_gd_dict:
                        if lname=='Stock':
                            if pro['cn_price']<10 or pro['cn_price']>100:
                                ws.cell(row=row,column=rol).value=0
                            else:
                                ws.cell(row=row,column=rol).value=lname_gd_dict[lname]
                        
                        else:
                            ws.cell(row=row,column=rol).value=lname_gd_dict[lname]

                row+=1
            except Exception as e:
                logging.info(f'第{row}行,第{rol}列填写错误 =>{e}')
        ex_path=f'{dcmr}/{yybm}_{fj+1}_{len(cur_smt_rp)}_{cur_hz}.xlsx'
        wb.save(ex_path)
        logging.info(f'店铺:《{dpName}》,{bid},第 {fj+1} 份文件,{len(cur_smt_rp)} 个产品 => 《{ex_path}》,成功导出')

    return dcmr

async def plw_get_ads_excel_pro(page:Page,basic_info,rcode_list=None,itemid_list=None,data_type='广告测品导出'):

    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    yybm=basic_info['YYBM']
    country=basic_info['Country']
    shopid=await plw_xp_get_shopid(page,basic_info)
    exchange_rate=float(basic_info['exchange_rate'])
    price_times=2.2
    if basic_info['price_times']:
        price_times=float(basic_info['price_times'])
    if country=='泰国':
        zd_ptit='thtitle'
        zd_pdes='thdes'
        zd_is_fy_sku='is_fy_ensku'
        zd_is_fy_tit='is_thtit'
        zd_sku1='ensku1'
        zd_sku2='ensku2'

        zd_price=f'sk.sku_price * {price_times} * {exchange_rate} as float'

    elif country=='马来西亚':
        zd_ptit='mltitle'
        zd_pdes='mldes'
        zd_is_fy_sku='is_fy_ensku'
        zd_is_fy_tit='is_mltit'
        zd_sku1='ensku1'
        zd_sku2='ensku2'
        zd_price=f'sk.sku_price * {price_times} * {exchange_rate} as float'

    elif country=='越南':
        zd_ptit='vntitle'
        zd_pdes='vndes'
        zd_is_fy_sku='is_fy_ensku'
        zd_is_fy_tit='is_vntit'
        zd_sku1='ensku1'
        zd_sku2='ensku2'
        zd_price=f'sk.sku_price * {price_times} * {exchange_rate} as float'
    else:
        zd_ptit='yntitle'
        zd_pdes='yndes'
        zd_is_fy_sku='is_fy_sku'
        zd_is_fy_tit='is_yntit'
        zd_sku1='ynsku1'
        zd_sku2='ynsku2'
        zd_price=f'sk.sku_price * {price_times} * {exchange_rate} as int'

        

    lname_s1688_dict={
        'Product Name':'yntitle',
        'Product Description':'pdec',
        'Parent SKU':'itemid',
        'Variation Integration No.':'itemid',
        'Variation Name1':'sku1_name',
        'Variation Name2':'sku2_name',
        'Option for Variation 1':'sku1',
        'Option for Variation 2':'sku2',
        'Price':'sku_price',
        'Image per Variation':'sku1_img',
        'SKU':'skuid',
        'Cover image':'image',
        'Item Image 1':('images',0),
        'Item Image 2':('images',1),
        'Item Image 3':('images',2),
        'Item Image 4':('images',3),
        'Item Image 5':('images',4),
        'Item Image 6':('images',5),
        'Item Image 7':('images',6),
        'Item Image 8':('images',7)
        
    }

    lname_gd_dict={
        'Stock':'100',
        'Weight':'10',
        'Reguler (Cashless)':'On'
    }


    if rcode_list:
        wstr_rcode=','.join([f"'{rcode}'" for rcode in rcode_list])
        wstr=f'where sp.random_code in ({wstr_rcode})'
    elif itemid_list:
        wstr_rcode=','.join([f"'{itemid}'" for itemid in itemid_list])
        wstr=f'where sp.itemid in ({wstr_rcode})'
    
    else:
        return 0

    sqlstr=f'''

            SELECT 
                    sp.itemid,
                    sp.{zd_ptit} + ' |' + sp.random_code + '|' AS yntitle,
                    sp.sku1_name,
                    sp.sku2_name,
                    sp.image,
                    sp.images,
                    sk.sku1_img,
                    sk.sku1 AS cn_sku1,
                    sk.sku2 AS cn_sku2,
                    sk.{zd_sku1} AS sku1,
                    sk.{zd_sku2} AS sku2,
                    sk.skuid,
                    sku_price AS cn_price,
                    cast({zd_price}) AS sku_price,
                    sp.{zd_pdes} AS pdec
                FROM  
                    S1688Pro sp
                JOIN 
                    S1688ProSKU sk ON sp.itemid = sk.itemid
                {wstr}
                AND sp.{zd_is_fy_tit}=1
                AND sp.{zd_is_fy_sku}=1
        '''


    cur_hz=getTimeStr()
    rp=tms.ExecQuerydict(sqlstr)


    if len(rp)==0:
        logging.info(f'店铺:《{dpName}》,{bid},当前无产品可导出')
        return 0


    smt_rp=alter_1688sku(rp,basic_info)
    for _ in range(3):
        mbpath = await plw_xp_downlond_basic_mb(page,basic_info)
        if mbpath:
            break
    
    if mbpath is None:
        logging.info(f'店铺:《{dpName}》,{bid},下载通用模版失败')
        return
    

    dcmr=f'{data_type}/{country}/{bid}_{cur_hz}'


    os.makedirs(dcmr,exist_ok=True)
    cpcount=len(smt_rp)
    pc=200
    for fj in range(math.ceil(cpcount/pc)):
        cur_smt_rp=smt_rp[fj*pc:(fj+1)*pc]
        if len(cur_smt_rp)==0:
            break
        wb=openpyxl.load_workbook(mbpath)
        ws=wb['Template']
        mb_tits=[ws.cell(3,col).value for col in range(1,ws.max_column+1)]
        row=7
        pskudatas=[]
        for pii_lsit in cur_smt_rp:
            pskudatas.extend(pii_lsit)
        allimgs=[]
        pskudatas.sort(key=lambda x: (x['itemid'],x['sku1'], x['sku2']))
        for pro in pskudatas:
            if pro['images']:
                try:
                    allimgs=json.loads(pro['images'])
                except Exception as e:
                    pass
            try:
                for tit_col_ind in range(len(mb_tits)):
                    rol=tit_col_ind+1
                    lname=mb_tits[tit_col_ind]


                    if lname in lname_s1688_dict:
                        czddd=lname_s1688_dict[lname]
                        if isinstance(czddd,str):
                            ws.cell(row=row,column=rol).value=pro[czddd]
                        else:
                            ccc_ind=czddd[1]
                            if ccc_ind<len(allimgs):
                                ws.cell(row=row,column=rol).value=allimgs[ccc_ind]

                    elif lname in lname_gd_dict:
                        if lname=='Stock':
                            if pro['cn_price']<10 or pro['cn_price']>100:
                                ws.cell(row=row,column=rol).value=0
                            else:
                                ws.cell(row=row,column=rol).value=lname_gd_dict[lname]
                        
                        else:
                            ws.cell(row=row,column=rol).value=lname_gd_dict[lname]

                row+=1
            except Exception as e:
                logging.info(f'第{row}行,第{rol}列填写错误 =>{e}')
        ex_path=f'{dcmr}/{yybm}_{fj+1}_{len(cur_smt_rp)}_{cur_hz}.xlsx'
        wb.save(ex_path)
        logging.info(f'店铺:《{dpName}》,{bid},第 {fj+1} 份文件,{len(cur_smt_rp)} 个产品 => 《{ex_path}》,成功导出')

    return dcmr

async def plw_xp_get_report(page:Page,file_names,basic_info):

    
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    spdc=await plw_xp_spc_cds(page)
    api_url=f'https://{host}/api/tool/mass_product/get_mass_record_list/'
    params={'SPC_CDS': spdc,
            'SPC_CDS_VER': 2,
            'page_number': 1,
            'page_size': 20,
            'operation_type': 2
        }
    resppp={}
    max_wait=0
    while True:
        js_data=await plw_make_request(page,api_url,params)
        rp=js_data['data']['list']
        if rp:
            for rrr in rp:
                cfile_name=rrr['user_file_name'] 
                if cfile_name in file_names:
                    record_status=rrr["record_status"]
                    logging.info(f'店铺:《{dpName}》,{bid},等待文件《{cfile_name}》上传,当前状态:{record_status}')
                    if record_status not in [2,3]:
                        resppp[cfile_name]=copy.copy(rrr)

        if set(resppp.keys())==set(file_names):
            logging.info(f'店铺:《{dpName}》,{bid},{len(file_names)} 个文件已全部等待完毕')
            return resppp
        await asyncio.sleep(10)
        max_wait+=1
        if max_wait>60:
            logging.info(f'店铺:《{dpName}》,{bid},{len(file_names)},等待超时退出')
            return resppp
            
def get_parentid_from_excel(excel_path):
    pid_list=set()
    wb=openpyxl.load_workbook(excel_path)
    ws=wb['Template']
    header_line=[ws.cell(row=3,column=col).value.strip() for col in range(1,ws.max_column+1)]
    pid_col=header_line.index('Parent SKU')+1
    for row in range(7,ws.max_row+1):
        cpid=ws.cell(row=row,column=pid_col).value
        if isinstance(cpid,str):
            pid_list.add(cpid)
    
    return pid_list


def get_shopee_performance_timestamp(t='r'):
    # 定义时区 UTC+7

    utc_plus_7 = datetime.timezone(datetime.timedelta(hours=7))
    now = datetime.datetime.now(utc_plus_7)
    today_utc7 = datetime.datetime.now(utc_plus_7).replace(hour=0, minute=0, second=0, microsecond=0)
    yesterday_utc7= (today_utc7 - datetime.timedelta(days=1)).replace(hour=0, minute=0, second=0, microsecond=0)
    if t=='r':
        date_start = today_utc7.replace(hour=0, minute=0, second=0, microsecond=0)
        date_end = now.replace(minute=0, second=0, microsecond=0)

    elif t=='y':
        date_start = yesterday_utc7.replace(hour=0, minute=0, second=0, microsecond=0)
        date_end = today_utc7.replace(hour=0, minute=0, second=0, microsecond=0)
    
    elif t=='w':
        last_week_toady_utc7=(today_utc7 - datetime.timedelta(days=7)).replace(hour=0, minute=0, second=0, microsecond=0)
        date_start = last_week_toady_utc7.replace(hour=0, minute=0, second=0, microsecond=0)
        date_end = today_utc7.replace(hour=0, minute=0, second=0, microsecond=0)
    
    elif t=='m':
        last_month_utc7 = (today_utc7 - datetime.timedelta(days=30)).replace(hour=0, minute=0, second=0, microsecond=0)
        date_start = last_month_utc7.replace(hour=0, minute=0, second=0, microsecond=0)
        date_end = today_utc7.replace(hour=0, minute=0, second=0, microsecond=0)

    st_time=int(date_start.timestamp())
    ed_time=int(date_end.timestamp())

    return st_time,ed_time

async def plw_xp_get_pro_performance(page:Page,basic_info,t='r'):

    '''虾皮采集各个时间段产品流量报表'''
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    country=basic_info['Country']
    shopid=basic_info['shopid']
    is_kj=basic_info['is_KJ']

    
    # cnsc_shop_id: 1521278937
    # cbsc_shop_region: th
    spdc=await plw_xp_spc_cds(page)
    if not spdc:
        return
    t_map={
        'r':'小时',
        'y':'昨日',
        'w':'七天',
        'm':'三十天'
    }
    period_map={
        'r':'real_time',
        'y':'yesterday',
        'w':'past7days',
        'm':'past30days'
    }
    err_count=0

    st_time,ed_time=get_shopee_performance_timestamp(t)
    
    all_items=[]
    p=1
    max_p=20
    pz=20
    api_url=f'https://{host}/api/mydata/v3/product/performance/'
    if is_kj:
        api_url=f'https://{host}/api/mydata/cnsc/shop/v3/product/performance/'

    while True:
        params={
            'SPC_CDS': spdc,
            'SPC_CDS_VER': 2,
            'start_time': st_time,
            'end_time': ed_time,
            'period': period_map[t],
            'metric_ids': 'all',
            'order_by': 'placed_units.desc',
            'page_size': pz,
            'page_num': p,
            'category_type': 'shopee',
            'category_id': -1,
            'keyword':''
        }

 


        if is_kj:
            params['cnsc_shop_id']=shopid
            params['cbsc_shop_region']=REGION_MAP[country]

        json_data= await plw_make_request(page,api_url,params)
        if json_data:
            if json_data['code']==0:
                pitems=json_data['result']['items']
                all_items.extend(pitems)

                if p==1:
                    total=json_data['result']['total']
                    max_p=math.ceil(total/pz)
                p+=1
            else:
                err_count+=1
                await asyncio.sleep(1)
        else:
            err_count+=1
        if p>max_p:
            break
        if err_count>10:
            return
    logging.info(f'店铺:《{dpName}》,{bid},时段:{t_map[t]},共采集 {len(all_items)} 个报表数据')
    return all_items

def upload_performance(basic_info,bb_list,tt):

    

    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    shopid=basic_info['shopid']
    host=basic_info['host']
    yybm=basic_info['YYBM']
    country=basic_info['Country']
    iskj=basic_info['is_KJ']
    if iskj:
        IMG_HOST_MAP[country]='https://s-cf-sg.shopeesz.com/file/'

    utc_plus_7 = datetime.timezone(datetime.timedelta(hours=7))
    now = datetime.datetime.now(utc_plus_7)
    today=now.date()
    cur_hour=now.hour
    cur_year=now.year
    cur_m=now.month

    
    col_names=tms.GetCol('ShopeeProPerformance')
    col_names2=tms.GetCol('ShopeeSKUProPerformance')
    t_map={
        'r':'小时',
        'y':'昨日',
        'w':'七天',
        'm':'三十天'
    }
    btype=t_map[tt]
    aff=0
    aff_sku=0
    for pro_bb in bb_list:
        # if pro_bb['display_tag_label']=='psd_label_deleted':
        #     continue
        cur_itemid=str(pro_bb['id'])
        cur_pname=pro_bb['name']
        cur_random_code=None
        mat=re.search(r'.*\|(.{8})\|$',cur_pname)
        if mat:
            cur_random_code=mat.group(1)
        
        cur_img=pro_bb['image']
        
        cur_img=f'{IMG_HOST_MAP[country]}{cur_img}'

        pbasic={}

        for k,v in pro_bb.items():
            if k in col_names:
                pbasic[k]=v
        
        pbasic.pop('id')
        pbasic['itemid']=cur_itemid
        pbasic['image']=cur_img
        pbasic['random_code']=cur_random_code
        pbasic['dtype']=btype
        pbasic['bname']=dpName
        pbasic['bid']=bid
        pbasic['shopid']=shopid
        pbasic['yybm']=yybm
        pbasic['country']=country
        if tt=='r':
            pbasic['data_date']=today
            pbasic['time_slot']=cur_hour
        else:
            pbasic['data_date']=f'{cur_year}-{cur_m}-01'
            if tt=='y':
                pbasic['time_slot']=-1
            elif tt=='w':
                pbasic['time_slot']=-2
            elif tt=='m':
                pbasic['time_slot']=-3


        zds=list(pbasic.keys())
        paras=list(pbasic.values())
        zd_on=['itemid','dtype']
        aff+=tms.merge_into('ShopeeProPerformance',zds,paras,zd_on,True)

        if tt=='m':
            pmodels=pro_bb.get('models')
            if pmodels:
                for pmodel in pmodels:
                    sku_basic={}
                    for k,v in pmodel.items():
                        if k in col_names2:
                            sku_basic[k]=v
                    skuname=pmodel['name']
                    sku2=None
                    skulist=skuname.split(',',maxsplit=1)
                    sku1=skulist[0].strip()
                    if len(skulist)==2:
                        sku2=skulist[1].strip()
                    ccc_skuid=sku_basic.pop('id')
                    sku_basic['skuid']=str(ccc_skuid)
                    sku_basic['skuname']=skuname
                    sku_basic['sku1']=sku1
                    sku_basic['sku2']=sku2
                    sku_basic['pname']=cur_pname
                    sku_basic['itemid']=cur_itemid
                    sku_basic['image']=cur_img
                    sku_basic['random_code']=cur_random_code
                    sku_basic['bname']=dpName
                    sku_basic['bid']=bid
                    sku_basic['shopid']=shopid
                    sku_basic['yybm']=yybm
                    sku_basic['country']=country
                    
                    zds2=list(sku_basic.keys())
                    paras2=list(sku_basic.values())
                    zd_on2=['skuid']
                    aff_sku+=tms.merge_into('ShopeeSKUProPerformance',zds2,paras2,zd_on2,True)

    
    logging.info(f'店铺:《{dpName}》,{bid},报表类型:{btype},共 {len(bb_list)} 条数据,保存成功:{aff},保存SKU:{aff_sku}')
    return aff

async def plw_xp_get_reROAS(page:Page,basic_info,itemid):

    '''获取虾皮推荐ROAS'''
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']

    spdc=await plw_xp_spc_cds(page)
    if not spdc:
        return
    random_uuid = str(uuid.uuid4())
    api_url=f'https://{host}/api/pas/v1/product/list_recommended_roi_two_target/'

    params={
        'SPC_CDS': spdc,
        'SPC_CDS_VER': 2
    }

    data={
            "campaign_type":"product_manual",
            "item_id_list":[itemid],
            "reference_id":random_uuid,
            "header":{}
        }
    json_data= await plw_make_request(page,api_url,params,data)
    if not json_data:
        return
    
    if json_data['code']!=0:
        return
    entry_list=json_data['data']['entry_list']
    l_roas=entry_list[0]['lower_bound']['value']
    l_roas=round(l_roas/100000,1)
    e_roas=entry_list[0]['exact']['value']
    e_roas=round(e_roas/100000,1)
    u_roas=entry_list[0]['upper_bound']['value']
    u_roas=round(u_roas/100000,1)

    logging.info(f'店铺:《{dpName}》,{bid},产品:{itemid},成功获取推荐ROAS:{(l_roas,e_roas,u_roas)}')
    return (l_roas,e_roas,u_roas)

def upload_adbb(basic_info,bb_list,tt):

    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    shopid=basic_info['shopid']
    host=basic_info['host']
    yybm=basic_info['YYBM']
    sjbm=basic_info['SJBM']
    country=basic_info['Country']
    iskj=basic_info['is_KJ']
    if iskj:
        IMG_HOST_MAP[country]='https://s-cf-sg.shopeesz.com/file/'
    utc_plus_7 = datetime.timezone(datetime.timedelta(hours=7))
    now = datetime.datetime.now(utc_plus_7)
    today=now.date()
    cur_hour=now.hour
    cur_year=now.year
    cur_m=now.month

    
    aff=0
    for pro_bb in bb_list:
        manual_product_ads=pro_bb['manual_product_ads']
        cur_itemid=str(manual_product_ads['item_id'])
        cur_pname=pro_bb['title']
        cur_random_code=None
        
        mat=re.search(r'.*\|(.{8})\|$',cur_pname)
        if mat:
            cur_random_code=mat.group(1)
        campaign=pro_bb['campaign']
        ad_start_time=timestamp_to_sql_datetime(campaign['start_time'])
        rpt=pro_bb['report']
        cur_img=pro_bb['image']
        cur_img=f'{IMG_HOST_MAP[country]}{cur_img}'



        pbasic={
            "bid": bid,
            "shopid": shopid,
            "purl":f'https://{host.replace("seller.","")}/{shopid}/{cur_itemid}/',
            "ad_url":f"https://{host}/portal/marketing/pas/product/manual/{campaign['campaign_id']}",
            "bname": dpName,
            "country":country,
            "sjbm": sjbm,
            "yybm": yybm,
            "data_date": today,
            "dtype": tt,
            "campaign_id": str(campaign['campaign_id']),
            "itemid": cur_itemid,
            "random_code": cur_random_code,
            "daily_budget": campaign['daily_budget']/100000,
            "total_budget": campaign['total_budget']/100000,
            "roi_two_target": campaign['roi_two_target']/100000,
            "start_time": ad_start_time,
            "state": pro_bb['state'],
            "subtype": pro_bb['subtype'],
            "type": pro_bb['type'],
            "title": cur_pname,
            "image": cur_img,
            "bidding_strategy": manual_product_ads['bidding_strategy'],
            "cps": manual_product_ads['cps'],
            "product_placement": manual_product_ads['product_placement'],
            "impression": rpt['impression'],
            "click": rpt['click'],
            "ctr": rpt['ctr'],
            "cpc": rpt['cpc']/100000,
            "cost": rpt['cost']/100000,
            "checkout": rpt['checkout'],
            "checkout_rate": rpt['checkout_rate'],
            "order_count": rpt['broad_order'],
            "order_amount": rpt['broad_order_amount'],
            "gmv": rpt['broad_gmv']/100000,
            "roi": rpt['broad_roi'],
            "avg_rank": rpt['avg_rank'],
            "page_views": rpt['page_views'],
            "unique_visitors": rpt['unique_visitors'],
            "reach": rpt['reach'],
            "product_click": rpt['product_click'],
            "product_impression": rpt['product_impression'],
            "product_ctr": rpt['product_ctr']
        }
        if tt=='today':
            pbasic['data_date']=today
            pbasic['time_slot']=cur_hour
        else:
            pbasic['data_date']=f'{cur_year}-{cur_m}-01'
            if tt=='yesterday':
                pbasic['time_slot']=-1
            elif tt=='last_week':
                pbasic['time_slot']=-2
            elif tt=='last_month':
                pbasic['time_slot']=-3
            elif tt=='last_three_month':
                pbasic['time_slot']=-4


        zds=list(pbasic.keys())
        paras=list(pbasic.values())
        zd_on=['itemid','dtype','state']

        aff+=tms.merge_into('ShopeeAdsReport',zds,paras,zd_on,True)
    logging.info(f'店铺:《{dpName}》,{bid},报表类型:{tt},共 {len(bb_list)} 条数据,保存成功:{aff}')

    return aff

async def plw_xp_get_adbb(page:Page,basic_info,t='today',state='all'):

    '''虾皮采集各个时间段广告报表'''
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    is_kj=basic_info['is_KJ']
    country=basic_info['Country']
    err_count=0
    sq=SQ_Map.get(country)

    st_time,ed_time=get_shopee_timestamp(t,sq)

    
    all_items=[]
    p=1
    max_p=20
    pz=50
    api_url=f'https://{host}/api/pas/v1/homepage/query/'
    params=await plw_get_params(page,basic_info)
    
    while True:
        payload={
                    "start_time":st_time,
                    "end_time":ed_time,
                    "filter_list":[{"campaign_type":"new_cpc_homepage","state":state,"search_term":""}],
                    "offset":(p-1)*pz,
                    "limit":pz
                }


        json_data= await plw_make_request(page,api_url,params,payload)
        if json_data:
            if json_data['code']==0:
                pitems=json_data['data']['entry_list']
                all_items.extend(pitems)

                if p==1:
                    total=json_data['data']['total']
                    max_p=math.ceil(total/pz)
                p+=1
            else:
                err_count+=1
                await asyncio.sleep(1)
        else:
            err_count+=1
        if p>max_p:
            break
        if err_count>10:
            return
    logging.info(f'店铺:《{dpName}》,{bid},时段:{t},{max_p}页,共采集 {len(all_items)} 个报表数据')
    return all_items

async def plw_xp_pause_ad(page:Page,basic_info,camp_ids):

    '''暂停对应广告'''
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    api_url=f'https://{host}/api/pas/v1/homepage/mass_edit/'
    params=await plw_xp_params(page,basic_info)
    data={"campaign_id_list":camp_ids,"type":"pause"}
    json_data=await plw_make_request(page,api_url,params,data)
    return json_data

def get_tit_and_des(session,pitem,country):

    tit,attrs=pitem
    json_attrs=json.loads(attrs)
    attrs_str=';'.join([f"{attr['name']}:{attr['value']}" for attr in json_attrs if '品牌' not in attr['name']])

    if country=='泰国':
        lg='泰语'
        ques1=f'''
                1688货源标题：{tit}。
                商品属性：{attrs_str}。
                泰语产品标题模板：产品名称 + 核心属性词 + 主要用途场景 + 材质面料 + 功能性能 + 尺寸容量 + 款式设计 + 人群标签 + 场景拓展 + 情绪类修饰词
                根据1688货源标题、商品属性，按我给出的泰语产品标题模板，为这个产品设计出最合理、最能够吸引消费者的泰语产品标题
                生成的泰语标题要求：
                    1.标题以泰文呈现，标题总长度不超过109个字符(必须遵守)
                    2.标题中不允许出现任何中文字符，允许有空格；
                    3.产品名称必须放在标题首位，用“ - ”与其他描述衔接；
                    4.用途、场景、人群、功能不能笼统为概括“多用途、多场景、多颜色尺寸”；
                    5.不可出现容易引发虾皮平台敏感的词汇；
                只回答得到的标题
            '''
    elif country=='马来西亚':
        lg='马来语'
        ques1=f'''
            1688货源标题：{tit}。
            商品属性：{attrs_str}。
            产品标题模板：产品名称 + 核心属性词 + 主要用途场景 + 材质面料 + 功能性能 + 尺寸容量 + 款式设计 + 人群标签 + 场景拓展 + 情绪类修饰词
            根据1688货源标题、商品属性，按我们给出的产品标题模板，为这个产品设计出最合理、最能够吸引消费者的产品标题
            要求：
                1.标题需要同时使用马来语和英语呈现，不超过109个字符
                2.产品名称必须放在标题首位，格式是“产品名称英语 【 产品名称马来语 】”，避免出现重复词汇，可以用“ - ”与其他描述衔接
                4.不能笼统概括“多用途、多场景、多颜色尺寸”
                5.不可出现容易引发虾皮平台敏感的词汇
                6.只需呈现最后的标题，无需呈现分析过程
            只回答得到的标题
        '''
    elif country=='越南':
        lg='越南语'
        ques1=f'''
            1688货源标题：{tit}。
            商品属性：{attrs_str}。
            {lg}产品标题模板：产品名称 + 核心属性词 + 主要用途场景 + 材质面料 + 功能性能 + 尺寸容量 + 款式设计 + 人群标签 + 场景拓展 + 情绪类修饰词
            根据1688货源标题、商品属性，按我给出的{lg}产品标题模板，为这个产品设计出最合理、最能够吸引消费者的泰语产品标题
            生成的{lg}标题要求：
                1.标题以{lg}呈现，标题总长度不超过109个字符(必须遵守)
                2.标题中不允许出现任何中文字符，允许有空格；
                3.产品名称必须放在标题首位，用“ - ”与其他描述衔接；
                4.用途、场景、人群、功能不能笼统为概括“多用途、多场景、多颜色尺寸”；
                5.不可出现容易引发虾皮平台敏感的词汇；
            只回答得到的标题
        '''

    ques2=f'''
            1688货源标题：{tit}。
            商品属性：{attrs_str}。
            {lg}产品描述模板：
                产品概述：简要介绍产品的基本信息，并突出其核心卖点。（2-3句）
                核心功能：详细介绍产品的核心功能，每个功能点单独列出，按重要性排序
                产品规格参数：详细列出产品的规格和参数（尺寸、重量等）
                适用人群/场景：明确说明该产品适用于的人群和场景
                附加功能/优势：产品特别的附加功能或额外价值
                行动号召:用一句简洁的话鼓励消费者采取行动
            根据1688货源标题、商品属性，按我给出的{lg}产品描述模板，为这个产品设计能够让消费者准确了解该产品的产品描述
            生成的{lg}描述要求：
                1.产品描述以{lg}呈现，不允许出现任何中文和特殊符号
                2.不要出现任何其他平台名称,以及可能侵权的话语
            只回答得到的描述
        '''

    tit=ask_deepseek(session,ques1,'deepseek-chat')
    if tit:
        des=ask_deepseek(session,ques2,'deepseek-chat')
        if des:
            return (tit,des)

with open('Cache/CNTOEN.json','r',encoding='utf-8') as f:
    cntoen_dict=json.load(f)

def get_en_skus(session,cnskus):
    global cntoen_dict
    skus=set()
    skus_dict={}
    for sid,sku1,sku2 in cnskus:
        if sku1 and not cntoen_dict.get(sku1):
            skus.add(sku1)
        if sku2 and not cntoen_dict.get(sku2):
            skus.add(sku2)
        skus_dict[str(sid)]={'sku1':sku1,'sku2':sku2}
    


    if len(skus)==0:
        return_skus=[]
        for sid,skucnddd in skus_dict.items():
            cn_sku1=skucnddd['sku1']
            yn_sku1=None
            yn_sku2=None
            if cn_sku1:
                yn_sku1=cntoen_dict[cn_sku1]
            cn_sku2=skucnddd['sku2']
            if cn_sku2:
                yn_sku2=cntoen_dict[cn_sku2]
            return_skus.append([sid,yn_sku1,yn_sku2])
        return return_skus
    
    sku_str='|||'.join(skus)

    question=f'''将以下SKU翻译成英语，要求：
                1. 仅返回翻译结果，用|||分隔，不要额外解释
                2. 每个SKU必须完全去除中文，使用缩写
                3. 每个SKU总长度（包括空格和符号）严格≤20字符
                4. 如果超长，优先截断或使用更短单词,并缩写至字符串长度等于20
                {sku_str}'''
    
    cot=ask_deepseek(session,question)
    if cot:
        cn_sku_list=sku_str.split('|||')
        yn_sku_list=cot.split('|||')
        for sjjj in range(len(cn_sku_list)):
            cntoen_dict[cn_sku_list[sjjj]]=yn_sku_list[sjjj].strip()

        return_skus=[]

        for sid,skucnddd in skus_dict.items():
            cn_sku1=skucnddd['sku1']
            yn_sku1=None
            yn_sku2=None
            if cn_sku1:
                yn_sku1=cntoen_dict[cn_sku1]
            cn_sku2=skucnddd['sku2']
            if cn_sku2:
                yn_sku2=cntoen_dict[cn_sku2]
            return_skus.append([sid,yn_sku1,yn_sku2]) 
        return return_skus

async def plw_xp_get_llbb_with_time(page:Page,basic_info,itemid):

    '''虾皮采集流量报表时间段详情数据'''
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    is_kj=basic_info['is_KJ']
    shopid=basic_info['shopid']
    country=basic_info['Country']
    st_time,ed_time=get_shopee_performance_timestamp()
    api_url=f'https://{host}/api/mydata/v1/product/performance/trend/'

    params=await plw_get_params(page,basic_info)
    if is_kj==1:
        api_url=f'https://{host}/api/mydata/cnsc/shop/v1/product/performance/trend/'
        params['cnsc_shop_id']=shopid
        params['cbsc_shop_region']=REGION_MAP[country]

    params['itemid']=itemid
    params['period']='real_time'
    params['start_time']=st_time
    params['end_time']=ed_time
    params['order_type']='confirmed'

    json_data= await plw_make_request(page,api_url,params)
    if json_data:
        if json_data['code']==0:
            pitems=json_data['result']['time_series']
            logging.info(f'店铺:《{dpName}》,{bid},共采集 {len(pitems)} 个时段时段报表数据')
            return pitems

async def plw_get_ad_conversion(page:Page,basic_info):

    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    country=basic_info['Country']
    sq=SQ_Map.get(country)

    st_time,ed_time=get_shopee_timestamp(sq=sq)

    offset=0
    limit=500
    campaign_dict={}
    api_url=f'https://{host}/api/pas/v1/homepage/query/'
    params=await plw_xp_params(page,basic_info)

    data={
        "start_time":st_time,
        "end_time":ed_time,
        "filter":{"campaign_type":"new_cpc_homepage","state":"all","search_term":""},
        "offset":offset,
        "limit":limit
    }

    json_data=await plw_make_request(page,api_url,params,data)
    if json_data.get('data'):
        total_count=json_data['data']['total']
        entry_list=json_data['data']['entry_list']

        for entry in entry_list:
            product_placement=entry['manual_product_ads']['product_placement']
            if product_placement!='targeting':
                item_id=str(entry['manual_product_ads']['item_id'])
                direct_order_amount=entry['report']['broad_order_amount']
                direct_roi=entry['report']['direct_roi']
                if not campaign_dict.get(item_id):
                    campaign_dict[item_id]=(direct_order_amount,direct_roi)

        logging.info(f'店铺:《{dpName}》,bid:{bid},订单映射:{len(campaign_dict.keys())}个,历史广告总数:{total_count}个')
    return campaign_dict

async def plw_get_xp_prolist(page:Page,basic_info,list_type='live_all'):

    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    is_kj=basic_info['is_KJ']
    shopid=basic_info['shopid']
    country=basic_info['Country']
    cbsc_shop_region=REGION_MAP[country]
    req_p=Queue(-1)
    req_p.put(1)
    err_count=0
    spdc=await plw_xp_spc_cds(page)
    

    api_url=f'https://{host}/api/v3/opt/mpsku/list/v2/get_product_list'
    params={
        'SPC_CDS': spdc,
        'SPC_CDS_VER': 2,
        'page_number': 1,
        'page_size': 48,
        'list_type': list_type,
        'need_ads': True
    }
    if is_kj:
        params['cnsc_shop_id']=shopid
        params['cbsc_shop_region']=cbsc_shop_region
    pro_list=[]
    while True:
        if req_p.empty():
            break
        if err_count>3:
            logging.info(f'店铺:《{dpName}》,{bid},获取产品错误请求超过三次弹出错误')
            return
        p=req_p.get()
        params['page_number']=p
        js_data=await plw_make_request(page,api_url,params)
        if js_data is None or (js_data and js_data.get('code')!=0):
            print(js_data)
            err_count+=1
            if is_kj:
                purl=f'https://{host}/portal/product/list/live/all?cnsc_shop_id={shopid}&operationSortBy=recommend_v2'
            else:
                purl=f'https://{host}/portal/product/list/live/all?operationSortBy=recommend_v2'  
            await plw_repeat_goto(page,purl)
            req_p.put(p)
            await asyncio.sleep(5)
            continue

        page_info=js_data['data']['page_info']
        if page_info['total']==0:
            return []
        
        pros=js_data['data']['products']
        if p==1:
            pz=page_info['page_size']
            ptoal=page_info['total']
            max_p=math.ceil(ptoal/pz)
            for ppp in range(2,max_p+1):
                req_p.put(ppp)
        
        pro_list.extend(pros)
    
    logging.info(f'店铺:《{dpName}》,{bid},{list_type},共 {max_p} 页,获取 {len(pro_list)} 个产品')
    return pro_list

async def plw_get_entry_list(page:Page,basic_info,is_contain_sold_out=False,ddd='last_three_month'):

    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    country=basic_info['Country']
    host=basic_info['host']
    iskj=basic_info['is_KJ']
    sq=SQ_Map.get(country)

    st_time,ed_time=get_shopee_timestamp(ddd,sq)
    offset=0
    limit=500
    campaign_dict={}
    api_url=f'https://{host}/api/pas/v1/homepage/query/'
    params=await plw_xp_params(page,basic_info)

    data={
        "start_time":st_time,
        "end_time":ed_time,
        "filter":{"campaign_type":"new_cpc_homepage","state":"ongoing","search_term":""},
        "offset":offset,
        "limit":limit
    }

    json_data=await plw_make_request(page,api_url,params,data)
    #print(json_data)


    camp_list=[]
    if json_data.get('data'):
        total_count=json_data['data']['total']
        entry_list=json_data['data']['entry_list']
        for entry in entry_list:
            product_placement=entry['manual_product_ads']['product_placement']
            if product_placement!='targeting':
                campaign_id=str(entry['campaign']['campaign_id'])
                item_id=str(entry['manual_product_ads']['item_id'])
                start_time=entry['campaign']['start_time']
                now_time=int(time.time())
                trait_list=entry['trait_list']
                if not campaign_dict.get(item_id):
                    if is_contain_sold_out:
                        if 'item_sold_out' not in trait_list:
                            create_days=math.ceil((now_time-start_time)/86400)
                            campaign_dict[item_id]=[campaign_id,create_days]
                            camp_list.append(entry)
                    else:
                        create_days=math.ceil((now_time-start_time)/86400)
                        campaign_dict[item_id]=[campaign_id,create_days]
                        camp_list.append(entry)
                    
        
        logging.info(f'店铺:《{dpName}》,bid:{bid},在用广告数:{len(campaign_dict.keys())},历史广告总数:{total_count}')

    return camp_list

async def plw_update_XPOrder(page:Page,basic_info):

    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    userName=basic_info['UserName']
    gname=basic_info['GroupName']
    host=basic_info['host']
    yybm=basic_info['YYBM']
    sjbm=basic_info['SJBM']
    country=basic_info['Country']
    oid_list_url=f'https://{host}/api/v3/order/search_order_list_index'
    params=await plw_xp_params(page,basic_info)
    iskj=basic_info['is_KJ']
    if iskj:
        IMG_HOST_MAP[country]='https://s-cf-sg.shopeesz.com/file/'
    req_order_list_que=Queue(-1)
    req_order_list_que.put(1)
    err_count=0
    order_param_list=[]
    itemid_adc_dict=await plw_get_ad_conversion(page,basic_info)
    while True:
        if req_order_list_que.empty():
            break
        if err_count >3:
            logging.info(f'店铺:《{dpName}》,{bid},获取oid接口错误超过3次')
            return
        page_number=req_order_list_que.get()
        from_page_number=page_number-1 if page_number>1 else 1
        oid_list_data = {
            "order_list_tab":100,
            "entity_type":1,
            "pagination":
                {"from_page_number":from_page_number,
                "page_number":page_number,
                "page_size":40},
            "filter":
                {"fulfillment_type":0,"is_drop_off":0,"fulfillment_source":0,"action_filter":0},
            "sort":{"sort_type":3,"ascending":False}
        }

        js_data=await plw_make_request(page,oid_list_url,params,oid_list_data)

        if js_data is None:
            err_count+=1
            await plw_repeat_goto(page)
            await asyncio.sleep(6)
            req_order_list_que.put(page_number)
            continue

        if js_data['code']!=0:
            err_count+=1
            await plw_repeat_goto(page)
            await asyncio.sleep(6)
            req_order_list_que.put(page_number)
            continue
        
        oid_list=js_data['data']['index_list']
        if len(oid_list)==0:
            break

        if page_number==1:
            total_count=js_data['data']['pagination']['total']
            max_p=math.ceil(total_count/40)
            for ppp in range(2,max_p+1):
                req_order_list_que.put(ppp)
            logging.info(f'店铺:《{dpName}》,{bid},共 {total_count} 个订单,需请求 {max_p} 页')
        
        order_param_list.extend(oid_list)


    print(f'店铺:《{dpName}》,{bid},已获取 {len(order_param_list)} 个订单ID,开始根据获取订单卡片信息...')

    if not order_param_list:
        return 0
    
    shopid=order_param_list[0]['shop_id']
    order_card_url=f'https://{host}/api/v3/order/get_order_list_card_list'

    order_info_list=[]
    suc=0
    for j in range(math.ceil(len(order_param_list)/5)):
        cur_order_param_list=order_param_list[j*5:(j+1)*5]
        for jj in range(3):
            order_card_data={
                        "order_list_tab":100,
                        "need_count_down_desc":True,
                        "order_param_list":cur_order_param_list
                        }
            
            js_data_order=await plw_make_request(page,order_card_url,params,order_card_data)
            if js_data_order and js_data_order['code']==0:
                break
        if js_data_order is None:
            logging.info(f'店铺:《{dpName}》,{bid},获取订单卡片信息错误超过3次')
            return
        card_list=js_data_order['data']['card_list']
        
        for card in card_list:
            try:
                card_status=1
                order_card= card.get('order_card')
                if not order_card:
                    order_card=card.get('package_level_order_card')
                    card_status=2
                
                order_id=order_card['order_ext_info']['order_id']
                buyer_id=order_card['order_ext_info'].get('buyer_user_id')
                order_sn=order_card['card_header']['order_sn']
                buyer_name=order_card['card_header']['buyer_info'].get('username')
                date_str = order_sn[:6]
                order_date = datetime.datetime(int(date_str[:2]) + 2000 , int(date_str[2:4]) , int(date_str[4:]))
                if card_status==1:
                    total_price=order_card['payment_info']['total_price']
                    payment_method=order_card['payment_info']['payment_method']
                    order_status=order_card['status_info']['status']
                    order_status_des=order_card['status_info']['status_description'].get('description_value')
                    fulfilment_info=order_card['fulfilment_info']
                    item_info_list=order_card['item_info_group']['item_info_list']
                else:
                    total_price=order_card['package_list'][0]['payment_info']['total_price']
                    payment_method=order_card['package_list'][0]['payment_info']['payment_method']
                    order_status=order_card['package_list'][0]['status_info']['status']
                    order_status_des=order_card['package_list'][0]['status_info']['status_description'].get('description_value')
                    fulfilment_info=order_card['package_list'][0]['fulfilment_info']
                    item_info_list=order_card['package_list'][0]['item_info_group']['item_info_list']

                
                ship_type=''
                ship_count=None
                ship_id=None
                
                ship_name_list=[]
                for fkk,fkv in fulfilment_info.items():
                    if fkk.endswith('_name'):
                        ship_name_list.append(fkv)
                    if fkk=='tracking_number_list':
                        ship_id=fkv[0]
                        ship_count=len(fkv)

                if ship_name_list:
                    ship_type='\n'.join(ship_name_list)

                

                for ilist in item_info_list:
                    
                    pitem=ilist['item_list'][0]
                    pname=pitem['name']
                    random_code=None
                    rcode_mat= re.search(r'.*\|(.{8})\|$',pname)
                    if rcode_mat:
                        random_code=rcode_mat.group(1)
                    sku_name=pitem.get('description')
                    pimage=f"{IMG_HOST_MAP[country]}{pitem['image']}"
                    pamount=pitem['amount']
                    itemid=pitem['inner_item_ext_info']['item_id']
                    purl=f"https://{host.replace('seller.','')}/product/{shopid}/{itemid}/"
                    skuid=pitem['inner_item_ext_info']['model_id']
                    ad_paras=itemid_adc_dict.get(str(itemid))
                    ad_cd=None
                    ad_ROAS=None
                    if ad_paras:
                        ad_cd,ad_ROAS=ad_paras
                    cur_order_info={
                        'shopid':str(shopid),
                        'order_id':str(order_id),
                        'order_sn':order_sn,
                        'buyer_name':buyer_name,
                        'buyer_id':str(buyer_id),
                        'ItemID':str(itemid),
                        'skuid':str(skuid),
                        'purl':purl,
                        'pname':pname,
                        'sku_name':sku_name,
                        'pimage':pimage,
                        'amount':pamount,
                        'total_price':int(total_price/100000),
                        'payment_method':payment_method,
                        'status':order_status,
                        'status_des':order_status_des,
                        'ship_id':ship_id,
                        'ship_type':ship_type,
                        'ship_count':ship_count,
                        'order_date':order_date,
                        'bid':bid,
                        'BName':dpName,
                        'GroupName':gname,
                        'userName':userName,
                        'SJBM':sjbm,
                        'YYBM':yybm,
                        'conversions':ad_cd,
                        'ROAS':round(ad_ROAS, 4) if ad_ROAS is not None else None,
                        'random_code':random_code,
                        'country':basic_info['Country']

                    }

                    order_info_list.append(cur_order_info)
                    zds=list(cur_order_info.keys())
                    paras=list(cur_order_info.values())
                    aff=tms.merge_into('ShopeeOrder',zds,paras,['order_id','skuid'],True)
                    suc+=aff
                    #logging.info(f'店铺:《{dpName}》,{bid},订单:《{order_sn}》,skuid:{skuid},随机码:{random_code},存入状态:{aff},共更新:{suc}')
                    

            except Exception as e:
                traceback.print_exc()
                logging.info(f'店铺:《{dpName}》,{bid},解析错误 => {e}')
        
    logging.info(f'店铺:《{dpName}》,{bid},获取 {len(order_info_list)} 个订单信息,保存 {suc} 个')
    return suc

async def plw_update_XPStock(page:Page,basic_info):

    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    userName=basic_info['UserName']
    gname=basic_info['GroupName']
    yybm=basic_info['YYBM']
    sjbm=basic_info['SJBM']
    host=basic_info['host']
    country=basic_info['Country']
    iskj=basic_info['is_KJ']
    if iskj:
        IMG_HOST_MAP[country]='https://s-cf-sg.shopeesz.com/file/'
    shopid= await plw_xp_get_shopid(page,basic_info)
    pros=await plw_get_xp_prolist(page,basic_info)

    if pros is None:
        return
    
    aff=0
    aff_hw=0
    aff_profit=0
    camp_count=0
    aff_profit_camp_3m=0
    psku_list=[]
    cur_time=datetime.datetime.now()
    camp_list=await plw_get_entry_list(page,basic_info,is_contain_sold_out=True)
    
    pid_acos_map={str(camp['manual_product_ads']['item_id']):round(camp['report']['direct_cir'],2) for camp in camp_list}

    for pro in pros:

        try:
            pro_itemid=str(pro['id'])
            pname=pro['name']
            pimage=f"{IMG_HOST_MAP[country]}{pro['cover_image']}"
            pro_create_time=timestamp_to_sql_datetime(pro['create_time'])
            pro_modify_time=timestamp_to_sql_datetime(pro['modify_time'])
            pro_min_price=int(float(pro['price_detail']['price_min']))
            pro_ph_itemid=pro['parent_sku']
            
            if pid_acos_map.get(pro_itemid) is not None:
                camp_count+=1
                aff_profit_camp_3m+=tms.ExecNoQuery('update ProductProfit set acos_3months_id=? where rucangItemID=?',(pid_acos_map.get(pro_itemid),pro_itemid))
                

            sku_infos=pro['model_list']

            for skuinfo in sku_infos:
                sku_image=None
                origin_price=skuinfo['price_detail']['origin_price']
                promotion_price=skuinfo['price_detail']['promotion_price']
                hwid=skuinfo['sku']
                skuprice=int(float(origin_price)) if promotion_price=='0.00' else int(float(promotion_price))
                if skuinfo['image']:
                    sku_image=f"{IMG_HOST_MAP[country]}{skuinfo['image']}"

                if hwid.lower().startswith('sea'):

                    cur_hw_pdata={
                        'haiwaicangxitongbianma':hwid,
                        'rucangItemID':pro_itemid,
                        'rucangSKUID':str(skuinfo['id']),
                        'bid':bid,
                        'BName':dpName,
                        'gName':gname,
                        'shopid':shopid,
                        'SJBM':sjbm,
                        #'yunyingbianma':yybm,
                        'pname':pname,
                        'sku_name':skuinfo['name'],
                        'sku_img':sku_image
                        
                    }
                    hw_zds=list(cur_hw_pdata.keys())
                    hw_paras=list(cur_hw_pdata.values())
                    aff_hw+=tms.merge_into('Purchase_Sales_Warehouse',hw_zds,hw_paras,['haiwaicangxitongbianma'],True)

                cur_pro_sku={
                    'shopid':shopid,
                    'ItemID':pro_itemid,
                    'skuid':str(skuinfo['id']),
                    'purl':f"https://{host.replace('seller.','')}/product/{shopid}/{pro_itemid}/",
                    'pname':pname,
                    'sku_name':skuinfo['name'],
                    'pimage':pimage,
                    'sku_image':sku_image,
                    'sold_count':skuinfo['statistics']['sold_count'],
                    'price':skuprice,
                    'stock':skuinfo['stock_detail']['total_available_stock'],
                    'modify_time':pro_modify_time,
                    'create_time':pro_create_time,
                    'ph_itemid':pro_ph_itemid,
                    'ph_skuid':skuinfo['sku'],
                    'bid':bid,
                    'BName':dpName,
                    'GroupName':gname,
                    'userName':userName,
                    'SJBM':sjbm,
                    'YYBM':yybm,
                    'country':country
                }
                zds=list(cur_pro_sku.keys())
                paras=list(cur_pro_sku.values())
                aff+=tms.merge_into('ShopeeStock',zds,paras,['ItemID','skuid'],True)
                psku_list.append(cur_pro_sku)

                ccc_skuid=str(skuinfo['id'])
                aff_profit+=tms.ExecNoQuery('update ProductProfit set sale_price_id=? where rucangSKUID=?',(skuprice,ccc_skuid))

        except Exception as e:
            traceback.print_exc()
            logging.error(f'json解析虾皮产品列表数据错误 => {e}')

    logging.info(f'店铺:《{dpName}》,{bid},共 {len(pros)} 个产品,{len(psku_list)}个sku,更新{aff}个sku库存,\n{aff_hw}个海外仓,{aff_profit}个利润表价格,{camp_count}个广告产品,{aff_profit_camp_3m}个acos')

    return aff

async def pwl_get_to_do_list(page,basic_info):
    
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    url=f'https://{host}/api/sellermisc/homepage/get_to_do_list_summary/'
    api_url=f'https://{host}/api/miscellaneous/homepage/get_to_do_list_summary/'
    params=await plw_xp_params(page,basic_info)

    for jjj in range(3):
        json_data=await plw_make_request(page,api_url,params)
        if json_data:
            break
        else:
            await plw_repeat_goto(page,f'https://{host}/')
            await asyncio.sleep(6)

    if not json_data:
        logging.info(f'店铺:《{dpName}》,bid:{bid},获取《To Do List》接口错误')
        return
    # with open('JSONDATA/首页todolist.json','w',encoding='utf-8') as f:
    #     json.dump(json_data,f,ensure_ascii=False,indent=4)
    try:
        to_do_list_data=json_data['data']
        unpaid=None
        to_process_shipment=to_do_list_data['shipment_to_process']
        processed_shipment=to_do_list_data['shipment_processed']
        pending_cancellation=None
        pending_return=to_do_list_data['order_return_refund_cancel']
        banned_products=to_do_list_data['product_banned_deboosted']
        sold_out_products=None
        to_join_bidding=to_do_list_data['promotion_to_join_bidding']
    except Exception as e:
        traceback.print_exc()
        logging.error(f'店铺:《{dpName}》,bid:{bid},解析 to do list 出错 => {e}')
        return
    return [unpaid,to_process_shipment,processed_shipment,pending_cancellation,
            pending_return,banned_products,sold_out_products,to_join_bidding]

async def plw_get_business_insights(page,basic_info):
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    url=f'https://{host}/api/mydata/homepage/key-metrics'
    api_url=f'https://{host}/api/mydata/v2/homepage/key-metrics/'
    params=await plw_xp_params(page,basic_info)
    params['order_type']='confirmed'


    for jjj in range(3):
        json_data=await plw_make_request(page,url,params)
        if json_data:
            break
        else:
            await plw_repeat_goto(page,f'https://{host}/')
            await asyncio.sleep(6)

    if not json_data:
        logging.info(f'店铺:《{dpName}》,bid:{bid},获取《Business_Insights》接口错误')
        return
    # with open('JSONDATA/首页Business_Insights.json','w',encoding='utf-8') as f:
    #     json.dump(json_data,f,ensure_ascii=False,indent=4)
    try:
    
        busi_data=json_data['data']
        visitors=busi_data['uv']
        visitors_trend=round(busi_data['uv_pct_diff'],4)
        page_views=busi_data['pv']
        page_views_trend=round(busi_data['pv_pct_diff'],4)
        orders=busi_data['orders']
        orders_trend=round(busi_data['orders_pct_diff'],4)
        conversion_rate=round(busi_data['conversion_rate'],4)
        conversion_rate_trend=round(busi_data['conversion_rate_pct_diff'],4)

        return [visitors,visitors_trend,page_views,page_views_trend,
                orders,orders_trend,conversion_rate,conversion_rate_trend]
        
    except Exception as e:
        traceback.print_exc()
        logging.error(f'店铺:《{dpName}》,bid:{bid},解析Business_Insights,出错=> {e}')
        return

async def plw_get_performance(page,basic_info):
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    shopid=basic_info['shopid']
    country=basic_info['Country']
    is_KJ=basic_info['is_KJ']
    url=f'https://{host}/api/v2/performance/getShopPerformanceForSC'
    params=await plw_xp_params(page,basic_info)
    data={}
    for jjj in range(3):
        json_data=await plw_make_request(page,url,params,data)
        if json_data:
            break
        else:
            await plw_repeat_goto(page,f'https://{host}/')
            await asyncio.sleep(6)

    if not json_data:
        logging.info(f'店铺:《{dpName}》,bid:{bid},获取《Performance》接口错误')
        return
    
    # with open('JSONDATA/首页Performance.json','w',encoding='utf-8') as f:
    #     json.dump(json_data,f,ensure_ascii=False,indent=4)
    try:
        metrics=json_data['metrics']
        metrics_dict={metric['metric_name']:metric['metric_value'] for metric in metrics}
        Non_fulfilment_Rate=metrics_dict['NON_FULFILLMENT_RATE']
        if not Non_fulfilment_Rate:
            Non_fulfilment_Rate=None
        else:
            Non_fulfilment_Rate=try_to_float(Non_fulfilment_Rate)
            if Non_fulfilment_Rate:
                Non_fulfilment_Rate=round(Non_fulfilment_Rate/100,4)
        
        Late_Shipment_Rate=metrics_dict['LATE_SHIPMENT_RATE']
        Preparation_Time=metrics_dict['PREPARATION_TIME']
        My_Penalty=json_data['penalty_points']
        return [Non_fulfilment_Rate,Late_Shipment_Rate,Preparation_Time,My_Penalty]
    except Exception as e:
        traceback.print_exc()
        logging.info(f'店铺:《{dpName}》,bid:{bid},解析Performance,出错=> {e}')
        return

async def plw_xp_get_kj_homeinfo(page:Page,basic_info):

    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    country=basic_info['Country']
    shopid=await plw_xp_get_shopid(page,basic_info)
    spdc=await plw_xp_spc_cds(page)
    
    params={
        'SPC_CDS': spdc,
        'SPC_CDS_VER': 2,
        'cnsc_shop_id': shopid,
        'cbsc_shop_region': REGION_MAP[country]
    }
    data={}
    zds=[]
    paras=[]
    #to do list
    url1=f'https://{host}/api/miscellaneous/todo_list/get_order_metrics/'
    json_data1=await plw_make_request(page,url1,params,data)
    if json_data1 and json_data1.get('code')==0:

        unpaid=json_data1['unpaid_order']
        to_process_shipment=json_data1['to_process_shipment_order']
        processed_shipment=json_data1['processed_shipment_order']
        pending_cancellation=json_data1['pending_cancellation_order']
        pending_return=json_data1['return_refunds_order']
        zds.extend(['unpaid','to_process_shipment','processed_shipment','pending_cancellation',
            'pending_return'])
        paras.extend([unpaid,to_process_shipment,processed_shipment,pending_cancellation,
                    pending_return])
    else:
        print(json_data1)
    url2=f'https://{host}/api/miscellaneous/todo_list/get_banned_product_metrics/'
    json_data2=await plw_make_request(page,url2,params,data)
    if json_data2 and json_data2.get('code')==0:
        banned_products=json_data2['banned_product']
        zds.extend(['banned_products'])
        paras.extend([banned_products])
    else:
        print(json_data2)

    url3=f'https://{host}/api/miscellaneous/todo_list/get_sold_out_product_metrics/'
    json_data3=await plw_make_request(page,url3,params,data)
    if json_data3 and json_data3.get('code')==0:
        sold_out_products=json_data3['sold_out_product']
        zds.extend(['sold_out_products'])
        paras.extend([sold_out_products])
    else:
        print(json_data3)
    

    #Business Insights
    st_time,ed_time=get_shopee_performance_timestamp()
    url4=f'https://{host}/api/mydata/cnsc/merchant/homepage/key-metrics'
    parsms2={
        'end_time': ed_time,
        'start_time': st_time,
        'period': 'real_time',
        'SPC_CDS': spdc,
        'SPC_CDS_VER': 2,
        'cnsc_shop_id': shopid,
        'cbsc_shop_region': REGION_MAP[country] 
    }
    json_data4=await plw_make_request(page,url4,parsms2)
    if json_data4 and json_data4.get('code')==0:

        business_data=json_data4['result']
        visitors=business_data['product_clicks']
        visitors_trend=business_data['product_clicks_daily_rate']
        page_views=business_data['page_views']
        page_views_trend=business_data['page_views_daily_rate']
        orders=business_data['order']
        orders_trend=business_data['orders_daily_rate']
        zds.extend(['visitors','visitors_trend','page_views','page_views_trend',
            'orders','orders_trend'])
        paras.extend([visitors,visitors_trend,page_views,page_views_trend,orders,orders_trend])
    else:
        print(json_data4)

    #Performance
    url5=f'https://{host}/api/v2/shops/sellerCenter/shopPerformance/'
    json_data5=await plw_make_request(page,url5,params)
    if json_data5 and json_data5.get('status')==200:

        per_data=json_data5['data']
        fulFillMent_list=per_data['fulFillMent']
        for fulFillMent in fulFillMent_list:

            metricId=fulFillMent['metricId']
            if metricId==1:
                lastPeriod=fulFillMent['lastPeriod']
                if lastPeriod=='-999':
                    Late_Shipment_Rate=None
                else:
                    Late_Shipment_Rate=round(float(lastPeriod)/100,4)
            elif metricId==3:
                lastPeriod=fulFillMent['lastPeriod']
                if lastPeriod=='-999':
                    Non_fulfilment_Rate=None
                else:
                    Non_fulfilment_Rate=round(float(lastPeriod)/100,4)
            elif metricId==4:
                lastPeriod=fulFillMent['lastPeriod']
                if lastPeriod=='-999':
                    Preparation_Time=None
                else:
                    Preparation_Time=float(lastPeriod)
        zds.extend(['Non_fulfilment_Rate','Late_Shipment_Rate','Preparation_Time'])
        paras.extend([Non_fulfilment_Rate,Late_Shipment_Rate,Preparation_Time])

    else:
        print(json_data5)
    
    url6=f'https://{host}/api/v2/shops/sellerCenter/ongoingPoints/'
    json_data6=await plw_make_request(page,url6,params)
    if json_data6 and json_data6.get('status')==200:
        My_Penalty=json_data6['data']['totalPoints']
        zds.extend(['My_Penalty'])
        paras.extend([My_Penalty])
    else:
        print(json_data6)
    
    logging.info(f'跨境店铺:《{dpName}》,{bid},成功获取 {len(zds)} 个参数,{len(paras)} 个值')
    return (zds,paras)
    
async def plw_get_ads_credit(page:Page,basic_info):

    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    url=f'https://{host}/api/pas/v1/meta/get_ads_data/'
    params=await plw_xp_params(page,basic_info)

    data={"info_type_list":["ads_credit","ads_account","ads_expense"]}

    js_data=await plw_make_request(page,url,params,data)

    if js_data and js_data.get('data'):
        ads_credit=round(js_data['data']['ads_credit']['total']/100000,2)
        logging.info(f'店铺:《{dpName}》,{bid},获取广告余额:{ads_credit}')
        return ads_credit
    else:
        print(js_data)

async def plw_get_camp_info(page:Page,basic_info,ishome=False):

    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    country=basic_info['Country']
    is_kj=basic_info['is_KJ']
    sq=SQ_Map.get(country)

    st_time,ed_time=get_shopee_timestamp('last_month',sq)
    offset=0
    limit=500
    campaign_dict={}
    api_url=f'https://{host}/api/pas/v1/homepage/query/'
    params=await plw_get_params(page,basic_info)
    data={
        "start_time":st_time,
        "end_time":ed_time,
        "filter":{"campaign_type":"new_cpc_homepage","state":"ongoing","search_term":""},
        "offset":offset,
        "limit":limit
    }

    json_data=await plw_make_request(page,api_url,params,data)
    if json_data and json_data.get('data'):
        total_count=json_data['data']['total']
        entry_list=json_data['data']['entry_list']

        for entry in entry_list:
            product_placement=entry['manual_product_ads']['product_placement']
            if product_placement!='targeting':
                campaign_id=str(entry['campaign']['campaign_id'])
                item_id=str(entry['manual_product_ads']['item_id'])
                start_time=entry['campaign']['start_time']
                now_time=int(time.time())
                trait_list=entry['trait_list']
                if not campaign_dict.get(item_id):
                    if 'item_sold_out' not in trait_list:
                        create_days=math.ceil((now_time-start_time)/86400)
                        campaign_dict[item_id]=[campaign_id,create_days]
        
        logging.info(f'店铺:《{dpName}》,{bid},在用广告数:{len(campaign_dict.keys())},历史广告总数:{total_count}')
        if ishome:
            return total_count
        return campaign_dict

async def js_get_pro_trait_list(page:Page,basic_info,is_only_one=False):

    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    shopid= basic_info['shopid']
    if not shopid:
        shopid=await plw_xp_get_shopid(page,basic_info)
    url=f'https://{basic_info["host"]}/api/pas/v1/setup_helper/product_selector/query/'
    params= await plw_xp_params(page,basic_info)
    random_uuid = str(uuid.uuid4())
    st_time=get_yn_0timestamp()
    p_no_ad=[]
    p_has_ad=[]
    itemids_isadd=[]
    ppp=1
    last_token=''
    while True:
        data = {"pagination":{"last_token":last_token,"limit":50},
                "order":None,"show_all_item":False,
                "filter":{"recommendation_type":None},
                "ads_information":{"product_placement":"all","end_time":0,"start_time":st_time,"bidding_strategy":"roi_two"},
                "reference_id":random_uuid,
                "header":{}
                }

        js_data = await plw_make_request(page,url,params,data)
        if js_data:
            if js_data['code']==0:
                entry_list=js_data['data']['entry_list']
                for entry in entry_list:
                    
                    cur_itemid= entry['item_id']
                    if cur_itemid not in itemids_isadd:
                        trait_list=entry.get('trait_list')
                        if not trait_list:
                            p_no_ad.append(entry)
                        else:
                            if 'npb_added' in trait_list[0] and len(trait_list)==1:
                                p_no_ad.append(entry)
                            else:
                                p_has_ad.append(entry)
                        itemids_isadd.append(cur_itemid)
                next_token=js_data['data']['next_page_token']
                logging.info(f'店铺:《{dpName}》,{bid},第{ppp}页,加载 {len(entry_list)} 个产品')
                if not next_token:
                    break
                if is_only_one:
                    break
                last_token=next_token


            else:
                break
            
        else:
            break

        ppp+=1


    logging.info(f'店铺:《{dpName}》,{bid},共{ppp}页,当前共{len(p_no_ad)+len(p_has_ad)}个广告产品,其中还没有广告的产品 {len(p_no_ad)} 个')
    return p_no_ad,p_has_ad

async def plw_update_CreAD_Task(page:Page,basic_info,rw_msg,status='后'):

    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    userName=basic_info['UserName']
    password=basic_info['Password']
    gname=basic_info['GroupName']
    yybm=basic_info['YYBM']
    sjbm=basic_info['SJBM']
    max_count=basic_info['ads_target_quantity']
    shopid=await plw_xp_get_shopid(page,basic_info)
    res_ad=await js_get_pro_trait_list(page,basic_info)
    oning_count=await plw_get_camp_info(page,basic_info,True)
    if res_ad:
        p_no_ad,p_has_ad=res_ad

        if status=='前':
            need_zd=['need_create_before','has_created_before']
            if len(p_no_ad)>0:
                rw_msg='正在运行'
        elif status=='后':
            need_zd=['need_create_after','has_created_after']
            if len(p_no_ad)>0:
                rw_msg=f'还有 {len(p_no_ad)} 个未创建'
        zds=['shopid','bid','BName','GroupName','userName','password','SJBM','YYBM','rw_msg']+need_zd
        paras=[shopid,bid,dpName,gname,userName,password,sjbm,yybm,rw_msg,len(p_no_ad),len(p_has_ad)]
        aff0=tms.merge_into('ShopeeCreadTask',zds,paras,['bid'],True)
        logging.info(f'店铺:《{dpName}》,{bid},更新创建{status}任务:{aff0}')
        if status=='前' and max_count and oning_count>=max_count:
            return []
        return p_no_ad
    else:
        zds=['shopid','bid','BName','GroupName','userName','password','SJBM','YYBM','rw_msg']
        paras=[shopid,bid,dpName,gname,userName,password,sjbm,yybm,'获取广告信息错误']

zw=None
async def plw_xp_get_adbb_with_time(page:Page,basic_info,campid):
    
    '''虾皮采集广告报表时间段详情数据'''
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    is_kj=basic_info['is_KJ']
    shopid=basic_info['shopid']
    country=basic_info['Country']
    api_url=f'https://{host}/api/pas/v1/report/get_time_graph/'
    sq=SQ_Map.get(country)

    
    params=await plw_get_params(page,basic_info)
    st_time,ed_time=get_shopee_timestamp('yesterday',sq)
    data={
            "need_roi_target_setting":True,
            "agg_interval":4,
            "campaign_type":"product",
            "filter_params":{"campaign_id":int(campid)},
            "start_time":st_time,
            "end_time":ed_time,
            "need_new_product_boost_setting":True,
            "need_cps_setting":True,
            "need_new_product_boost_two_setting":True,
            "header":{},
            "device_sz_fingerprint":basic_info['device_sz_fingerprint']
            }
    if zw is not None:
        params['device_sz_fingerprint']=zw
    for _ in range(2):

        json_data= await plw_make_request(page,api_url,params,data)
        if json_data and json_data.get('code')==0:
            # with open(f'测试数据/{campid}.json','w',encoding='utf-8') as f:
            #     json.dump(json_data,f,ensure_ascii=False)
            return json_data['data']['report_by_time']
        
        else:
            logging.info(f'店铺:《{dpName}》,{bid},campid:《{campid}》,请求区间报表错误 => {json_data},等待 60 秒刷新')
            await asyncio.sleep(60)
            device_fp=await plw_xp_get_zw(page,basic_info,campid)
            params['device_sz_fingerprint']=device_fp
              
async def plw_xp_get_zw(page:Page,basic_info,campid):
    global zw
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    is_kj=basic_info['is_KJ']
    shopid=basic_info['shopid']
    country=basic_info['Country']
    logging.info(f'店铺:《{dpName}》,{bid},广告{campid},获取当前浏览器指纹信息...')
    st_time_sq8,ed_time_sq9=get_shopee_timestamp('yesterday',8)
    try:
        async with page.expect_response(lambda r: "api/pas/v1/report/get_time_graph" in r.url and r.status == 200,timeout=90000) as resp_info:
            camp_url=f'https://{host}/portal/marketing/pas/product/manual/{campid}?from={st_time_sq8}&to={ed_time_sq9}&group=yesterday'
            await plw_repeat_goto(page,camp_url)
            await asyncio.sleep(2)

        res_zz = await resp_info.value 
        req = res_zz.request           
        payload = req.post_data
        data = json.loads(payload)
        device_fp = data['device_sz_fingerprint']
        basic_info['device_sz_fingerprint']=device_fp
        zw=device_fp
        logging.info(f'店铺:《{dpName}》,{bid},成功获取浏览器指纹信息长度:{len(device_fp)}')
        return device_fp
    except Exception as e:
        traceback.print_exc()
        logging.info(f'店铺:《{dpName}》,{bid},获取指纹信息错误 => {e}')

async def plw_xp_get_income(page:Page,basic_info):

    '''虾皮接口采集收入详情列表'''
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    is_kj=basic_info['is_KJ']
    country=basic_info['Country']
    err_count=0
    sq=SQ_Map.get(country)

    
    st_time,et_time=get_shopee_timestamp()
    st_date_str=datetime.datetime.fromtimestamp(st_time).strftime(r'%Y-%m-%d')
    ed_date_str=datetime.datetime.fromtimestamp(et_time-86400).strftime(r'%Y-%m-%d')

    all_items=[]
    p=1
    pz=50
    api_url=f'https://{host}/api/v4/accounting/pc/seller_income/income_overview/get_income_detail'

    params=await plw_get_params(page,basic_info)
    payload={
        "source_type":0,
        "income_category":2,
        "pagination_info":{"direction":0,"limit":pz},
        "local_query_condition":{"start_date":st_date_str,"end_date":ed_date_str}
    }
    while True:          
        json_data= await plw_make_request(page,api_url,params,payload)
        if json_data:
            if json_data['code']==0:
                pitems=json_data['data']['list']
                all_items.extend(pitems)
                logging.info(f'店铺:《{dpName}》,{bid},第 {p} 页,成功采集 {len(pitems)} 个Income数据')
                if json_data['data'].get('next_page'):
                    cursor=json_data['data']['next_page']['cursor']
                    payload['pagination_info']['cursor']=cursor
                    p+=1
                else:
                    break

            else:
                logging.info(f'店铺:《{dpName}》,{bid},第 {p} 页,采集Incode失败 => {json_data}')
                await asyncio.sleep(1)
                await plw_check_pass(page,basic_info,True)
                err_count+=1
        else:
            await asyncio.sleep(1)
            await plw_check_pass(page,basic_info,True)
            err_count+=1

        if err_count>10:
            break
        
    logging.info(f'店铺:《{dpName}》,{bid},共 {p} 页,采集 {len(all_items)} 个Income数据')
    return all_items

def to_db_column_name(raw_name: str) -> str:
    """
    将原始列名（例如 'Order Creation Date' 或 'Total Released Amount (Rp)'）
    转换为 SQL Server 可用的列名（例如 'Order_Creation_Date' 或 'Total_Released_Amount_Rp'）
    """
    # 去除首尾空白
    name = raw_name.strip()

    # 去除括号内的单位符号，比如 (Rp)、(Buyer)
    name = re.sub(r'[()]', '', name)

    # 将所有非字母数字字符（空格、斜杠、连字符等）替换为下划线
    name = re.sub(r'[^0-9a-zA-Z]+', '_', name)

    # 如果开头是数字，加上前缀
    if re.match(r'^\d', name):
        name = 'col_' + name

    # 去掉可能结尾的下划线
    name = name.strip('_')

    return name

async def plw_xp_get_income_rpt(page:Page,basic_info):

    '''虾皮下载收入详情报表'''
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    country=basic_info['Country']
    sjbm=basic_info['SJBM']
    yybm=basic_info['YYBM']
    is_kj=basic_info['is_KJ']
    sq=SQ_Map.get(country)

    
    st_time,et_time=get_shopee_timestamp()
    st_date_str=datetime.datetime.fromtimestamp(st_time).strftime(r'%Y-%m-%d')
    ed_date_str=datetime.datetime.fromtimestamp(et_time-86400).strftime(r'%Y-%m-%d')


    api_url=f'https://{host}/api/v4/accounting/pc/seller_income/income_report/request_income_report_generation'
    params=await plw_get_params(page,basic_info)
    payload={"income_category":2,"start_date":st_date_str,"end_date":ed_date_str}

    json_data=await plw_make_request(page,api_url,params,payload)
    if not (json_data and json_data.get('code')==0):
        logging.info(f'店铺:《{dpName}》,{bid},获取Income报表reportid错误 => {json_data}')
        return
    rptid=json_data['data']['report_id']
    logging.info(f'店铺:《{dpName}》,{bid},成功获取获取Income报表reportid:《{rptid}》')

    api_url2=f'https://{host}/api/v4/accounting/pc/seller_income/income_report/get_income_report'
    params2=copy.copy(params)
    params2['report_id']=rptid
    wait_count2=0
    while True:
        json_data2=await plw_make_request(page,api_url2,params2)
        if json_data2 and json_data2.get('code')==0:
            if json_data2['data'].get('status')==2:
                logging.info(f'店铺:《{dpName}》,{bid},等待Income报表加载完毕')
                break
        await asyncio.sleep(1)
        wait_count2+=1
        if wait_count2>180:
            logging.info(f'店铺:《{dpName}》,{bid},等待Income报表加载超时退出')
            return
    params3=copy.copy(params)
    api_url3=f'https://{host}/api/v4/accounting/pc/seller_income/income_report/query_income_report'
    params3['document_id']=rptid
    json_data3=await plw_make_request(page,api_url3,params3)

    if json_data3 and json_data3.get('code')==0:
        file_path=json_data3['data']['file_path']
        logging.info(f'店铺:《{dpName}》,{bid},成功获取Income报表xlsx文件地址:《{file_path}》')
    
    else:
        logging.info(f'店铺:《{dpName}》,{bid},获取Income报表xlsx地址失败 => {json_data3}')
        return

    api_url4=f'https://{host}/api/v4/accounting/pc/seller_income/income_report/get_income_document_file'
    params4={
        'path':file_path
    }
    cot_byte_list= await plw_get_cot(page,api_url4,params4)
    if cot_byte_list:
        logging.info(f'店铺:《{dpName}》,{bid},下载Income报表xlsx文件大小:{len(cot_byte_list)}')
        excel_content =bytes(cot_byte_list)
    else:
        logging.info(f'店铺:《{dpName}》,{bid},下载Income报表xlsx文件失败')
        return

    with open('TABLEJSON/ShopeeIncome.json','r',encoding='utf-8') as f:
        col_json=json.load(f)
    cols=list(col_json.keys())
    try:
        file_bytes = BytesIO(excel_content)
        wb = openpyxl.load_workbook(file_bytes)
        ws=wb['Income']
        tit_ind=6
        for r in range(1,ws.max_row+1):
            cvalue=ws.cell(r,1).value
            if cvalue=='Sequence No.':
                tit_ind=r
                break
        tits=[ws.cell(tit_ind,c).value for c in range(1,ws.max_column+1)]
        for jj,tit in enumerate(tits[:]):
            if tit is None or str(tit).strip() == "":
                tits[jj]=''.join(random.choices(string.ascii_letters + string.digits, k=8))
        ntits=[to_db_column_name(tit) for tit in tits]
        indatas=[]
        for rr in range(tit_ind+1,ws.max_row+1):
            try:
                indata={
                    'bid':bid,
                    'BName':dpName,
                    'Country':country,
                    'SJBM':sjbm,
                    'YYBM':yybm
                }
                for cc in range(1,ws.max_column+1):
                    cur_tit=ntits[cc-1]
                    if cur_tit in cols: 
                        indata[cur_tit]=ws.cell(rr,cc).value
                indatas.append(indata)
            except Exception as e:
                print(e)
        
    except Exception as e:
        logging.info(f'店铺:《{dpName}》,{bid},解析Income报表xlsx文件失败 => {e}')
        return
    logging.info(f'店铺:《{dpName}》,{bid},成功解析并获取 {len(indatas)} 条Income数据')
    return indatas

async def plw_xp_get_balance_rpt(page:Page,basic_info):

    '''虾皮下载交易明细报表'''
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    country=basic_info['Country']
    sjbm=basic_info['SJBM']
    yybm=basic_info['YYBM']
    is_kj=basic_info['is_KJ']
    sq=SQ_Map.get(country)

    
    st_time,et_time=get_shopee_timestamp()
    st_date_str=datetime.datetime.fromtimestamp(st_time).strftime(r'%Y-%m-%d')
    ed_date_str=datetime.datetime.fromtimestamp(et_time-86400).strftime(r'%Y-%m-%d')


    api_url=f'https://{host}/api/v4/seller/local_wallet/export_wallet_transactions'

    params=await plw_get_params(page,basic_info)
    payload={
                "wallet_provider":0,
                "start_date":st_date_str,
                "end_date":ed_date_str,
                "transaction_types":[101,102,2102,2101,402,401,404,405,460,406,407,408,412,415,416,417,411,461,413,414,418,419,464,462,463,465,468,469,471,472,473,477,474,502,501,503,450,301,504,505,302,451,203,201,4001,4000,801,802],
                "file_type":1,
                "client_type":1
            }
    if is_kj:
        api_url=f'https://seller.shopee.cn/api/cb_wallet/export_withdrawal_report'
        payload={
                    "start_date":st_date_str,
                    "end_date":ed_date_str,
                    "language":"zh-CN",
                    "status":0
                }
    json_data=await plw_make_request(page,api_url,params,payload)
    if is_kj:
        rptid = json_data.get('report', {}).get('report_id')
        
    else:
        rptid = json_data.get('data', {}).get('wallet_transaction_report', {}).get('report_id')
    if rptid:
        logging.info(f'店铺:《{dpName}》,{bid},成功获取获取Balance报表reportid:《{rptid}》')
    else:
        logging.info(f'店铺:《{dpName}》,{bid},获取Balance报表reportid错误 => {json_data}')
        return

    api_url2=f'https://{host}/api/v4/seller/local_wallet/get_wallet_transaction_report'
    if is_kj:
        api_url2=f'https://seller.shopee.cn/api/cb_wallet/get_report'
        payload2={"report_id":rptid}
    params2=copy.copy(params)
    params2['report_id']=rptid
    wait_count2=0
    while True:
        if is_kj:
            json_data2=await plw_make_request(page,api_url2,params,payload2)
            if json_data2 and json_data2.get('report', {}).get('status')==2:
                dpath=json_data2['report']['download_path']
                logging.info(f'店铺:《{dpName}》,{bid},等待Balance报表加载完毕')
                break
        else:
            json_data2=await plw_make_request(page,api_url2,params2)
            if json_data2 and json_data2.get('error')==0:
                ccc_status=json_data2['data']['wallet_transaction_report']['status']
                if ccc_status==2:
                    logging.info(f'店铺:《{dpName}》,{bid},Balance报表加载完毕')
                    break
                else:
                    logging.info(f'店铺:《{dpName}》,{bid},正在加载Balance报表,当前状态:{ccc_status}')
        await asyncio.sleep(1)
        wait_count2+=1
        if wait_count2>180:
            logging.info(f'店铺:《{dpName}》,{bid},等待Balance报表加载超时退出')
            return
    if is_kj:
        cot_byte_list= await plw_get_cot(page,f'https://{host}{dpath}')
    else:
        api_url4=f'https://{host}/api/v4/seller/local_wallet/download_wallet_transaction_report'
        params4={
            'report_id':rptid
        }
        cot_byte_list= await plw_get_cot(page,api_url4,params4)
    if cot_byte_list:
        logging.info(f'店铺:《{dpName}》,{bid},下载Balance报表xlsx文件大小:{len(cot_byte_list)}')
        excel_content =bytes(cot_byte_list)
    else:
        logging.info(f'店铺:《{dpName}》,{bid},下载Balance报表xlsx文件失败')
        return

    with open('TABLEJSON/ShopeeMyBalance.json','r',encoding='utf-8') as f:
        col_json=json.load(f)
    cols=list(col_json.keys())
    try:
        file_bytes = BytesIO(excel_content)
        wb = openpyxl.load_workbook(file_bytes)
        ws=wb['Transaction Report']
        tit_ind=18
        for r in range(1,ws.max_row+1):
            cvalue=ws.cell(r,1).value
            if cvalue=='Date':
                tit_ind=r
                break
        tits=[ws.cell(tit_ind,c).value for c in range(1,ws.max_column+1)]
        for jj,tit in enumerate(tits[:]):
            if tit is None or str(tit).strip() == "":
                tits[jj]=''.join(random.choices(string.ascii_letters + string.digits, k=8))
        ntits=[to_db_column_name(tit) for tit in tits]
        bdatas=[]
        for rr in range(tit_ind+1,ws.max_row+1):
            try:
                indata={
                    'bid':bid,
                    'BName':dpName,
                    'Country':country,
                    'SJBM':sjbm,
                    'YYBM':yybm
                }
                for cc in range(1,ws.max_column+1):
                    cur_tit=ntits[cc-1]
                    if cur_tit in cols: 
                        indata[cur_tit]=ws.cell(rr,cc).value
                bdatas.append(indata)
            except Exception as e:
                print(e)
        
    except Exception as e:
        logging.info(f'店铺:《{dpName}》,{bid},解析Balance报表xlsx文件失败 => {e}')
        return
    logging.info(f'店铺:《{dpName}》,{bid},成功解析并获取 {len(bdatas)} 条Balance数据')
    return bdatas

async def plw_xp_get_pro_sku_list(page:Page,basic_info):

    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    userName=basic_info['UserName']
    gname=basic_info['GroupName']
    yybm=basic_info['YYBM']
    sjbm=basic_info['SJBM']
    host=basic_info['host']
    country=basic_info['Country']
    iskj=basic_info['is_KJ']
    if iskj:
        IMG_HOST_MAP[country]='https://s-cf-sg.shopeesz.com/file/'
    shopid= await plw_xp_get_shopid(page,basic_info)

    if bid in['k146j6c8']:
        pros=await plw_xp_search_prolist(page,basic_info,'live_all')
    else:
        pros=await plw_get_xp_prolist(page,basic_info)

    if pros is None:
        return
    psku_list=[]
    for pro in pros:

        try:
            pro_itemid=str(pro['id'])
            pname=pro['name']
            pimage=f"{IMG_HOST_MAP[country]}{pro['cover_image']}"
            pro_create_time=timestamp_to_sql_datetime(pro['create_time'])
            pro_modify_time=timestamp_to_sql_datetime(pro['modify_time'])
            campaign_id=str(pro['boost_info']['campaign_id'])
            itemid_1688=pro['parent_sku']
            p_view_count=pro['statistics']['view_count']
            p_liked_count=pro['statistics']['liked_count']
            p_sold_count=pro['statistics']['sold_count']
            p_stock=pro['stock_detail']['total_available_stock']
            p_seller_stock=pro['stock_detail']['total_seller_stock']
            p_shopee_stock=pro['stock_detail']['total_shopee_stock']
            sku_infos=pro['model_list']

            for skuinfo in sku_infos:
                sku_image=None
                origin_price=skuinfo['price_detail']['origin_price']
                promotion_price=skuinfo['price_detail']['promotion_price']
                skuid_1688=skuinfo['sku']
                skuprice=(float(origin_price)) if promotion_price=='0.00' else (float(promotion_price))
                if skuinfo['image']:
                    sku_image=f"{IMG_HOST_MAP[country]}{skuinfo['image']}"


                cur_pro_sku={
                    'shopid':shopid,
                    'ItemID':pro_itemid,
                    'campaign_id':campaign_id,
                    'skuid':str(skuinfo['id']),
                    'purl':f"https://{host.replace('seller.','')}/product/{shopid}/{pro_itemid}/",
                    'pname':pname,
                    'sku_name':skuinfo['name'],
                    'pimage':pimage,
                    'p_view_count':p_view_count,
                    'p_liked_count':p_liked_count,
                    'p_sold_count':p_sold_count,
                    'p_stock':p_stock,
                    'p_seller_stock':p_seller_stock,
                    'p_shopee_stock':p_shopee_stock,
                    'sku_image':sku_image,
                    'sku_sold_count':skuinfo['statistics']['sold_count'],
                    'sku_price':skuprice,
                    'sku_stock':skuinfo['stock_detail']['total_available_stock'],
                    'sku_seller_stock':skuinfo['stock_detail']['total_seller_stock'],
                    'sku_shopee_stock':skuinfo['stock_detail']['total_shopee_stock'],
                    'modify_time':pro_modify_time,
                    'create_time':pro_create_time,
                    'itemid_1688':itemid_1688,
                    'skuid_1688':skuid_1688,
                    'bid':bid,
                    'BName':dpName,
                    'GroupName':gname,
                    'userName':userName,
                    'country':country,
                    'SJBM':sjbm,
                    'YYBM':yybm
                }
                psku_list.append(cur_pro_sku)
        except Exception as e:
            traceback.print_exc()
            logging.error(f'json解析虾皮产品列表数据错误 => {e}')

    logging.info(f'店铺:《{dpName}》,{bid},共 {len(pros)} 个产品,{len(psku_list)}个sku')

    return psku_list

async def plw_xp_get_ProductOverview_rpt(page:Page,basic_info,tt):

    '''虾皮下载ProductOverview报表'''
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    country=basic_info['Country']
    sjbm=basic_info['SJBM']
    yybm=basic_info['YYBM']
    is_kj=basic_info['is_KJ']
    sq=SQ_Map.get(country)
    period_map={
        'real_time':'r',
        'yesterday':'y',
        'past7days':'w',
        'past30days':'m'
    }
    t_map={
        'real_time':'小时',
        'yesterday':'昨日',
        'past7days':'七天',
        'past30days':'三十天'
    }
    st_time,et_time=get_shopee_performance_timestamp(period_map[tt])


    api_url4=f'https://{host}/api/mydata/v2/product/overview/export/'
    if is_kj:
        api_url4=f'https://{host}/api/mydata/cnsc/shop/product/overview/export/'

    params4={
        'period': tt,
        'start_ts': st_time,
        'end_ts': et_time
    }


    cot_byte_list= await plw_get_cot(page,api_url4,params4)
    if cot_byte_list:
        logging.info(f'店铺:《{dpName}》,{bid},时间段:{tt},下载ProductOverview报表xlsx文件大小:{len(cot_byte_list)}')
        excel_content =bytes(cot_byte_list)
    else:
        logging.info(f'店铺:《{dpName}》,{bid},时间段:{tt},下载ProductOverview报表xlsx文件失败')
        return

    with open('TABLEJSON/ShopeeProductOverview.json','r',encoding='utf-8') as f:
        col_json=json.load(f)
    cols=list(col_json.keys())
    if is_kj:
        cn_to_col={v:k for k,v in col_json.items()}
        cols=list(cn_to_col.keys())
    try:
        file_bytes = BytesIO(excel_content)
        wb = openpyxl.load_workbook(file_bytes)
        ws=wb['overview']
        tit_ind=1
        for r in range(1,ws.max_row+1):
            cvalue=ws.cell(r,1).value
            if cvalue=='Date':
                tit_ind=r
                break
        tits=[ws.cell(tit_ind,c).value for c in range(1,ws.max_column+1)]
        for jj,tit in enumerate(tits[:]):
            if tit is None or str(tit).strip() == "":
                tits[jj]=''.join(random.choices(string.ascii_letters + string.digits, k=8))
        if is_kj:
            ntits=tits
        else:
            ntits=[to_db_column_name(tit) for tit in tits]
        bdatas=[]
        for rr in range(tit_ind+1,ws.max_row+1):
            try:
                indata={
                    'bid':bid,
                    'BName':dpName,
                    'Country':country,
                    'SJBM':sjbm,
                    'YYBM':yybm,
                    'dtype':t_map[tt]
                }
                for cc in range(1,ws.max_column+1):
                    cur_tit=ntits[cc-1]
                    if cur_tit in cols:
                        ccc_value=ws.cell(rr,cc).value
                        if is_kj:
                            cur_tit=cn_to_col[cur_tit]
                        if cur_tit=='Date':
                            if ':' in ccc_value:
                                dt = datetime.datetime.strptime(ccc_value, '%d-%m-%Y %H:%M')
                            else:
                                dt = datetime.datetime.strptime(ccc_value, '%d-%m-%Y')
                            indata[cur_tit]=dt.strftime("%Y-%m-%d %H:%M:%S")
                        else:
                            indata[cur_tit]=get_xpllbb_value(ccc_value)
                bdatas.append(indata)

            except Exception as e:
                print(e)
        
    except Exception as e:
        logging.info(f'店铺:《{dpName}》,{bid},时间段:{tt},解析ProductOverview报表xlsx文件失败 => {e}')
        return
    logging.info(f'店铺:《{dpName}》,{bid},时间段:{tt},成功解析并获取 {len(bdatas)} 条ProductOverview数据')
    return bdatas

async def plw_xp_get_kj_income_rpt(page:Page,basic_info):

    '''虾皮跨境店下载收入详情报表'''
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    country=basic_info['Country']
    sjbm=basic_info['SJBM']
    yybm=basic_info['YYBM']
    is_kj=basic_info['is_KJ']
    sq=SQ_Map.get(country)
    params=await plw_get_params(page,basic_info)
    #获取拨款轮次
    api_url0='https://seller.shopee.cn/api/v4/accounting/cbpc/seller_income/income_overview/get_available_payout_detail_list'
    json_data0=await plw_make_request(page,api_url0,params)
    if json_data0 and json_data0.get('code')==0:
        payout_list=json_data0['data']['list']
        logging.info(f'店铺:《{dpName}》,{bid},成功获取获取 {len(payout_list)} 个轮次')
    else:
        logging.info(f'店铺:《{dpName}》,{bid},获取Income报表的轮次列表错误 => {json_data}')
        return
    st_time,et_time=get_shopee_timestamp()
    st_date_str=datetime.datetime.fromtimestamp(st_time).strftime(r'%Y-%m-%d')
    ed_date_str=datetime.datetime.fromtimestamp(et_time-86400).strftime(r'%Y-%m-%d')
    payout_ids=[]
    for payout_item in payout_list:
        payout_date_str=payout_item['payout_date']
        payee_id=payout_item['payout_records'][0]['payee_id']
        payout_id=payout_item['payout_records'][0]['payout_id']
        payout_ids.append(payout_id)

    api_url=f'https://{host}/api/v4/accounting/cbpc/seller_income/income_report/request_income_report_generation'
    payload={"income_category":2,"start_date":st_date_str,"end_date":ed_date_str,"payout_ids":payout_ids}

    json_data=await plw_make_request(page,api_url,params,payload)
    if json_data and json_data.get('code')==0:
        rptid=json_data['data']['report_id']
        logging.info(f'店铺:《{dpName}》,{bid},成功获取获取Income报表reportid:《{rptid}》')
    else:
        logging.info(f'店铺:《{dpName}》,{bid},获取Income报表reportid错误 => {json_data}')
        return

    api_url2=f'https://{host}/api/v4/accounting/cbpc/seller_income/income_report/get_income_report'
    params2=copy.copy(params)
    params2['report_id']=rptid
    wait_count2=0
    while True:
        json_data2=await plw_make_request(page,api_url2,params2)
        if json_data2 and json_data2.get('code')==0:
            ccc_status=json_data2['data'].get('status')
            if ccc_status==2:
                logging.info(f'店铺:《{dpName}》,{bid},等待Income报表加载完毕')
                break
            else:
                logging.info(f'店铺:《{dpName}》,{bid},正在加载Income报表,当前状态:{ccc_status}')
        await asyncio.sleep(1)
        wait_count2+=1
        if wait_count2>180:
            logging.info(f'店铺:《{dpName}》,{bid},等待Income报表加载超时退出')
            return
    params3=copy.copy(params)
    api_url3=f'https://{host}/api/v4/accounting/cbpc/seller_income/income_report/query_income_report'
    params3['document_id']=rptid
    json_data3=await plw_make_request(page,api_url3,params3)

    if json_data3 and json_data3.get('code')==0:
        file_path=json_data3['data']['file_path']
        logging.info(f'店铺:《{dpName}》,{bid},成功获取Income报表xlsx文件地址:《{file_path}》')
    
    else:
        logging.info(f'店铺:《{dpName}》,{bid},获取Income报表xlsx地址失败 => {json_data3}')
        return

    api_url4=f'https://{host}/api/v4/accounting/cbpc/seller_income/income_report/get_income_document_file'
    params4={
        'path':file_path,
        'cnsc_shop_id':basic_info['shopid']
        
    }
    cot_byte_list= await plw_get_cot(page,api_url4,params4)
    if cot_byte_list:
        logging.info(f'店铺:《{dpName}》,{bid},下载Income报表xlsx文件大小:{len(cot_byte_list)}')
        excel_content =bytes(cot_byte_list)
        with open('跨境店收入详情.xlsx','wb') as f:
            f.write(excel_content)
    else:
        logging.info(f'店铺:《{dpName}》,{bid},下载Income报表xlsx文件失败')
        return

    with open('TABLEJSON/ShopeeIncome.json','r',encoding='utf-8') as f:
        col_json=json.load(f)
    cn_to_col={v:k for k,v in col_json.items()}
    cn_cols=list(cn_to_col.keys())
    try:
        file_bytes = BytesIO(excel_content)
        wb = openpyxl.load_workbook(file_bytes)
        ws=wb['Income']
        tit_ind=6
        for r in range(1,ws.max_row+1):
            cvalue=ws.cell(r,1).value
            if cvalue=='编号':
                tit_ind=r
                break
        tits=[ws.cell(tit_ind,c).value for c in range(1,ws.max_column+1)]
        for jj,tit in enumerate(tits[:]):
            if tit is None or str(tit).strip() == "":
                tits[jj]=''.join(random.choices(string.ascii_letters + string.digits, k=8))
        indatas=[]
        for rr in range(tit_ind+1,ws.max_row+1):
            try:
                indata={
                    'bid':bid,
                    'BName':dpName,
                    'Country':country,
                    'SJBM':sjbm,
                    'YYBM':yybm
                }
                for cc in range(1,ws.max_column+1):
                    cur_tit=tits[cc-1]
                    if cur_tit in cn_cols: 
                        indata[cn_to_col[cur_tit]]=ws.cell(rr,cc).value
                indatas.append(indata)
            except Exception as e:
                print(e)
        
    except Exception as e:
        logging.info(f'店铺:《{dpName}》,{bid},解析Income报表xlsx文件失败 => {e}')
        return
    logging.info(f'店铺:《{dpName}》,{bid},成功解析并获取 {len(indatas)} 条Income数据')
    return indatas

async def plw_xp_del_dfaft_and_mark(page:Page,basic_info,d_itemid,dpid,zd_dc_bid,ms='发布失败'):
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    res_del=await plw_xp_del_draftpro(page,basic_info,[d_itemid])
    aff_reset=0
    if res_del and res_del.get('code')==0:
        del_list=res_del['data']['result']
        if del_list:
            aff_reset=tms.ExecNoQuery(f"update s1688pro set {zd_dc_bid}='{ms}' where itemid=?",(str(dpid),))
    
    else:
        print(res_del)

    logging.info(f'店铺:《{dpName}》,{bid},草稿箱产品:《{d_itemid}》,无法发布,标记删除:{aff_reset}')


@deepseek_session_singleton
def fanyi_adpro(to_c,dj,pitem,session=None):
    cur_zd_cs=Country_Sql_Zd_Map[to_c]
    zd_ptit=cur_zd_cs['zd_ptit']
    zd_pdes=cur_zd_cs['zd_pdes']
    zd_is_fy_tit=cur_zd_cs['zd_is_fy_tit']
    logging.info(f'国家:《{to_c}》,第 {dj} 个产品,开始生成标题描述')
    pid,ys_itemid,rcode,ys_tit,ys_attrs,is_fy_ensku=pitem
    res_deepseek=get_tit_and_des(session,[ys_tit,ys_attrs],to_c)
    res_fy={'code':0,'ind':dj,'to_c':to_c,'aff_tit_des':0,'aff_sku':0,'aff_ads_move':0,'err_msg':None,'tit_len':0,'des_len':0}
    if res_deepseek:
        tit,des=res_deepseek
        tit=remove_chinese(tit)[1]
        des=remove_chinese(des)[1]
        aff_ss1=tms.ExecNoQuery(f'update s1688pro set {zd_ptit}=?,{zd_pdes}=?,{zd_is_fy_tit}=1 where id=?',(tit,des,pid))
        aff_ss2=0
        if aff_ss1:
            if not is_fy_ensku:
                rs_skus=tms.ExecQuery('select id,sku1,sku2 from S1688ProSKU where itemid=?',(ys_itemid,))
                en_skus=get_en_skus(session,rs_skus)
                aff_sku=0
                if en_skus:
                    for sid,ensku1,ensku2 in en_skus:
                        aff_sku+=tms.ExecNoQuery('update S1688ProSKU set ensku1=?,ensku2=? where id=?',(ensku1,ensku2,int(sid)))
                if aff_sku:
                    aff_isensku=tms.ExecNoQuery(f'update s1688pro set is_fy_ensku=1 where id=?',(pid,))
                res_fy['aff_sku']=aff_sku
                logging.info(f'国家:《{to_c}》,第 {dj} 个产品,翻译 sku {aff_sku} 个,状态:{aff_isensku}')
            else:
                logging.info(f'国家:《{to_c}》,第 {dj} 个产品,无需翻译sku')
            
            aff_ss2=tms.ExecNoQuery('update ShopeeAdsMove set is_fy=1 where random_code=? and to_country=?',(rcode,to_c))
            res_fy['aff_tit_des']=aff_ss1
            res_fy['aff_ads_move']=aff_ss2
            res_fy['tit_len']=len(tit)
            res_fy['des_len']=len(des)
        else:
            res_fy['code']=2
            res_fy['err_msg']='更新标题描述失败'
        logging.info(f'国家:《{to_c}》,第 {dj} 个产品,生成标题长度:{len(tit)},描述长度:{len(des)},S1688Pro状态:{aff_ss1},ShopeeAdsMove状态:{aff_ss2}')
    else:
        res_fy['code']=1
        res_fy['err_msg']='AI生成标题或描述失败'
        logging.info(f'国家:《{to_c}》,第 {dj} 个产品,生成标题描述失败')
    
    return res_fy