from pyppeteer.page import Page
from threading import Thread
from config import *
from queue import Queue
from io import BytesIO
from lxml import etree
from tempfile import TemporaryDirectory
import xml.etree.ElementTree as ET
from requests.adapters import HTTPAdapter
from urllib.parse import urlencode
from flatten_dict import flatten
from tool import calculate_timestamps2,get_utc7_timestamps,repeat_reload,is_decimal,timestamp_to_sql_datetime,upload_camp,get_gdppkw,get_shopee_timestamp
from tool import repeat_goto,find_code,tms,datetime,is_contain000,try_to_float,try_to_int,boldate_chinese_date_range,get_yn_0timestamp
import pandas as pd
import concurrent.futures as thddd
import math,time,json,copy,re,traceback,itertools,os
import requests,asyncio,logging,string,uuid,zipfile

# Module-wide HTTP session shared by the plain-requests helpers (connection pooling).
session = requests.Session()

def is_contain000(des, elist):
    """Return True if any element of *elist* is contained in *des*.

    NOTE(review): this redefines (shadows) the ``is_contain000`` imported
    from ``tool`` at the top of the file — confirm which one is intended.

    Args:
        des: the string (or any container supporting ``in``) to search.
        elist: iterable of candidate substrings / items.

    Returns:
        bool: True when at least one element occurs in ``des``.
    """
    # any() short-circuits on the first match; the original loop always
    # scanned the whole list even after a hit.
    return any(eee in des for eee in elist)

def add_hmg(ean):
    """Flag the product *ean* as having a bad category id (pw=666) in allgoodpro."""
    affected = tms.ExecNoQuery(
        'update allgoodpro set pw=666,groupname=? where pean=?',
        ('类目ID错误', ean),
    )
    logging.info(f'ean:{ean},类目错误记录产品 pw=666:{affected}')

def add_ign(ean):
    """Move the product *ean* into the temporarily-ignored group (pw=888)."""
    affected = tms.ExecNoQuery(
        'update allgoodpro set pw=888,groupname=? where pean=?',
        ('暂时忽略', ean),
    )
    logging.info(f'ean:{ean},加入暂时忽略分组 pw=888:{affected}')

async def make_request(page:Page, url, params=None, data=None):
    """Perform an HTTP request from inside the browser page via injected JS.

    GET when *data* is None, otherwise POST with a JSON body. *params* are
    urlencoded onto the URL. Retries up to 3 times with a 3s pause between
    attempts; returns the parsed JSON response, or None if every try fails.
    """
    if params:
        url = f"{url}?{urlencode(params)}"

    get_script = """
                async function(url) {
                    const response = await fetch(url, {
                        method: 'GET',
                        headers: {
                            'Content-Type': 'application/json'
                        }
                    });
                    const data = await response.json(); 
                    return data; 
                }
                """
    post_script = """
                async function(purl, postData) {
                    const response = await fetch(purl, {
                        method: 'POST',
                        headers: {
                            'Content-Type': 'application/json'
                        },
                        body: JSON.stringify(postData)
                    });
                    const data = await response.json();
                    return data; 
                }
                """

    for _ in range(3):
        try:
            if data is None:
                return await page.evaluate(get_script, url)
            return await page.evaluate(post_script, url, data)
        except Exception as e:
            logging.error(f'js异步抓包错误 => 《{url}》,{e}')
        await asyncio.sleep(3)

async def js_get_cot(page:Page, url, params=None):
    """Download *url* from inside the page and return the raw body as a list
    of byte values (ints from a Uint8Array). Retries 3 times with a 3s
    pause; returns None when every attempt fails."""
    if params:
        url = f"{url}?{urlencode(params)}"
    script = """
            async function(url) {
                const response = await fetch(url, {
                    method: 'GET',
                    headers: {
                        'Accept': 'application/json, application/force-download, text/plain, */*',
                        'Content-Type': 'application/json'
                    }
                });
                const arrayBuffer = await response.arrayBuffer();
                return Array.from(new Uint8Array(arrayBuffer));
            }
            """
    for _ in range(3):
        try:
            return await page.evaluate(script, url)
        except Exception as err:
            logging.info(f'js异步抓包错误 => {err}')
        await asyncio.sleep(3)

async def js_get_cot2(page:Page, url, params=None,accept='application/vnd.allegro.public.v1+json',content_type='application/json'):
    """Download *url* from inside the page with configurable Accept /
    Content-Type headers and return the body as a list of byte values.
    Retries 3 times with a 3s pause; returns None when every attempt fails.
    """
    if params:
        url = f"{url}?{urlencode(params)}"
    # Header values are interpolated into the JS once; they are constant
    # for the lifetime of this call, so no need to rebuild per retry.
    script = f"""
            async function(url) {{
                const response = await fetch(url, {{
                    method: 'GET',
                    headers: {{
                        'Accept': '{accept}',
                        'Content-Type': '{content_type}'
                    }}
                }});
                const arrayBuffer = await response.arrayBuffer();
                return Array.from(new Uint8Array(arrayBuffer));
            }}
            """
    for _ in range(3):
        try:
            return await page.evaluate(script, url)
        except Exception as err:
            logging.info(f'js异步抓包错误 => {err}')
        await asyncio.sleep(3)

async def js_req_bol(page:Page, url, params=None, data=None,accept=None,content_type=None):
    """In-page fetch helper for Allegro endpoints.

    GET when *data* is None, otherwise POST with a JSON body. *accept* and
    *content_type* fall back to the Allegro public JSON media types.
    Retries up to 3 times (3s pause); returns parsed JSON or None.
    """
    if params:
        url = f"{url}?{urlencode(params)}"
    accept = accept or 'application/vnd.allegro.public.v1+json'
    content_type = content_type or 'application/json'

    for _ in range(3):
        try:
            if data is None:
                script = f"""
                    async function(url) {{
                        const response = await fetch(url, {{
                            method: 'GET',
                            headers: {{
                                'Accept': '{accept}',
                                'Accept-Encoding': 'gzip, deflate, br, zstd',
                                'Accept-Language': 'zh-HK,zh-TW;q=0.9,zh;q=0.8',
                                'Content-Type': '{content_type}'
                            }}
                        }});
                        const data = await response.json(); 
                        return data; 
                    }}
                """
                return await page.evaluate(script, url)
            script = f"""
                async function(purl, postData) {{
                    const response = await fetch(purl, {{
                        method: 'POST',
                        headers: {{
                            'Accept':'{accept}',
                            'Accept-Encoding':'gzip, deflate, br, zstd',
                            'Accept-Language':'zh-HK,zh-TW;q=0.9,zh;q=0.8',
                            'Content-Type': '{content_type}'
                        }},
                        body: JSON.stringify(postData)
                    }});
                    const data = await response.json();
                    return data; 
                }}
                """
            return await page.evaluate(script, url, data)
        except Exception as e:
            logging.info(f'js异步抓包{url}错误 => {e}')
        await asyncio.sleep(3)

async def js_req_shopid(page:Page,basic_info):
    """Return the Shopee shop id for this account.

    Uses the cached value from basic_info['shopid'] when present; otherwise
    calls the seller-account shop_info API through the page, persists the id
    into the `houtai` table keyed by BrowserID, and returns it.
    Returns None (implicitly) when the API request fails.
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    shopid=basic_info['shopid']
    if shopid:
        # Cached id available — no network round trip needed.
        return shopid
    else:
        # ck is unused here; only the SPC_CDS cookie value is needed.
        spdc,ck=await get_ck_and_spc_cds(page)
        api_url=f'https://{basic_info["host"]}/api/selleraccount/shop_info/'
        params={
            'SPC_CDS': spdc,
            'SPC_CDS_VER': 2,
        }
        json_data=await make_request(page,api_url,params)
        if json_data:
            shopid=str(json_data['data']['shop_id'])
            # Persist the freshly fetched id so future calls hit the cache path.
            aff=tms.ExecNoQuery('update houtai set shopid=? where BrowserID=?',(shopid,bid))
            logging.info(f'店铺:《{dpName}》,{bid},获取shopid:{shopid},更新后台表:{aff}')
            return shopid
        else:
            logging.info(f'店铺:《{dpName}》,{bid},获取shopid失败')

async def js_get_seller_id(page,basic_info):
    """Fetch the Allegro sellerId from the sales-center "hints" endpoint.

    Returns the sellerId string on success; None when the request fails or
    the hints list is empty (the empty-hints case is not logged).
    """
    bid = basic_info['BrowserID']
    dpName = basic_info['DpName']
    json_data = await js_req_bol(
        page,
        'https://edge.salescenter.allegro.com/hints',
        accept='application/vnd.allegro.internal.v2+json',
    )
    if not json_data:
        logging.info(f'店铺:《{dpName}》,{bid},获取sellerId失败')
        return
    hints = json_data['hints']
    if not hints:
        return
    sellerId = hints[0]['sellerId']
    logging.info(f'店铺:《{dpName}》,{bid},成功获取sellerId:{sellerId}')
    return sellerId

async def get_ck_and_spc_cds(page):
    """Read the page's cookies and return (SPC_CDS value, cookie header string).

    The cookie header joins every cookie as "name=value" with ';'. If the
    SPC_CDS cookie appears more than once, the last occurrence wins; when it
    is absent, the first element of the tuple is None.
    """
    cookies = await page.cookies()
    pairs = [f"{c['name']}={c['value']}" for c in cookies]
    spc_cds = None
    for c in cookies:
        if c['name'] == 'SPC_CDS':
            spc_cds = c['value']
    return spc_cds, ';'.join(pairs)

async def get_to_do_list(page,basic_info):
    """Fetch the homepage "To Do List" summary counters for a shop.

    Returns a list:
        [unpaid, to_process_shipment, processed_shipment,
         pending_cancellation, pending_return, banned_products,
         sold_out_products, to_join_bidding]
    Entries with no matching counter in this API are returned as None.
    Returns None when the request or the parsing fails.
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    # fixed: removed the unused `url` local that pointed at the old
    # /api/sellermisc/ endpoint; only api_url is requested.
    api_url=f'https://{host}/api/miscellaneous/homepage/get_to_do_list_summary/'
    params=await get_params(page,basic_info)

    json_data=None
    for _ in range(3):
        json_data=await make_request(page,api_url,params)
        if json_data:
            break
        # Failed attempt: re-open the shop homepage (likely to refresh the
        # session) before retrying.
        await repeat_goto(page,f'https://{host}/')
        await asyncio.sleep(6)

    if not json_data:
        logging.info(f'店铺:《{dpName}》,bid:{bid},获取《To Do List》接口错误')
        return
    try:
        to_do_list_data=json_data['data']
        # Counters without an equivalent field in this API stay None.
        unpaid=None
        to_process_shipment=to_do_list_data['shipment_to_process']
        processed_shipment=to_do_list_data['shipment_processed']
        pending_cancellation=None
        pending_return=to_do_list_data['order_return_refund_cancel']
        banned_products=to_do_list_data['product_banned_deboosted']
        sold_out_products=None
        to_join_bidding=to_do_list_data['promotion_to_join_bidding']
    except Exception as e:
        traceback.print_exc()
        logging.error(f'店铺:《{dpName}》,bid:{bid},解析 to do list 出错 => {e}')
        return
    return [unpaid,to_process_shipment,processed_shipment,pending_cancellation,
            pending_return,banned_products,sold_out_products,to_join_bidding]

async def get_business_insights(page,basic_info):
    """Fetch the homepage "Business Insights" key metrics for a shop.

    Returns [visitors, visitors_trend, page_views, page_views_trend,
    orders, orders_trend, conversion_rate, conversion_rate_trend] with the
    trend/rate values rounded to 4 decimal places, or None on failure.
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    url=f'https://{host}/api/mydata/homepage/key-metrics'
    # NOTE(review): api_url (the /v2/ endpoint) is defined but never used —
    # the request below goes to `url`. Confirm which endpoint is intended.
    api_url=f'https://{host}/api/mydata/v2/homepage/key-metrics/'
    params=await get_params(page,basic_info)
    params['order_type']='confirmed'


    # Up to 3 attempts; on failure, re-open the shop homepage (presumably to
    # refresh the session — confirm) before retrying.
    for jjj in range(3):
        json_data=await make_request(page,url,params)
        if json_data:
            break
        else:
            await repeat_goto(page,f'https://{host}/')
            await asyncio.sleep(6)

    if not json_data:
        logging.info(f'店铺:《{dpName}》,bid:{bid},获取《Business_Insights》接口错误')
        return
    # with open('JSONDATA/首页Business_Insights.json','w',encoding='utf-8') as f:
    #     json.dump(json_data,f,ensure_ascii=False,indent=4)
    try:

        busi_data=json_data['data']
        visitors=busi_data['uv']
        visitors_trend=round(busi_data['uv_pct_diff'],4)
        page_views=busi_data['pv']
        page_views_trend=round(busi_data['pv_pct_diff'],4)
        orders=busi_data['orders']
        orders_trend=round(busi_data['orders_pct_diff'],4)
        conversion_rate=round(busi_data['conversion_rate'],4)
        conversion_rate_trend=round(busi_data['conversion_rate_pct_diff'],4)

        return [visitors,visitors_trend,page_views,page_views_trend,
                orders,orders_trend,conversion_rate,conversion_rate_trend]

    except Exception as e:
        traceback.print_exc()
        logging.error(f'店铺:《{dpName}》,bid:{bid},解析Business_Insights,出错=> {e}')
        return

async def get_performance(page,basic_info):
    """Fetch the shop performance metrics (Shopee seller center).

    Returns [Non_fulfilment_Rate, Late_Shipment_Rate, Preparation_Time,
    My_Penalty] or None when the request or parsing fails.
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    url=f'https://{host}/api/v2/performance/getShopPerformanceForSC'
    params=await get_params(page,basic_info)
    # Empty dict (not None) forces make_request down its POST branch.
    data={

    }


    # Up to 3 attempts; on failure re-open the shop homepage before retrying.
    for jjj in range(3):
        json_data=await make_request(page,url,params,data)
        if json_data:
            break
        else:
            await repeat_goto(page,f'https://{host}/')
            await asyncio.sleep(6)

    if not json_data:
        logging.info(f'店铺:《{dpName}》,bid:{bid},获取《Performance》接口错误')
        return

    # with open('JSONDATA/首页Performance.json','w',encoding='utf-8') as f:
    #     json.dump(json_data,f,ensure_ascii=False,indent=4)
    try:
        metrics=json_data['metrics']
        # Flatten the metric list into name -> value for direct lookups.
        metrics_dict={metric['metric_name']:metric['metric_value'] for metric in metrics}
        Non_fulfilment_Rate=metrics_dict['NON_FULFILLMENT_RATE']
        Late_Shipment_Rate=metrics_dict['LATE_SHIPMENT_RATE']
        Preparation_Time=metrics_dict['PREPARATION_TIME']
        My_Penalty=json_data['penalty_points']
        return [Non_fulfilment_Rate,Late_Shipment_Rate,Preparation_Time,My_Penalty]
    except Exception as e:
        traceback.print_exc()
        logging.info(f'店铺:《{dpName}》,bid:{bid},解析Performance,出错=> {e}')
        return

def get_bol_home(ck):
    """获取阿里狗七天流量信息 — fetch Allegro "offer views, last 7 days" stats.

    Args:
        ck: raw Cookie header string for an authenticated Allegro session.

    Returns:
        dict: parsed JSON response body.

    Raises:
        requests.RequestException: on network failure or timeout.
        ValueError: when the response body is not valid JSON.
    """
    data_7view_url='https://edge.salescenter.allegro.com/sellers/dashboard/seller-statistics/offer-views/last-7-days'

    headers={
        'Accept':'application/vnd.allegro.public.v1+json',
        'Accept-Encoding':'gzip, deflate, br, zstd',
        'Accept-Language':'zh-HK,zh-TW;q=0.9,zh;q=0.8',
        'cookie':ck,
        'user-agent':'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36'
    }

    # timeout added so a stalled connection cannot hang the caller forever.
    res=session.get(data_7view_url,headers=headers,timeout=30)
    # fixed: logging.info(res.status_code, res.text) passed the int status
    # code as the log *format string*, which blows up inside the logging
    # machinery ("--- Logging error ---"). Use lazy %-style args instead.
    logging.info('%s %s',res.status_code,res.text)
    return res.json()

async def get_all_allegro_pro(page:Page,basic_info):
    """Page through the Allegro "sale/offers" API (1000 per page) and collect
    every ACTIVE BUY_NOW offer of the shop on allegro-pl.

    Returns a list of dicts with offer id, gtin (pean), price, name, photo,
    seller id, stats (sold/favorites/visits), category id and publication
    start time. Failed pages are re-queued; gives up after 30 errors.
    """
    req_queue=Queue(-1)
    req_queue.put(1)
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    accept='application/vnd.allegro.web.v2+json'
    api_url='https://edge.salescenter.allegro.com/sale/offers'
    err_count=0
    pdatas=[]
    # fixed: max_p was only assigned inside the cur_p==1 branch; if page 1
    # never succeeded, the final log line raised NameError.
    max_p=0

    while True:
        if req_queue.empty():
            logging.info(f'店铺:《{dpName}》,bid:{bid},当前产品页已全部请求,退出循环')
            break

        if err_count>30:
            # fixed: the message used to say "50次" while the threshold is 30.
            logging.info(f'店铺:《{dpName}》,bid:{bid},错误超过30次,退出循环')
            break
        cur_p=req_queue.get()
        para={
                'publication.marketplace': 'allegro-pl',
                'country.code': 'PL',
                'limit': 1000,
                'publication.status': 'ACTIVE',
                'sellingMode.format': 'BUY_NOW',
                'offset': (cur_p-1)*1000
            }

        res_data=await js_req_bol(page,api_url,para,accept=accept)
        if not res_data:
            # Request failed: count the error, reload the page and re-queue
            # this page number for another attempt.
            err_count+=1
            await repeat_reload(page)
            await asyncio.sleep(6)
            req_queue.put(cur_p)
            continue

        offers=res_data['offers']
        for item in offers:
            try:
                stats=item['marketplaces']['data'][0]['stats']
                pdatas.append({
                    'pean':item['gtin']['value'],
                    'offer_id':item['id'],
                    'pl_price':int(item['sellingMode']['price']['amount']),
                    'pname':item['name'],
                    'pic':item['photoUrl'],
                    'sellerId':item['seller']['id'],
                    'sold':stats['sold'],
                    'favorites':stats['favorites'],
                    'visits':stats['visits'],
                    'leimu':item['category'].get('id'),
                    'publish_time':item['publication']['startedAt']
                })
            except Exception as e:
                logging.error(f'解析参数失败 =>{e}')

        logging.info(f'店铺:《{dpName}》,bid:{bid},第{cur_p}页,共获取了{len(offers)}个产品,当前总产品数{len(pdatas)}')

        if cur_p==1:
            # First page carries the grand total: enqueue all remaining pages.
            totalCount=res_data['totalCount']
            max_p=math.ceil(totalCount/1000)
            for p in range(2,max_p+1):
                req_queue.put(p)
            logging.info(f'店铺:《{dpName}》,bid:{bid},获取最大页数:{max_p},任务总页数剩余:{req_queue.qsize()} 页')

        logging.info(f'店铺:《{dpName}》,bid:{bid},当前位于第 {cur_p} 页,剩余:{req_queue.qsize()} 页')
        # fixed: time.sleep(1) blocked the event loop inside a coroutine.
        await asyncio.sleep(1)
    logging.info(f'店铺:《{dpName}》,bid:{bid},共{max_p}页,获取了{len(pdatas)}个产品')
    return pdatas

async def get_alegro_total_count(page:Page,basic_info):
    """Return the shop's total count of ACTIVE BUY_NOW offers on allegro-pl,
    or None (implicitly) when the request fails."""
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']

    # limit=20 keeps the payload tiny; only totalCount is read from it.
    query={
        'publication.marketplace': 'allegro-pl',
        'country.code': 'PL',
        'limit': 20,
        'publication.status': 'ACTIVE',
        'sellingMode.format': 'BUY_NOW',
        'offset': 0
    }

    res_data=await js_req_bol(
        page,
        'https://edge.salescenter.allegro.com/sale/offers',
        query,
        accept='application/vnd.allegro.web.v2+json',
    )
    if not res_data:
        logging.info(f'店铺:《{dpName}》,bid:{bid},获取总商家产品数失败')
        return
    totalCount=res_data['totalCount']
    logging.info(f'店铺:《{dpName}》,bid:{bid},成功获取总上架产品数:{totalCount}')
    return totalCount

async def get_xp_probb(page:Page,basic_info,serach_id=None,px='add_to_cart_buyers.desc'):
    """Walk the past-30-days product performance report (50 per page).

    Collects products ordered by *px* until the ordering metric drops below 1
    (the report is sorted descending, so everything after is 0 too).
    If *serach_id* (sic — kept for interface compatibility) is given, returns
    that single product's entry instead of the list; returns None when the
    id is never found (previously this raised NameError).
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    st_time,ed_time=calculate_timestamps2(30)
    spdc,ck=await get_ck_and_spc_cds(page)
    # The "stop collecting" metric must match the requested ordering.
    key_tc='add_to_cart_buyers'
    if px=='confirmed_buyers.desc':
        key_tc='confirmed_buyers'

    params={
        'SPC_CDS': spdc,
        'SPC_CDS_VER': 2,
        'start_time': st_time,
        'end_time': ed_time,
        'period': 'past30days',
        'metric_ids': 'all',
        'order_by': px,
        'page_size': 50,
        'page_num': 1,
        'category_type': 'shopee',
        'category_id': -1
    }
    req_queue=Queue(-1)
    req_queue.put(1)
    datas=[]
    istc=False
    # fixed: mb_pro was unbound -> NameError at the end when serach_id was
    # supplied but never matched any item.
    mb_pro=None
    api_url='https://seller.shopee.co.id/api/mydata/v3/product/performance/'
    err_count=0
    while True:
        if istc:
            break
        if req_queue.empty():
            break
        page_num=req_queue.get()
        params['page_num']=page_num

        jsdata=await make_request(page,api_url,params)

        if not jsdata:
            err_count+=1
            logging.info(f'店铺:《{dpName}》,bid:{bid},请求第{page_num}页产品报表出错')
            req_queue.put(page_num)
            if err_count>50:
                logging.info(f'店铺:《{dpName}》,bid:{bid},错误超过50次,退出循环')
                istc=True
            await repeat_reload(page)
            await asyncio.sleep(3)
            continue

        items=jsdata['result']['items']
        total=jsdata['result']['total']
        if len(items)==0:
            break

        if page_num==1:
            # First page carries the total: enqueue all remaining pages.
            max_p=math.ceil(total/50)
            for ppp in range(2,max_p+1):
                req_queue.put(ppp)
            logging.info(f'店铺:《{dpName}》,bid:{bid},获取最大页数为{max_p}')

        for item in items:
            itemid=str(item['id'])
            if serach_id:
                if serach_id==itemid:
                    mb_pro=copy.copy(item)
                    logging.info(f'店铺:《{dpName}》,bid:{bid},已获取itemid:{serach_id}的产品信息')
                    istc=True
                    break
            if item[key_tc]<1:
                # Sorted descending: once the metric hits 0, stop entirely.
                istc=True
                break
            datas.append(item)

    logging.info(f'店铺:{dpName},共获取{len(datas)} 个加购产品信息')
    if serach_id:
        return mb_pro
    else:
        return datas

async def get_all_xp_order(page:Page,basic_info):
    """Collect every SKU with at least one confirmed buyer in the past 30 days.

    Walks the product-performance report (sorted by confirmed_buyers desc),
    resolves the product's random code / PH itemid mapping from the
    RandomCodes table, and returns rows of
    [bid, shopid, itemid, purl, pname, rcode, ph_itemid, MainImage, delist,
     skuid, sku1, sku2, Quantity].

    Raises:
        RuntimeError: after more than 10 consecutive request failures.
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    st_time,ed_time=calculate_timestamps2(30)
    spdc,ck=await get_ck_and_spc_cds(page)
    shopid=await js_req_shopid(page,basic_info)
    params={
        'SPC_CDS': spdc,
        'SPC_CDS_VER': 2,
        'start_time': st_time,
        'end_time': ed_time,
        'period': 'past30days',
        'metric_ids': 'all',
        'order_by': 'confirmed_buyers.desc',
        'page_size': 50,
        'page_num': 1,
        'category_type': 'shopee',
        'category_id': -1
    }
    req_queue=Queue(-1)
    req_queue.put(1)
    api_url='https://seller.shopee.co.id/api/mydata/v3/product/performance/'
    err_count=0
    datas=[]
    istc=False
    while True:
        if istc:
            break
        if req_queue.empty():
            break
        page_num=req_queue.get()
        params['page_num']=page_num
        jsdata=await make_request(page,api_url,params)

        if not jsdata:
            req_queue.put(page_num)
            err_count+=1
            logging.info(f'店铺:《{dpName}》,bid:{bid},请求第{page_num}页产品报表出错')
            if err_count>10:
                logging.info(f'店铺:《{dpName}》,bid:{bid},错误超过10次,退出循环')
                # fixed: a bare `raise` outside an except clause only produced
                # "RuntimeError: No active exception to re-raise"; raise an
                # explicit, descriptive error of the same type instead.
                raise RuntimeError(f'店铺:《{dpName}》,bid:{bid},产品报表请求连续失败超过10次')
            await repeat_reload(page)
            await asyncio.sleep(3)
            continue

        items=jsdata['result']['items']
        total=jsdata['result']['total']
        if len(items)==0:
            break
        if page_num==1:
            # First page carries the total: enqueue all remaining pages.
            max_p=math.ceil(total/50)
            for ppp in range(2,max_p+1):
                req_queue.put(ppp)
            logging.info(f'店铺:《{dpName}》,bid:{bid},获取订单页最大页数为{max_p}')

        # (removed a duplicated `if len(items)==0: break` — items cannot have
        # changed since the identical check above)

        for item in items:
            if item['confirmed_buyers']<1:
                # Sorted by confirmed_buyers desc: once 0, the rest are 0 too.
                istc=True
                break
            itemid=str(item['id'])

            purl=f'https://shopee.co.id/product/{shopid}/{itemid}'
            pname=item['name']
            MainImage=f"https://down-id.img.susercontent.com/file/{item['image']}"
            rcode=''
            ph_itemid=''
            # Product names may end with "|XXXXXXXX|" — an 8-char random code
            # linking this listing to a row in the RandomCodes table.
            mat_sjm= re.search(r'.*\|(.{8})\|$',pname)
            if mat_sjm:
                rcode=mat_sjm.group(1)
                rs_rcodes=tms.ExecQuery('select itemid from RandomCodes where RandomCode=?',(rcode,))
                if len(rs_rcodes)>0:
                    ph_itemid=rs_rcodes[0][0]


            delist=0 if item['status']==1 else 1
            models=item['models']
            for model in models:
                skuid=str(model['id'])
                skuname=model['name']
                sku2=None
                # SKU name is "variant1,variant2" — split only on the first comma.
                skulist=skuname.split(',',maxsplit=1)
                sku1=skulist[0]
                if len(skulist)==2:
                    sku2=skulist[1]

                if not rcode:
                    # Fallback lookup when no random code was embedded in the name.
                    rcode,ph_itemid=find_code([itemid,sku1,sku2,pname])

                Quantity=model['confirmed_buyers']
                if Quantity>0:
                    datas.append([bid,shopid,itemid,purl,pname,rcode,ph_itemid,MainImage,delist,skuid,sku1,sku2,Quantity])
    logging.info(f'店铺:{dpName},{bid},共获取{len(datas)} 个出单SKU')

    return datas

async def get_camp_info(page:Page,basic_info,ishome=False):
    """Fetch the shop's ongoing CPC ads for last month.

    Returns a dict item_id -> [campaign_id, create_days] of live (non-sold-out,
    non-targeting) ads; when *ishome* is True, returns the historical total
    count instead. Returns None when the API request fails.
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    country=basic_info['Country']
    # Read but unused below; kept so a missing 'is_KJ' key still raises KeyError.
    is_kj=basic_info['is_KJ']
    # presumably the shop's UTC offset (7 = ID, 8 = MY/SG) — TODO confirm.
    sq=7
    if country in ['马来西亚','新加坡']:
        sq=8
    st_time,ed_time=get_shopee_timestamp('last_month',sq)
    offset=0
    limit=500
    campaign_dict={}
    api_url=f'https://{host}/api/pas/v1/homepage/query/'
    params=await get_params(page,basic_info)
    data={
        "start_time":st_time,
        "end_time":ed_time,
        "filter":{"campaign_type":"new_cpc_homepage","state":"ongoing","search_term":""},
        "offset":offset,
        "limit":limit
    }

    json_data=await make_request(page,api_url,params,data)
    # fixed: make_request returns None after 3 failed attempts; the original
    # then crashed with TypeError on the subscripts below. Fail soft and
    # return None like the other homepage helpers.
    if not json_data:
        logging.info(f'店铺:《{dpName}》,bid:{bid},获取广告列表接口错误')
        return
    total_count=json_data['data']['total']
    entry_list=json_data['data']['entry_list']

    for entry in entry_list:
        product_placement=entry['manual_product_ads']['product_placement']
        if product_placement!='targeting':
            campaign_id=str(entry['campaign']['campaign_id'])
            item_id=str(entry['manual_product_ads']['item_id'])
            start_time=entry['campaign']['start_time']
            now_time=int(time.time())
            trait_list=entry['trait_list']
            # Keep only the first (most recent) live ad per item.
            if not campaign_dict.get(item_id):
                if 'item_sold_out' not in trait_list:
                    create_days=math.ceil((now_time-start_time)/86400)
                    campaign_dict[item_id]=[campaign_id,create_days]

    logging.info(f'店铺:《{dpName}》,bid:{bid},在用广告数:{len(campaign_dict.keys())},历史广告总数:{total_count}')
    if ishome:
        return total_count
    return campaign_dict

async def get_all_camp_info(page:Page,basic_info,dayjg=90):
    """Fetch all "ongoing" CPC ads from the last *dayjg* days and reconcile DB state.

    Side effects on the database:
      - sold-out campaigns: ShopeeADCost.paused=2 and campaign.state='soldout';
      - campaigns no longer in the live set: ShopeeADCost.paused=1 and
        campaign.state='paused'.
    Returns the list of live (non-sold-out, non-targeting) raw ad entries.

    NOTE(review): make_request can return None after 3 failed attempts; the
    subscript below would then raise TypeError — no guard here.
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    spdc,ck=await get_ck_and_spc_cds(page)
    st_time,ed_time=calculate_timestamps2(dayjg)
    offset=0
    limit=500
    campaign_dict={}
    api_url='https://seller.shopee.co.id/api/pas/v1/homepage/query/'
    params={
        'SPC_CDS':spdc,
        'SPC_CDS_VER': 2
    }

    data={
        "start_time":st_time,
        "end_time":ed_time,
        "filter":{"campaign_type":"new_cpc_homepage","state":"ongoing","search_term":""},
        "offset":offset,
        "limit":limit
    }

    json_data=await make_request(page,api_url,params,data)

    total_count=json_data['data']['total']
    entry_list=json_data['data']['entry_list']
    camp_info_list=[]
    camp_sold_outs=[]
    for entry in entry_list:
        product_placement=entry['manual_product_ads']['product_placement']
        if product_placement!='targeting':
            campaign_id=str(entry['campaign']['campaign_id'])
            item_id=str(entry['manual_product_ads']['item_id'])
            start_time=entry['campaign']['start_time']
            now_time=int(time.time())
            trait_list=entry['trait_list']
            # Only the first entry per item_id is considered (dedupe by item).
            if not campaign_dict.get(item_id):
                if 'item_sold_out' not in trait_list:
                    create_days=math.ceil((now_time-start_time)/86400)
                    campaign_dict[item_id]=[campaign_id,create_days]
                    camp_info_list.append(entry)
                else:
                    # Sold-out item: mark both tables and remember it.
                    aff_sold_out=tms.ExecNoQuery('update ShopeeADCost set paused=2 where campaign_id=?',(campaign_id,))
                    aff_sold_out2=tms.ExecNoQuery("update campaign set state='soldout' where campaign_id=?",(campaign_id,))
                    logging.info(f'店铺:《{dpName}》,bid:{bid},item_id:{item_id},缺货标记:{aff_sold_out},camp标记:{aff_sold_out2}')
                    camp_sold_outs.append(item_id)

    itemids=[str(cam['manual_product_ads']['item_id']) for cam in camp_info_list]
    ppps=itemids+camp_sold_outs
    aff_paused=0
    if itemids:
        # NOTE(review): SQL built by f-string interpolation (shopid and the
        # IN-list); values come from our own API/DB here, but parameterized
        # queries would be safer.
        wwwstr=','.join([f"'{iii}'" for iii in ppps])
        shopid=basic_info['shopid']
        aff_paused=tms.ExecNoQuery(f"update ShopeeADCost set paused=1 where shopid='{shopid}' and product_id not in ({wwwstr})")
        aff_paused2=tms.ExecNoQuery(f"update campaign set state='paused' where shopid='{shopid}' and product_id not in ({wwwstr})")
    logging.info(f'店铺:《{dpName}》,{bid},共有 {len(camp_info_list)} 个在用广告,标注{aff_paused}个暂停广告,{len(camp_sold_outs)} 个缺货')


    return camp_info_list

async def get_camps(page:Page,basic_info,is_ref=False,tz_ended=False,is_del_task=False):
    """Fetch all ongoing CPC ads of the shop and return item_id -> campaign_id.

    Optional side effects:
      - tz_ended: mark ShopeeeADTZKW rows of no-longer-active products as ended;
      - is_ref: upload the collected raw entries via upload_camp;
      - is_del_task: delete ShopeeADTask rows for products no longer advertised.
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']

    spdc,ck=await get_ck_and_spc_cds(page)
    url='https://seller.shopee.co.id/api/pas/v1/homepage/query/'
    params={
        'SPC_CDS':spdc,
        'SPC_CDS_VER': 2
    }
    st_time,ed_time=calculate_timestamps2()
    offset=0
    limit=500
    campaign_dict={}
    need_save=[]
    camp_need_change_daily=[]
    while True:
        data={
            "start_time":st_time,
            "end_time":ed_time,
            "filter":{"campaign_type":"new_cpc_homepage","state":"ongoing","search_term":""},
            "offset":offset,
            "limit":limit
            }
        json_data=await make_request(page,url,params,data)
        if not json_data:
            # NOTE(review): bare `raise` outside an except clause only raises
            # RuntimeError("No active exception to re-raise").
            raise
        total_count=json_data['data']['total']
        entry_list=json_data['data']['entry_list']


        for entry in entry_list:
            product_placement=entry['manual_product_ads']['product_placement']
            if product_placement!='targeting':
                campaign_id=str(entry['campaign']['campaign_id'])
                item_id=str(entry['manual_product_ads']['item_id'])
                trait_list=entry['trait_list']
                # Only the first live ad per item_id is kept (dedupe by item).
                if not campaign_dict.get(item_id):
                    if 'item_sold_out' not in trait_list:
                        campaign_dict[item_id]=campaign_id
                        need_save.append(entry)
                daily_budget=entry['campaign']['daily_budget']
                if daily_budget==0:
                    camp_need_change_daily.append(campaign_id)

        # NOTE(review): unusual pagination — offset/limit grow so successive
        # requests overlap (0/500, then 500/1000, ...); duplicates are absorbed
        # by the campaign_dict dedupe above. Confirm this is intended.
        offset=limit
        limit=limit+500
        if len(campaign_dict.keys())==0 or offset>=total_count:
            break
        time.sleep(1)

    shopid=await js_req_shopid(page,basic_info)
    # if camp_need_change_daily:
    #     await js_change_daily(page,basic_info,camp_need_change_daily)
    # aff_c=tms.ExecNoQuery('update ShopHome set ad_count=? where shopid=?',(len(campaign_dict.keys()),shopid))


    if tz_ended:
        # Mark extension keywords of products that no longer have a live ad.
        aff_end=0
        pause_pid_count=0
        pid_actives=list(campaign_dict.keys())
        rs_pids=tms.ExecQuery('select product_id from campaign where bid=?',(bid,))
        for r in rs_pids:
            ccc_pid=r[0]
            if ccc_pid not in pid_actives:
                aff000=tms.ExecNoQuery('update ShopeeeADTZKW set ad_ended=1 where productid=?',(ccc_pid,))
                if aff000:
                    aff_end+=aff000
                    pause_pid_count+=1
        logging.info(f'店铺:《{dpName}》,{bid},标记{pause_pid_count}个暂停广告,{aff_end}个拓展词')

    if is_ref:
        upload_camp(bid,dpName,need_save,shopid)
    logging.info(f'店铺:《{dpName}》,{bid},获取的campaign_dict长度:{len(campaign_dict.keys())},最大长度:{total_count}')

    pid_str=','.join([f"'{c_pid}'" for c_pid in campaign_dict.keys()])

    if is_del_task:
        # NOTE(review): SQL built by f-string interpolation (shopid / IN-list);
        # parameterized queries would be safer.
        aff_del=tms.ExecNoQuery(f"delete from ShopeeADTask where shopid='{shopid}' and product_id not in ({pid_str}) ")
        logging.info(f'铺:《{dpName}》,{bid},删除无效任务:{aff_del}个')

    return campaign_dict

async def get_camps_with_tit(page:Page,basic_info):
    """Fetch all ongoing CPC ads and return item_id -> [campaign_id, title].

    Same pagination and dedupe logic as get_camps, but also captures each
    entry's title; has no database side effects.
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    spdc,ck=await get_ck_and_spc_cds(page)
    url='https://seller.shopee.co.id/api/pas/v1/homepage/query/'
    params={
        'SPC_CDS':spdc,
        'SPC_CDS_VER': 2
    }
    st_time,ed_time=calculate_timestamps2()
    offset=0
    limit=500
    campaign_dict={}
    need_save=[]
    while True:
        data={
            "start_time":st_time,
            "end_time":ed_time,
            "filter":{"campaign_type":"new_cpc_homepage","state":"ongoing","search_term":""},
            "offset":offset,
            "limit":limit
            }
        json_data=await make_request(page,url,params,data)
        if not json_data:
            # NOTE(review): bare `raise` outside an except clause only raises
            # RuntimeError("No active exception to re-raise").
            raise
        total_count=json_data['data']['total']
        entry_list=json_data['data']['entry_list']


        for entry in entry_list:
            product_placement=entry['manual_product_ads']['product_placement']
            if product_placement!='targeting':
                campaign_id=str(entry['campaign']['campaign_id'])
                item_id=str(entry['manual_product_ads']['item_id'])
                trait_list=entry['trait_list']
                tit=entry['title']
                # Only the first live ad per item_id is kept (dedupe by item).
                if not campaign_dict.get(item_id):
                    if 'item_sold_out' not in trait_list:
                        campaign_dict[item_id]=[campaign_id,tit]
                        need_save.append(entry)

        # NOTE(review): unusual pagination — offset/limit grow so successive
        # requests overlap; duplicates are absorbed by the dedupe above.
        offset=limit
        limit=limit+500
        if len(campaign_dict.keys())==0 or offset>=total_count:
            break
        time.sleep(1)


    logging.info(f'店铺:《{dpName}》,{bid},获取的campaign_dict长度:{len(campaign_dict.keys())},最大长度:{total_count}')

    return campaign_dict

async def get_entry_list(page:Page,basic_info,is_contain_sold_out=False,ddd='last_three_month'):
    """Fetch the shop's ongoing non-targeting homepage ad entries.

    Deduplicates by item id and, when ``is_contain_sold_out`` is truthy,
    drops entries flagged ``item_sold_out``. Returns the raw entry dicts.
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    country=basic_info['Country']
    host=basic_info['host']
    iskj=basic_info['is_KJ']
    # Malaysia/Singapore shops run one hour ahead (UTC+8) of the default UTC+7
    tz_hours=8 if country in ['马来西亚','新加坡'] else 7
    st_time,ed_time=get_shopee_timestamp(ddd,tz_hours)
    seen_items={}
    api_url=f'https://{host}/api/pas/v1/homepage/query/'
    params=await get_params(page,basic_info)

    payload={
        "start_time":st_time,
        "end_time":ed_time,
        "filter":{"campaign_type":"new_cpc_homepage","state":"ongoing","search_term":""},
        "offset":0,
        "limit":500
    }

    json_data=await make_request(page,api_url,params,payload)

    total_count=json_data['data']['total']
    camp_list=[]
    for entry in json_data['data']['entry_list']:
        # targeting-placement ads are out of scope
        if entry['manual_product_ads']['product_placement']=='targeting':
            continue
        item_id=str(entry['manual_product_ads']['item_id'])
        if seen_items.get(item_id):
            continue
        # counterintuitively, a truthy flag EXCLUDES sold-out items
        if is_contain_sold_out and 'item_sold_out' in entry['trait_list']:
            continue
        campaign_id=str(entry['campaign']['campaign_id'])
        age_days=math.ceil((int(time.time())-entry['campaign']['start_time'])/86400)
        seen_items[item_id]=[campaign_id,age_days]
        camp_list.append(entry)

    logging.info(f'店铺:《{dpName}》,bid:{bid},在用广告数:{len(seen_items.keys())},历史广告总数:{total_count}')
    return camp_list

async def get_need_pause_ad(page:Page,basic_info,ccc_pids):
    """Decide which ad campaigns should be paused.

    Pauses campaigns by cost/order thresholds, item ids in ``ccc_pids``,
    age >= 14 days with zero orders, or items already handled in ShopeeOrder.
    Returns ``(ids_to_pause, all_campaign_ids)``.
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    camp_infos=await get_all_camp_info(page,basic_info)
    pause_ids=[]
    every_id=[]
    stats=[]
    # item ids whose orders were already processed get paused too
    rs_order_iscl=tms.ExecQuery('select distinct itemid from ShopeeOrder where shi_fou=1')
    handled_itemids=[r[0] for r in rs_order_iscl]
    now_ts=int(time.time())
    for entry in camp_infos:
        camp_id=str(entry['campaign']['campaign_id'])
        item_id=str(entry['manual_product_ads']['item_id'])
        cost=entry['report']['cost']/100000
        orders=entry['report']['broad_order']
        age_days=round((now_ts-entry['campaign']['start_time'])/86400)
        stats.append({
            'pid':item_id,
            'camid':camp_id,
            'cost':cost,
            'sold':orders
        })
        if cost>=40000 and orders==0:
            print(cost,orders)
            pause_ids.append(camp_id)
        elif cost>=80000 and orders<2:
            print(cost,orders)
            pause_ids.append(camp_id)
        elif cost>=120000 and orders<3:
            print(cost,orders)
            pause_ids.append(camp_id)
        elif item_id in ccc_pids:
            logging.info(f'店铺:《{dpName}》,{bid},广告:{camp_id},pid:{item_id},数据表《ShopeeADCost》加购条件不合格')
            pause_ids.append(camp_id)
        elif age_days>=14 and orders==0:
            logging.info(f'店铺:《{dpName}》,{bid},广告:{camp_id},超过14天没单')
            pause_ids.append(camp_id)
        elif item_id in handled_itemids:
            logging.info(f'店铺:《{dpName}》,{bid},广告:{camp_id},pid:{item_id},在订单表已处理')
            pause_ids.append(camp_id)

        every_id.append(camp_id)

    return pause_ids,every_id

async def get_POD_count(page:Page,basic_info):
    """Return how many more products can still be published on this shop.

    Queries listed / delisted / draft counts and subtracts them from the
    2000-listing cap; the result is padded by 10%.
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    spdc,ck=await get_ck_and_spc_cds(page)
    api_url='https://seller.shopee.co.id/api/v3/mpsku/list/v2/get_list_count'
    params={
        'SPC_CDS': spdc,
        'SPC_CDS_VER': 2,
        'list_types': 'live_all,delisted,draft',
        'list_type_for_quick_filter': 'live_all'
    }

    json_data=await make_request(page,api_url,params)
    count_infos=json_data['data']['count_infos']
    logging.info(count_infos)
    # BUG FIX: live_count was previously unbound (NameError) when the reply
    # contained no 'live_all' entry
    live_count=0
    unpublish_count=0
    for count_info in count_infos:
        if count_info['list_type']=='live_all':
            live_count=count_info['count']
        elif count_info['list_type'] in ['delisted','draft']:
            unpublish_count+=count_info['count']
    need_publish_count=2000-live_count-unpublish_count
    logging.info(f'店铺:《{dpName}》,bid:{bid},已上架:{live_count},已下架:{unpublish_count},需上架:{need_publish_count}')
    return int(need_publish_count*1.1)

async def get_xp_report(page:Page,file_names,basic_info):
    """Poll the mass-upload record list until every file in ``file_names``
    has a finished record, then return those records keyed by file name."""
    api_url='https://seller.shopee.co.id/api/tool/mass_product/get_mass_record_list/'
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    spdc,ck=await get_ck_and_spc_cds(page)
    params={'SPC_CDS': spdc,
            'SPC_CDS_VER': 2,
            'page_number': 1,
            'page_size': 20,
            'operation_type': 2
        }
    finished={}

    while True:
        js_data=await make_request(page,api_url,params)
        for record in (js_data['data']['list'] or []):
            cfile_name=record['user_file_name']
            if cfile_name not in file_names:
                continue
            record_status=record["record_status"]
            logging.info(f'店铺:《{dpName}》,{bid},等待文件《{cfile_name}》上传,当前状态:{record_status}')
            # statuses 2/3 appear to mean "still processing" — TODO confirm
            if record_status not in [2,3]:
                finished[cfile_name]=copy.copy(record)

        if set(finished.keys())==set(file_names):
            logging.info(f'店铺:《{dpName}》,{bid},{len(file_names)} 个文件已全部等待完毕')
            return finished
        await asyncio.sleep(10)

async def pause_ad(page:Page,basic_info,camp_ids):
    """Mass-pause the given campaigns via the PAS API, after flagging them
    paused in the ShopeeADCost table. Returns the API response."""
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    spdc,ck=await get_ck_and_spc_cds(page)
    api_url='https://seller.shopee.co.id/api/pas/v1/homepage/mass_edit/'
    params={
        'SPC_CDS': spdc,
        'SPC_CDS_VER': 2,
    }
    payload={"campaign_id_list":camp_ids,"type":"pause"}
    quoted_ids=','.join(f"'{camid}'" for camid in camp_ids)
    tms.ExecNoQuery(f'update ShopeeADCost set paused=1 where campaign_id in ({quoted_ids})')
    return await make_request(page,api_url,params,payload)
        
async def download_llbb(page:Page,basic_info):
    """Export the shop's past-30-day product performance (traffic) report.

    Requests a report export, polls until the site finishes generating it,
    then downloads it. Returns the xlsx content as a BytesIO, or None when
    the cumulative wait exceeds ~180 counts.
    """

    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    
    # rough cumulative wait budget; bumped by the polling/retry branches below
    wait_count=0
    while True:
        spdc,ck=await get_ck_and_spc_cds(page)
        api_date_url='https://seller.shopee.co.id/api/mydata/v2/product/performance/export/'
        st_time,ed_time=get_utc7_timestamps()
        params={
            'start_ts': st_time,
            'end_ts': ed_time,
            'period': 'past30days',
            'sort_by': '',
            'SPC_CDS': spdc,
            'SPC_CDS_VER': 2
        }
        
        # kick off report generation; the reply carries the report id to poll
        js_data=await make_request(page,api_date_url,params)
        #logging.info(js_data)
        rp_id=js_data['data']['report_id']
        file_name=js_data['data']['report_file_name']
        logging.info(f'店铺:《{dpName}》,{bid},成功获取报表ID:{rp_id},流量报表文件名:《{file_name}》,开始网页加载报表...')

        api_cre_url='https://seller.shopee.co.id/api/v3/settings/get_report/'
        params2={
            'SPC_CDS': spdc,
            'SPC_CDS_VER': 2,
            'report_id': rp_id

        }
        
        
        isdown=False
        # poll report status: 2 = ready, 4 = failed (retry whole export)
        while True:
            res_get_rp=await make_request(page,api_cre_url,params2)
            #logging.info(res_get_rp)
            bb_status=res_get_rp['data']['status']
            if res_get_rp['code']==0:
                if bb_status==2:
                    logging.info(f'店铺:《{dpName}》,bid:{bid},网页加载流量报表完毕,开始下载报表...')
                    isdown=True
                    break
                elif res_get_rp['data']['status']==4:
                    # generation failed: wait, reload the page and re-request
                    logging.info(f'店铺:《{dpName}》,bid:{bid},网页加载流量报表failue,等待一分钟,重新加载页面...')
                    await asyncio.sleep(65)
                    wait_count+=60
                    await page.reload()
                    break
            else:
                # NOTE(review): bare ``raise`` with no active exception raises
                # RuntimeError("No active exception to re-raise") — presumably
                # intended as a hard failure; confirm and replace with an
                # explicit exception
                logging.info(f'店铺:《{dpName}》,bid:{bid},网页加载失败,直接报错')
                raise
            logging.info(f'店铺:《{dpName}》,bid:{bid},等待网页加载报表中,当前状态值:{bb_status}')

            await asyncio.sleep(3)
            wait_count+=3
        if isdown:
            break
        wait_count+=1
        if wait_count>180:
            # give up after the wait budget is exhausted (implicit None return)
            return
            
        
    api_down_url='https://seller.shopee.co.id/api/v3/settings/download_report/'

    # fetch raw bytes of the finished report via the in-page JS downloader
    cot_byte_list= await js_get_cot(page,api_down_url,params2)
    logging.info(f'店铺:《{dpName}》,bid:{bid},流量报表内容长度{len(cot_byte_list)}')
    # with open(f'JSONDATA/{bid}_{file_name}','wb') as f:
    #     f.write(bytes(cot_byte_list))
    excel_content = BytesIO(bytes(cot_byte_list))

    return excel_content

async def js_bol_get_orders(page:Page,basic_info):
    """Fetch the newest 120 Allegro seller orders via an in-page request.

    Returns the decoded JSON response (or whatever js_req_bol yields on failure).
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    api_url='https://edge.salescenter.allegro.com/seller/orders'
    query={
        'sort': '-orderDate',
        'limit': 120
    }
    media_type='application/vnd.allegro.public.v1+json'
    return await js_req_bol(page,api_url,query,accept=media_type,content_type=media_type)

async def js_bol_get_mes(page:Page,basic_info):
    """Page through Allegro message-center threads and return them all.

    Returns None when a page request fails, otherwise the accumulated list
    of thread dicts (10 per page, stopping on a short page or empty page).
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    offset=0
    limit=10
    api_url='https://edge.salescenter.allegro.com/message-center/threads'
    act='application/vnd.allegro.public.v1+json'
    cty='application/vnd.allegro.public.v1+json'
    mes_list=[]
    j=1
    while True:
        paras={
            'limit': limit,
            'offset': offset
        }

        js_data=await js_req_bol(page,api_url,paras,accept=act,content_type=cty)
        if not js_data:
            return None
        cur_mes_list=js_data['threads']
        if len(cur_mes_list)==0:
            break
        mes_list.extend(cur_mes_list)
        logging.info(f'店铺:《{dpName}》,{bid},第 {j} 页成功获取 {len(cur_mes_list)}条客服消息,当前共 {len(mes_list)} 条客服消息')
        if len(cur_mes_list)<10:
            break
        # BUG FIX: offset was never advanced, so any shop with a full first
        # page (10 threads) looped on page 1 forever
        offset+=limit
        j+=1

    return mes_list

async def js_bol_get_discussions(page:Page,basic_info):
    """Collect every buyer-dispute page for this Allegro seller.

    Failed page requests are re-queued; returns the accumulated dispute
    list, or None after more than 10 errors.
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']

    api_url='https://edge.salescenter.allegro.com/mediation/disputes/seller'
    act='application/vnd.allegro.public.v2+json'
    cty='application/vnd.allegro.public.v2+json'
    failures=0
    disputes=[]
    pending=Queue(-1)
    pending.put(0)  # page 0 first; it tells us how many pages exist
    while True:
        if pending.empty():
            logging.info(f'店铺:《{dpName}》,bid:{bid},当前纠纷页已全部请求,退出循环')
            break

        if failures>10:
            logging.info(f'店铺:《{dpName}》,bid:{bid},错误超过10次,退出循环')
            return
        cur_p=pending.get()
        paras={
            'filterStatus': 'ALL',
            'itemsPerPage': 10,
            'page': cur_p,
            'messagesFilterStatus':''
        }

        res_data=await js_req_bol(page,api_url,paras,accept=act,content_type=cty)
        if not res_data:
            # reload and retry the same page later
            failures+=1
            await repeat_reload(page)
            await asyncio.sleep(6)
            pending.put(cur_p)
            continue

        page_items=res_data['content']
        disputes.extend(page_items)
        logging.info(f'店铺:《{dpName}》,bid:{bid},第{cur_p+1}页,共获取了{len(page_items)}个买家纠纷,当前总纠纷数:{len(disputes)}')

        if cur_p==0:
            # first page carries the total page count: enqueue the rest
            totalPages=res_data['totalPages']
            for p in range(1,totalPages):
                pending.put(p)
            logging.info(f'店铺:《{dpName}》,bid:{bid},获取最大页数:{totalPages},任务总页数剩余:{pending.qsize()} 页')

        logging.info(f'店铺:《{dpName}》,bid:{bid},当前位于第 {cur_p+1} 页,剩余:{pending.qsize()} 页')
        time.sleep(1)

    return disputes

async def js_bol_get_report_id(page:Page,basic_info):
    """Walk the Allegro bulk-import report list and store new report ids.

    Merges each unseen report into AlFileReport and stops early once an
    already-saved id is encountered. Returns ``(seller_id, newly_saved_count)``.
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    userName=basic_info['UserName']

    api_url='https://edge.salescenter.allegro.com/api/v3/sale/offers/bulk-imports'
    err_count=0
    req_queue=Queue(-1)
    req_queue.put(1)
    seller_id=await js_get_seller_id(page,basic_info)
    rs_fid=tms.ExecQuery('select fileid from AlFileReport where sellerID=?',(seller_id,))
    has_fids=[r[0] for r in rs_fid]
    logging.info(f'店铺:《{dpName}》,bid:{bid},已经保存了 {len(has_fids)} 个跟卖报告ID')
    is_tc=False
    saved_total=0  # BUG FIX: dedicated counter; the old code reused ``suc`` as
                   # both the per-item successfulEvents value and the running
                   # save count, so the returned number was garbage
    time_format='%a, %d %b %Y %H:%M:%S %z'
    while True:
        if req_queue.empty():
            logging.info(f'店铺:《{dpName}》,bid:{bid},当前纠纷页已全部请求,退出循环')
            break
        if err_count>30:
            logging.info(f'店铺:《{dpName}》,bid:{bid},错误超过30次,退出循环')
            break
        cur_p=req_queue.get()
        paras={
            'page':cur_p
        }
        res_data=await js_req_bol(page,api_url,paras,accept='application/json')
        if not res_data:
            err_count+=1
            await repeat_reload(page)
            await asyncio.sleep(6)
            req_queue.put(cur_p)
            continue

        rpots=res_data['imports']
        aff=0
        for item in rpots:
            fid=item['id']
            if fid in has_fids:
                # reports are newest-first: once we hit a stored id, the rest
                # of the history is already saved
                is_tc=True
                break
            parsed_time=datetime.datetime.strptime(item['uploadDate'],time_format)
            zds=['sellerID','FileID','shopID','bid','[all]','suc','fail','status','downloadPath','uploadDate']
            params=[seller_id,fid,userName,bid,item['allEvents'],item['successfulEvents'],
                    item['failedEvents'],item['status'],item['filename'],int(parsed_time.timestamp())]
            aff+=tms.merge_into('AlFileReport',zds,params,['FileID'])

        saved_total+=aff
        logging.info(f'店铺:《{dpName}》,{bid},第{cur_p}页,共 {len(rpots)} 个报告,其中新保存{aff}个')

        if is_tc:
            logging.info(f'店铺:《{dpName}》,{bid},位于第{cur_p}页,已采集全部新报告,退出循环')
            break

        if cur_p==1:
            # first page carries the element total: enqueue remaining pages
            totalElements=res_data['totalElements']
            max_p=math.ceil(totalElements/20)
            for p in range(2,max_p+1):
                req_queue.put(p)
            logging.info(f'店铺:《{dpName}》,{bid},获取最大页数:{max_p},任务总页数剩余:{req_queue.qsize()} 页')

        logging.info(f'店铺:《{dpName}》,{bid},当前位于第 {cur_p} 页,剩余:{req_queue.qsize()} 页')
        time.sleep(1)

    return seller_id,saved_total

pat_lm=re.compile(r'\((\d+)\)(?!.*\(\d+\))')

async def update_ALPayUrl(page:Page,basic_info):
    """Pull the shop's Allegro 'LIS' billing entries and record paid listings.

    Each unseen entry is inserted into AlOperation; when the offer id maps to
    an EAN in ``allgm``, that product is blacklisted (pw=-2 in allgoodpro) and
    recorded in qqEAN. Returns 0 when the billing request fails, 1 otherwise.
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    userName=basic_info['UserName']
    gname=basic_info['GroupName']
    yybm=basic_info['YYBM']
    sjbm=basic_info['SJBM']
    pay_api_url='https://edge.salescenter.allegro.com/billing/my-entries'
    offset=0
    limit=100
    pdatas=[]
    err_count=0
    while True:

        # NOTE(review): err_count is never incremented anywhere in this loop,
        # so this guard is currently dead code — confirm intended retry policy
        if err_count>=3:
            logging.error(f'店铺:《{dpName}》,{bid}, 请求付费链接出错')
            break
        params={
            'marketplaceId': 'allegro-pl',
            'offset': offset,
            'limit': limit,
            'dateFrom': '2018-11-14T06:56:23.038Z',
            'billingTypes': 'LIS'
        }
        paydatas=await js_req_bol(page,pay_api_url,params)

        if paydatas is None:
            return 0

        for item in paydatas:
            try:
                pay_id=item['id']
                offerid=item['offer']['id']
                pname=item['offer']['name']
                pay_type=item['billingType']
                billingDate=item['billingDate']
                billingTS = datetime.datetime.strptime(billingDate, "%Y-%m-%dT%H:%M:%S.%fZ").timestamp()
                billingTS=int(billingTS)
                current_datetime = datetime.datetime.now().strftime("%Y-%m-%d,%H:%M:%S")
                amount=item['value']['amount']
                balance=item['balance']
                sellerid=item['userId']
                pdatas.append([pay_id,offerid,pname,pay_type,amount,balance,sellerid,current_datetime,billingDate,billingTS])
            except Exception as e:
                # skip malformed entries rather than aborting the whole sweep
                logging.error(f'店铺:《{dpName}》,{bid},付费链接解析错误 => {e}')

        # an empty page means every billing entry has been fetched
        if len(paydatas)==0:
            logging.info(f'店铺:《{dpName}》,{bid},付费链接已全部获取,共 { len(pdatas)} 个 ,退出循环')
            break
        offset+=limit
    
    suc=0
    for pdata in pdatas:
        pay_id,offerid,pname,pay_type,amount,balance,sellerid,current_datetime,billingDate,billingTS=pdata
        rs=tms.ExecQuery('select count(*) from AlOperation where PayID=?',(pay_id,))
        if rs[0][0]>0:
            # already recorded — skip
            logging.info(f'店铺:《{dpName}》,{bid}, offerid:{offerid}已存在,忽略')
            continue
        else:
            zds=['PayID','UserID','SellerId','Bid','Bname','BGroupName','offerid','pean','pname','billingDate',
                'billingTS','billingType','amount','Balance','SJBM','YYBM','uploadTime']
            # try to resolve the offer id back to a product EAN
            rs_gm=tms.ExecQuery('select pean from allgm where SelfOfferID=? and pean is not null',(offerid,))
            if len(rs_gm)>0:
                pean=rs_gm[0][0]
                params=[pay_id,userName,sellerid,bid,dpName,gname,offerid,pean,pname,billingDate,
                        billingTS,pay_type,amount,balance,sjbm,yybm,current_datetime]

                # blacklist the product and record why in qqEAN
                aff_algd=tms.ExecNoQuery('update allgoodpro set pw=-2 where pean=?',(pean,))
                aff_qqean=tms.merge_into('qqEAN',['pean','des'],[pean,f'店铺:《{dpName}》,{bid}是付费链接'],['pean'],True)
                logging.info(f'offerid:{offerid},拉黑 pean:{pean},状态:{aff_algd},加到qqEAN:{aff_qqean}')
                msg='offerid匹配到EAN'
            else:
                params=[pay_id,userName,sellerid,bid,dpName,gname,offerid,None,pname,billingDate,
                    billingTS,pay_type,amount,balance,sjbm,yybm,current_datetime]
                
                msg='offerid匹配不到EAN'

            zdstr=','.join(zds)
            wstr=','.join(['?' for _ in params])
            sqlstr=f'insert into AlOperation({zdstr}) values({wstr})'
            aff=tms.ExecNoQuery(sqlstr,params)
            logging.info(f'店铺:《{dpName}》,{bid},付费链接:{offerid},{msg}:{aff}')
            suc+=aff
    logging.info(f'店铺:《{dpName}》,{bid},更新 {suc} 个付费链接')

    return 1
    
async def update_ALHomeinfo(page:Page,basic_info):
    """Scrape the Allegro seller dashboard and upsert it into ALHome.

    Parses funds, settlements, fee summary, sales quality and rating figures
    out of the already-loaded dashboard page, appends the 7-day view counts
    from the seller-statistics API, then inserts or updates one ALHome row
    keyed by sellerId. Returns the DB-affected row count, or 0 when the
    7-day view request fails.
    """
    sellerId=basic_info['sellerId']
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    userName=basic_info['UserName']
    gname=basic_info['GroupName']
    yybm=basic_info['YYBM']
    sjbm=basic_info['SJBM']
    dp_status='正常'
    # a "Sales suspension" header on the page marks the shop as banned
    el_dp_status=await page.xpath('//h6[text()="Sales suspension"]')
    if el_dp_status:
        dp_status='封'
    
    cot=await page.content()
    jx_html=etree.HTML(cot)
    # -- funds available for payout --
    available_funds=None
    el_available_funds=jx_html.xpath('//span[text()="Available funds"]/../..//output/text()')
    if el_available_funds:
        available_funds_str=el_available_funds[0]
        # strip currency, approximation sign and non-breaking spaces
        available_funds_str=available_funds_str.replace('PLN','').replace('～','').replace("\xa0", "").strip()
        available_funds=try_to_float(available_funds_str)
    
    # -- balance of settlements with Allegro --
    settlements_with_allegro=None
    el_settlements_with_allegro=jx_html.xpath('//span[text()="Settlements with Allegro"]/../..//output/text()')
    if el_settlements_with_allegro:
        settlements_with_allegro_str=el_settlements_with_allegro[0]
        settlements_with_allegro_str=settlements_with_allegro_str.replace('PLN','').replace('～','').replace("\xa0", "").strip()
        settlements_with_allegro=try_to_float(settlements_with_allegro_str)
    

    # -- billing period shown next to "Fee summary" --
    summary_of_costs=None
    el_summary_of_costs=jx_html.xpath('//span[text()="Fee summary"]/../span[2]/time/text()')
    if el_summary_of_costs:
        summary_of_costs_str=el_summary_of_costs[0]
        summary_of_costs_str=summary_of_costs_str.strip().strip('()')
        summary_of_costs=boldate_chinese_date_range(summary_of_costs_str)


    # -- sales and delivery value for the period --
    sales_value=None
    el_sales_value=jx_html.xpath('//span[text()="Sales and delivery value"]/../../div/span/strong/text()')
    if el_sales_value:
        sales_value_str=el_sales_value[0]
        sales_value_str=sales_value_str.replace('PLN','').replace('～','')
        
        sales_value=try_to_float(re.sub(r"\s", "", sales_value_str))
    
    # -- total fees charged for the period --
    sales_costs=None
    el_sales_costs=jx_html.xpath('//span[text()="Total fees"]/../../div/span/strong/text()')
    if el_sales_costs:
        sales_costs_str=el_sales_costs[0]
        sales_costs_str=sales_costs_str.replace('PLN','').replace('～','')
        sales_costs=try_to_float(re.sub(r"\s", "", sales_costs_str))

    # -- sales quality score (out of 500 pts) --
    sales_quality=None
    el_sales_quality=jx_html.xpath('//div[text()="Level"]/../div[3]//text()')
    if el_sales_quality:
        sales_quality_str=el_sales_quality[0]
        sales_quality_str=sales_quality_str.replace('of 500 pts','').strip()
        sales_quality=try_to_int(re.sub(r"\s", "", sales_quality_str))

    # -- open buyer discussions --
    unresolved_discussions=None
    el_unresolved_discussions=jx_html.xpath('//output[@id="Unresolved Discussions"]/text()')
    if el_unresolved_discussions:
        unresolved_discussions=try_to_int(re.sub(r"\s", "", el_unresolved_discussions[0]))
    
    # -- on-time dispatch percentage, stored as a 0..1 ratio --
    on_time_dispatch=None
    el_on_time_dispatch=jx_html.xpath('//output[@id="On-time dispatch"]/text()')
    if el_on_time_dispatch:
        on_time_dispatch_str=el_on_time_dispatch[0]
        on_time_dispatch_str=on_time_dispatch_str.replace('%','')
        on_time_dispatch=try_to_float(re.sub(r"\s", "", on_time_dispatch_str))
        if on_time_dispatch:
            on_time_dispatch=round(on_time_dispatch/100,4)
    
    # -- "Recommended" / "Not recommended" rating counts --
    reconmended_ratings=None
    el_reconmended_ratings=jx_html.xpath("//output[@id='\"Recommended\" ratings']/text()")
    if el_reconmended_ratings:

        reconmended_ratings=try_to_float(re.sub(r"\s", "", el_reconmended_ratings[0]))
    
    not_reconmended_ratings=None
    el_not_reconmended_ratings=jx_html.xpath("//output[@id='\"Not recommended\" ratings']/text()")
    if el_not_reconmended_ratings:
        not_reconmended_ratings=try_to_float(re.sub(r"\s", "", el_not_reconmended_ratings[0]))
    pcount=await get_alegro_total_count(page,basic_info)

    homeinfo_dict={
        'DpStatus':dp_status,
        'Available_funds':available_funds,
        'Settlements_with_Allegro':settlements_with_allegro,
        'Summary_of_costs':summary_of_costs,
        'Sales_value':sales_value,
        'Sales_costs':sales_costs,
        'Sales_quality':sales_quality,
        'Unresolved_discussions':unresolved_discussions,
        'On_time_dispatch':on_time_dispatch,
        'Reconmended_ratings':reconmended_ratings,
        'Not_Reconmended_ratings':not_reconmended_ratings,
        'pcount':pcount
    }
    
    logging.info(f'店铺:《{dpName}》,{bid},成功获取sellerId:{sellerId},店铺状态:{dp_status}')

    # append the last-7-day page view counts as views_day1..views_day7
    api_url='https://edge.salescenter.allegro.com/sellers/dashboard/seller-statistics/offer-views/last-7-days'
    json_data= await js_req_bol(page,api_url)
    if not json_data:
        logging.info(f'店铺:《{dpName}》,{bid},采集首页获取7天流量失败')
        return 0
    j=1
    for view_part in json_data['current']['parts']:
        homeinfo_dict[f'views_day{j}']=view_part['pageViews']
        j+=1
        if j>7:
            break
    logging.info(f'店铺:《{dpName}》,{bid},成功获取七天流量')
    #print(homeinfo_dict)
    
    # insert when this seller has no ALHome row yet, otherwise update in place
    cur_time=datetime.datetime.now() 
    zds=['sellerId','bid','BName','GroupName','userName','SJBM','YYBM']
    params=[sellerId,bid,dpName,gname,userName,sjbm,yybm]
    zds=zds+list(homeinfo_dict.keys())
    params=params+list(homeinfo_dict.values())
    rs=tms.ExecQuery('select count(*) from ALHome where sellerId=?',(sellerId,))
    if rs[0][0]==0:
        zds.extend(['upload_time','update_time'])
        params.extend([cur_time,cur_time])
        zdstr=','.join(zds)
        wstr=','.join(['?' for _ in params])
        aff=tms.ExecNoQuery(f'insert into ALHome({zdstr}) values ({wstr})',params)
    else:
        zds.append('update_time')
        params.append(cur_time)
        params.append(sellerId)
        zdstr=','.join([f'{zd}=?' for zd in zds])
        aff=tms.ExecNoQuery(f'update ALHome set {zdstr} where sellerId=?',params)
    
    return aff

async def update_ALOrder(page:Page,basic_info):
    """Fetch recent Allegro orders and merge each line item into ALOrder.

    Every order line item becomes its own row (with ``isMutilOffer`` marking
    orders that carry more than one offer). Returns 0 when the fetch fails,
    1 otherwise.
    """
    sellerId=basic_info['sellerId']
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    userName=basic_info['UserName']
    gname=basic_info['GroupName']
    yybm=basic_info['YYBM']
    sjbm=basic_info['SJBM']


    res_data=await js_bol_get_orders(page,basic_info)
    if not res_data:
        logging.info(f'店铺:《{dpName}》,{bid},js请求订单信息失败')
        return 0
    
    orders=res_data['orders']
    totalCount=res_data['totalCount']
    logging.info(f'店铺:《{dpName}》,{bid},一共有 {totalCount} 个订单,已获取 {len(orders)} 个')

    order_cols=[col.lower() for col in tms.GetCol('ALOrder')]
    suc=0
    for order_data in orders:
        pros=copy.copy(order_data['lineItems'])
        # BUG FIX: the flag used to be set once and never reset, so every
        # order after the first multi-item one was wrongly marked 1;
        # recompute it per order
        isMutilOffer=1 if len(pros)>1 else 0
        for pjj in range(len(pros)):
            zds=['bid','BName','GroupName','userName','SJBM','YYBM']
            paras=[bid,dpName,gname,userName,sjbm,yybm]
            # flatten with exactly one line item so lineItems_0_* keys map
            # to this item
            order_data['lineItems']=[pros[pjj]]
            flat_dict = flatten(order_data, reducer="underscore",enumerate_types=(list,))
            flat_dict['order_id']=flat_dict['id']
            cur_order_id=flat_dict.pop('id')
            flat_dict['isMutilOffer']=isMutilOffer
            if 'lineItems_0_offer_externalId' in flat_dict:
                if not flat_dict['lineItems_0_offer_externalId']:
                    flat_dict.pop('lineItems_0_offer_externalId')
            logging.info(f"店铺:《{dpName}》,{bid},订单《{cur_order_id}》订单产品数量为:{len(pros)}")

            # keep only keys that exist as ALOrder columns, normalizing
            # bools to 0/1 and numeric strings to float
            for k,v in flat_dict.items():
                if k.lower() in order_cols:
                    zds.append(k)
                    if isinstance(v,bool):
                        para_v=1 if v else 0
                    elif isinstance(v,str) and is_decimal(v):
                        para_v=float(v)
                    else:
                        para_v=v
                    paras.append(para_v)
            
            aff=tms.merge_into('ALOrder',zds,paras,['order_id','lineItems_0_offer_id'],True)
            suc+=aff
            logging.info(f'店铺:《{dpName}》,{bid},存入订单《{cur_order_id}》,状态:{aff}')
    
    return 1

async def update_ALBuyerMes(page:Page,basic_info):
    """Pull message-center threads and merge each one into ALMessage.

    Returns 0 when fetching fails, 1 otherwise.
    """
    sellerId=basic_info['sellerId']
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    userName=basic_info['UserName']
    gname=basic_info['GroupName']
    yybm=basic_info['YYBM']
    sjbm=basic_info['SJBM']


    mes_list=await js_bol_get_mes(page,basic_info)
    if mes_list is None:
        logging.info(f'店铺:《{dpName}》,{bid},js请求买家消息失败')
        return 0

    # threads whose last message came from the buyer still need a reply
    last_buy_list=[m for m in mes_list if m['lastMessage']['author']['isInterlocutor']]
    logging.info(f' 店铺:《{dpName}》,{bid},成功采集 {len(mes_list)}个 最后消息,其中 {len(last_buy_list)} 个消息需要回复')

    saved=0
    for thread in mes_list:
        cols=['sellerId','bid','BName','GroupName','userName','SJBM','YYBM']
        vals=[sellerId,bid,dpName,gname,userName,sjbm,yybm]
        formatted_time=None
        try:
            parsed=datetime.datetime.strptime(thread['lastMessageDateTime'], "%Y-%m-%dT%H:%M:%S.%fZ")
            formatted_time=parsed.strftime("%Y-%m-%d %H:%M:%S.%f")[:-3]
        except Exception as e:
            logging.error(f'转化最后消息时间格式错误 => {e}')
        mes_id=thread['id']
        buyer=thread['interlocutor']
        last=thread['lastMessage']
        row={
            'message_id':mes_id,
            'is_read':thread['read'],
            'interlocutor_id':buyer['id'],
            'interlocutor_login':buyer['login'],
            'interlocutor_avatarUrl':buyer['avatarUrl'],
            'interlocutor_url':f"https://allegro.pl{buyer['url']}",
            'lastMessage_author_id':last['author']['id'],
            'lastMessage_text':last['text'],
            'lastMessage_isInterlocutor':last['author']['isInterlocutor'],
            'lastMessageDateTime':formatted_time
        }
        cols=cols+list(row.keys())
        vals=vals+list(row.values())

        aff=tms.merge_into('ALMessage',cols,vals,['message_id'],True)
        saved+=aff
        logging.info(f'店铺:《{dpName}》,{bid},更新买家消息《{mes_id}》,状态:{aff}')

    return 1

async def update_ALDiscussion(page:Page,basic_info):
    """Fetch buyer disputes and merge each one into the ALDiscussion table.

    Returns 0 when the scrape fails, 1 otherwise.
    """
    sellerId=basic_info['sellerId']
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    userName=basic_info['UserName']
    gname=basic_info['GroupName']
    yybm=basic_info['YYBM']
    sjbm=basic_info['SJBM']


    cots=await js_bol_get_discussions(page,basic_info)
    if cots is None:
        logging.info(f'店铺:《{dpName}》,{bid},js请求纠纷信息失败')
        return 0

    logging.info(f'店铺:《{dpName}》,{bid},一共采集了 {len(cots)} 个纠纷信息')

    table_cols=[c.lower() for c in tms.GetCol('ALDiscussion')]
    for record in cots:
        cols=['seller_id','bid','BName','GroupName','userName','SJBM','YYBM']
        vals=[sellerId,bid,dpName,gname,userName,sjbm,yybm]
        flat=flatten(record, reducer="underscore",enumerate_types=(list,))
        flat['discussion_id']=flat['id']
        disc_id=flat.pop('id')
        # keep only keys present as table columns; normalize bools to 0/1
        # and numeric strings to float
        for key,value in flat.items():
            if key.lower() not in table_cols:
                continue
            cols.append(key)
            if isinstance(value,bool):
                vals.append(1 if value else 0)
            elif isinstance(value,str) and is_decimal(value):
                vals.append(float(value))
            else:
                vals.append(value)

        aff=tms.merge_into('ALDiscussion',cols,vals,['discussion_id'],True)
        logging.info(f'店铺:《{dpName}》,{bid},存入纠纷《{disc_id}》,状态:{aff}')

    
    return 1

async def js_req_search_count(page:Page,kw,pid,camp_id=None):
    """Look up the Shopee search volume for keyword ``kw`` on item ``pid``.

    Returns the volume of the top suggestion, or None when the API reply
    is not OK (the raw reply is logged in that case).
    """
    api_url='https://seller.shopee.co.id/api/pas/v1/setup_helper/search_keyword/'
    spdc,ck=await get_ck_and_spc_cds(page)
    params={
        'SPC_CDS':spdc,
        'SPC_CDS_VER': 2
    }
    # log-context differs depending on whether a campaign already exists
    suggest_log={"page":"suggest_creation","campaign_id":None}
    if camp_id is not None:
        suggest_log={"campaign_id":int(camp_id),"page":"suggest_after_creation"}
    data={
        "keyword":kw,
        "campaign_type":"product",
        "item_id":int(pid),
        "suggest_log_data":suggest_log
    }

    js_data=await make_request(page,api_url,params,data)
    if js_data['msg']!='OK':
        logging.info(js_data)
        return None
    listdata=js_data['data']
    cjkw=listdata[0]['keyword']
    cur_searchcount=listdata[0]['search_volume']
    logging.info(f'《{kw}》 搜索到:《{cjkw}》,采集搜索量为:{cur_searchcount}')
    return cur_searchcount

async def js_get_xp_stay_pro(page:Page,basic_info,stay=10):
    """Collect live products that were created more than *stay* days ago.

    Pages through the seller product-list API (48 items per page) and keeps
    every product whose create_time is older than ``stay`` days.

    Args:
        page: logged-in seller page.
        basic_info: shop descriptor dict (uses BrowserID / DpName).
        stay: age threshold in days (default 10).

    Returns:
        (max_page, {itemid: [itemid, parent_sku, min_price]});
        (0, {}) when the shop has no live products.

    Raises:
        RuntimeError: after more than three failed API requests.
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    spdc,ck=await get_ck_and_spc_cds(page)
    api_url='https://seller.shopee.co.id/api/v3/mpsku/list/v2/get_product_list'
    cs_pros={}
    params={
        'SPC_CDS': spdc,
        'SPC_CDS_VER': 2,
        'page_number': 1,
        'page_size': 48,
        'list_type': 'live_all',
        'need_ads': True
    }

    req_p=Queue(-1)
    req_p.put(1)
    err_count=0
    stay_time=86400*stay
    cur_time=int(time.time())
    while True:
        if req_p.empty():
            break
        if err_count>3:
            logging.info(f'店铺:《{dpName}》,{bid},错误请求超过三次弹出错误')
            # Bug fix: a bare `raise` outside an except block only produces
            # "RuntimeError: No active exception to re-raise"; raise an
            # explicit, descriptive error instead.
            raise RuntimeError(f'店铺:《{dpName}》,{bid},错误请求超过三次弹出错误')
        p=req_p.get()
        params['page_number']=p
        js_data=await make_request(page,api_url,params)
        if js_data is None:
            # Failed request: back off, reload the page and retry this page number.
            err_count+=1
            await asyncio.sleep(5)
            await repeat_reload(page)
            req_p.put(p)
            continue

        page_info=js_data['data']['page_info']
        if page_info['total']==0:
            return (0,cs_pros)

        pros=js_data['data']['products']
        if p==1:
            # The first page reveals the total count; enqueue the remaining pages.
            pz=page_info['page_size']
            ptoal=page_info['total']
            max_p=math.ceil(ptoal/pz)
            for ppp in range(2,max_p+1):
                req_p.put(ppp)

        for pro in pros:
            try:
                pro_itemid=str(pro['id'])
                pro_create_time=pro['create_time']
                pro_min_price=int(float(pro['price_detail']['price_min']))
                pro_ph_itemid=pro['parent_sku']
                if cur_time-pro_create_time>stay_time:
                    cs_pros[pro_itemid]=[pro_itemid,pro_ph_itemid,pro_min_price]
            except Exception:
                # Tolerate malformed product entries; keep processing the rest.
                traceback.print_exc()

    return (max_p,cs_pros)

async def update_ad_tuo_zhan_kw(page:Page,basic_info,pid,camp_id):
    """Refresh stored search volumes for a product's pending expansion keywords.

    Reads candidate rows from YN_AD_CSV (volume >= 50, not yet added/edited),
    re-collects each keyword's search volume via js_req_search_count and
    writes the new value back. Returns the number of keywords updated.
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']

    sql_tzc='''
                select Search_Query,Search_volume from YN_AD_CSV where product_id=? 
                and Search_volume>=50 and wordnum is not null
                and is_added=0 and is_edited=0
            '''
    pending_rows=tms.ExecQuery(sql_tzc,(pid,))
    logging.info(f'店铺:《{dpName}》,{bid},拓展词需要更新的搜索量有:{len(pending_rows)}个')

    update_count=0
    for kw,old_scount in pending_rows:
        scount= await js_req_search_count(page,kw,pid,camp_id)
        if scount is None:
            # Collection failed for this keyword; leave the stored value untouched.
            logging.info(f'店铺:《{dpName}》,{bid},关键词《{kw}》搜索量采集失败')
            continue
        aff=tms.ExecNoQuery('update YN_AD_CSV set Search_volume=? where Search_Query=?',(scount,kw))
        logging.info(f'店铺:《{dpName}》,{bid},更新了关键词《{kw}》搜索量 {old_scount} => {scount},状态：{aff}')
        update_count+=1

    return update_count

async def get_xp_draft_pro(page:Page,basic_info):
    """Fetch the shop's draft-product list and dump it to a local JSON file.

    Debug helper: prints the raw API response and writes it to
    JSONDATA/虾皮草稿箱产品.json; returns nothing.
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    spdc,ck=await get_ck_and_spc_cds(page)
    api_url='https://seller.shopee.co.id/api/v3/mpsku/list/v2/get_draft_product_list'
    query_params={
        'SPC_CDS': spdc,
        'SPC_CDS_VER': 2,
        'page_number': 1,
        'page_size': 100,
        'qc_status': 'all'
    }

    js_data= await make_request(page,api_url,query_params)
    print(js_data)

    with open('JSONDATA/虾皮草稿箱产品.json','w',encoding='utf-8') as f:
        json.dump(js_data,f,ensure_ascii=False,indent=4)

async def js_publish_xp_darft_pro(page:Page,basic_info):
    """Publish one hard-coded draft product and dump the API response.

    Debug helper: posts a fixed unpublished id to the mass-publish endpoint,
    prints the result and writes it to JSONDATA/虾皮草稿箱上架结果.json.
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    spdc,ck=await get_ck_and_spc_cds(page)

    api_url='https://seller.shopee.co.id/api/tool/mass_product/publish_product/'
    cds_params={
        'SPC_CDS': spdc,
        'SPC_CDS_VER': 2
    }
    payload={"unpublished_ids":[1310101095],"unlisted":False}

    js_data=await make_request(page,api_url,cds_params,payload)
    print(js_data)

    with open('JSONDATA/虾皮草稿箱上架结果.json','w',encoding='utf-8') as f:
        json.dump(js_data,f,ensure_ascii=False,indent=4)

async def js_get_xp_prolist(page:Page,basic_info,list_type='live_all'):
    """Page through the seller product-list API and return every product dict.

    Returns [] when the shop has no products of *list_type*, and None after
    more than three failed requests.
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    spdc,ck=await get_ck_and_spc_cds(page)

    api_url=f'https://{host}/api/v3/opt/mpsku/list/v2/get_product_list'
    params={
        'SPC_CDS': spdc,
        'SPC_CDS_VER': 2,
        'page_number': 1,
        'page_size': 48,
        'list_type': list_type,
        'need_ads': True
    }

    page_queue=Queue(-1)
    page_queue.put(1)
    failures=0
    collected=[]
    while not page_queue.empty():
        if failures>3:
            logging.info(f'店铺:《{dpName}》,{bid},获取产品错误请求超过三次弹出错误')
            return
        cur_page=page_queue.get()
        params['page_number']=cur_page
        js_data=await make_request(page,api_url,params)
        if js_data is None:
            # Failed request: back off, reload and requeue this page number.
            failures+=1
            await asyncio.sleep(5)
            await repeat_reload(page)
            page_queue.put(cur_page)
            continue

        page_info=js_data['data']['page_info']
        if page_info['total']==0:
            return []

        if cur_page==1:
            # First page reveals the total count; enqueue the remaining pages.
            max_p=math.ceil(page_info['total']/page_info['page_size'])
            for extra_page in range(2,max_p+1):
                page_queue.put(extra_page)

        collected.extend(js_data['data']['products'])

    logging.info(f'店铺:《{dpName}》,{bid},{list_type},共 {max_p} 页,获取 {len(collected)} 个产品')
    return collected

async def get_ad_conversion(page:Page,basic_info):
    """Build a map of item_id -> (order_amount, direct_roi) from the PAS ad homepage report.

    Queries up to `limit` manual product ads for the recent window and keeps
    the first report seen per item_id, skipping 'targeting' placements.
    """

    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    country=basic_info['Country']
    # Report window length in days: 7 by default, 8 for MY/SG shops.
    sq=7
    if country in ['马来西亚','新加坡']:
        sq=8
    st_time,ed_time=get_shopee_timestamp(sq=sq)

    offset=0
    limit=500
    campaign_dict={}
    api_url=f'https://{host}/api/pas/v1/homepage/query/'
    params=await get_params(page,basic_info)

    data={
        "start_time":st_time,
        "end_time":ed_time,
        "filter":{"campaign_type":"new_cpc_homepage","state":"all","search_term":""},
        "offset":offset,
        "limit":limit
    }

    # NOTE(review): a None response from make_request would raise here;
    # callers appear to accept that — confirm.
    json_data=await make_request(page,api_url,params,data)
    
    total_count=json_data['data']['total']
    entry_list=json_data['data']['entry_list']

    for entry in entry_list:
        product_placement=entry['manual_product_ads']['product_placement']
        if product_placement!='targeting':
            item_id=str(entry['manual_product_ads']['item_id'])
            # NOTE(review): reads 'broad_order_amount' but names the variable
            # direct_order_amount — confirm which metric is intended.
            direct_order_amount=entry['report']['broad_order_amount']
            direct_roi=entry['report']['direct_roi']
            # Keep only the first (non-falsy) report per item_id.
            if not campaign_dict.get(item_id):
                campaign_dict[item_id]=(direct_order_amount,direct_roi)

    logging.info(f'店铺:《{dpName}》,bid:{bid},订单映射:{len(campaign_dict.keys())}个,历史广告总数:{total_count}个')
    return campaign_dict

def update_click_task(basic_info,pro):
    """Upsert one product into the Click_Task table.

    New products are inserted; an existing product is refreshed (and its
    cached keywords in Click_Task_KW cleared, with is_cj_kw/is_split_pname
    reset) only when its name changed, otherwise it is left untouched.

    Args:
        basic_info: shop descriptor dict (BrowserID, DpName, UserName, ...).
        pro: product dict as returned by the seller product-list API.

    Returns:
        Number of affected Click_Task rows (0 when nothing was written).
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    userName=basic_info['UserName']
    gname=basic_info['GroupName']
    sjbm=basic_info['SJBM']
    shopid=basic_info.get('shopid')

    pro_itemid=str(pro['id'])
    # Bug fix: str(None) yields the truthy string 'None', which fabricated a
    # bogus ad_url and stored 'None' in the DB when no campaign is attached.
    raw_campaign_id=pro['boost_info'].get('campaign_id')
    campaign_id=str(raw_campaign_id) if raw_campaign_id is not None else None
    parent_sku=pro['parent_sku']
    pname=pro['name']
    pimage=f"https://down-id.img.susercontent.com/file/{pro['cover_image']}"
    pro_create_time=timestamp_to_sql_datetime(pro['create_time'])
    pro_modify_time=timestamp_to_sql_datetime(pro['modify_time'])
    pro_min_price=int(float(pro['price_detail']['price_min']))
    pro_max_price=int(float(pro['price_detail']['price_max']))
    ad_url=None
    if campaign_id:
        ad_url=f'https://seller.shopee.co.id/portal/marketing/pas/product/manual/{campaign_id}'

    pdata={
        'ItemID':pro_itemid,
        'campaign_id':campaign_id,
        'parent_sku':parent_sku,
        'shopid':shopid,
        'pname':pname,
        'purl':f'https://shopee.co.id/product/{shopid}/{pro_itemid}',
        'ad_url':ad_url,
        'pimage':pimage,
        'price_min':pro_min_price,
        'price_max':pro_max_price,
        'modify_time':pro_modify_time,
        'create_time':pro_create_time,
        'stock':pro['stock_detail']['total_available_stock'],
        'bid':bid,
        'BName':dpName,
        'GName':gname,
        'userName':userName,
        'SJBM':sjbm
    }
    rs= tms.ExecQuery('select pname from Click_Task where ItemID=?',(pro_itemid,))
    aff_del_kw=0
    aff_p=0
    if len(rs)==0:
        pdata['add_type']='新增数据'
        zds=list(pdata.keys())
        paras=list(pdata.values())
        aff_p=tms.merge_into('Click_Task',zds,paras,['ItemID'],True)
    else:
        old_pname=rs[0][0]
        if old_pname!=pname:
            # Title changed: force keyword re-collection and re-splitting.
            pdata['add_type']='标题变化更新'
            pdata['is_cj_kw']=0
            pdata['is_split_pname']=0
            zds=list(pdata.keys())
            paras=list(pdata.values())
            aff_del_kw=tms.ExecNoQuery('delete from Click_Task_KW where itemid=?',(pro_itemid,))
            aff_p=tms.merge_into('Click_Task',zds,paras,['ItemID'],True)
        else:
            pdata['add_type']='已存在数据忽略'
    logging.info(f"店铺:《{dpName}》,{bid},itemid:{pro_itemid},{pdata['add_type']}:{aff_p},删除无效下拉词:{aff_del_kw}")

    return aff_p

async def update_XPOrder(page:Page,basic_info):
    """Scrape all orders of the shop (order_list_tab 100) and upsert them into ShopeeOrder.

    Flow:
      1. Page through search_order_list_index to collect order id params.
      2. Fetch order cards in batches of 5 via get_order_list_card_list.
      3. Flatten each card's item list into rows and merge_into ShopeeOrder
         keyed on (order_id, skuid), enriched with ad conversion data.

    Returns the number of saved rows, 0 when there are no orders, or None
    after repeated request failures.
    """

    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    userName=basic_info['UserName']
    gname=basic_info['GroupName']
    host=basic_info['host']
    yybm=basic_info['YYBM']
    sjbm=basic_info['SJBM']
    country=basic_info['Country']
    oid_list_url=f'https://{host}/api/v3/order/search_order_list_index'
    params=await get_params(page,basic_info)

    # Queue of result pages still to fetch; starts with page 1.
    req_order_list_que=Queue(-1)
    req_order_list_que.put(1)
    err_count=0
    order_param_list=[]
    # item_id -> (order_amount, roi) from the ad homepage report.
    itemid_adc_dict=await get_ad_conversion(page,basic_info)
    while True:
        if req_order_list_que.empty():
            break
        if err_count >3:
            logging.info(f'店铺:《{dpName}》,{bid},获取oid接口错误超过3次')
            return
        page_number=req_order_list_que.get()
        from_page_number=page_number-1 if page_number>1 else 1
        oid_list_data = {
            "order_list_tab":100,
            "entity_type":1,
            "pagination":
                {"from_page_number":from_page_number,
                "page_number":page_number,
                "page_size":40},
            "filter":
                {"fulfillment_type":0,"is_drop_off":0,"fulfillment_source":0,"action_filter":0},
            "sort":{"sort_type":3,"ascending":False}
        }

        js_data=await make_request(page,oid_list_url,params,oid_list_data)

        # Transport failure or API-level error: back off, reload, requeue page.
        if js_data is None:
            err_count+=1
            await repeat_reload(page)
            await asyncio.sleep(6)
            req_order_list_que.put(page_number)
            continue

        if js_data['code']!=0:
            err_count+=1
            await repeat_reload(page)
            await asyncio.sleep(6)
            req_order_list_que.put(page_number)
            continue
        
        oid_list=js_data['data']['index_list']
        if len(oid_list)==0:
            break

        if page_number==1:
            # First page reveals the total; enqueue the remaining pages (40/page).
            total_count=js_data['data']['pagination']['total']
            max_p=math.ceil(total_count/40)
            for ppp in range(2,max_p+1):
                req_order_list_que.put(ppp)
            logging.info(f'店铺:《{dpName}》,{bid},共 {total_count} 个订单,需请求 {max_p} 页')
        
        order_param_list.extend(oid_list)


    print(f'店铺:《{dpName}》,{bid},已获取 {len(order_param_list)} 个订单ID,开始根据获取订单卡片信息...')

    if not order_param_list:
        return 0
    
    shopid=order_param_list[0]['shop_id']
    order_card_url=f'https://{host}/api/v3/order/get_order_list_card_list'

    order_info_list=[]
    suc=0
    # Fetch order cards in batches of 5, each batch retried up to 3 times.
    for j in range(math.ceil(len(order_param_list)/5)):
        cur_order_param_list=order_param_list[j*5:(j+1)*5]
        for jj in range(3):
            order_card_data={
                        "order_list_tab":100,
                        "need_count_down_desc":True,
                        "order_param_list":cur_order_param_list
                        }
            
            js_data_order=await make_request(page,order_card_url,params,order_card_data)
            if js_data_order and js_data_order['code']==0:
                break
        if js_data_order is None:
            logging.info(f'店铺:《{dpName}》,{bid},获取订单卡片信息错误超过3次')
            return
        # NOTE(review): if all 3 attempts returned a non-None response with
        # code != 0, the ['data'] access below may fail — confirm upstream.
        card_list=js_data_order['data']['card_list']
        
        for card in card_list:
            try:
                # card_status 1: plain order card; 2: package-level card
                # (fields then live under package_list[0]).
                card_status=1
                order_card= card.get('order_card')
                if not order_card:
                    order_card=card.get('package_level_order_card')
                    card_status=2
                
                order_id=order_card['order_ext_info']['order_id']
                buyer_id=order_card['order_ext_info'].get('buyer_user_id')
                order_sn=order_card['card_header']['order_sn']
                buyer_name=order_card['card_header']['buyer_info'].get('username')
                # The first six digits of order_sn encode the date as YYMMDD.
                date_str = order_sn[:6]
                order_date = datetime.datetime(int(date_str[:2]) + 2000 , int(date_str[2:4]) , int(date_str[4:]))
                if card_status==1:
                    total_price=order_card['payment_info']['total_price']
                    payment_method=order_card['payment_info']['payment_method']
                    order_status=order_card['status_info']['status']
                    order_status_des=order_card['status_info']['status_description'].get('description_value')
                    fulfilment_info=order_card['fulfilment_info']
                    item_info_list=order_card['item_info_group']['item_info_list']
                else:
                    total_price=order_card['package_list'][0]['payment_info']['total_price']
                    payment_method=order_card['package_list'][0]['payment_info']['payment_method']
                    order_status=order_card['package_list'][0]['status_info']['status']
                    order_status_des=order_card['package_list'][0]['status_info']['status_description'].get('description_value')
                    fulfilment_info=order_card['package_list'][0]['fulfilment_info']
                    item_info_list=order_card['package_list'][0]['item_info_group']['item_info_list']

                
                # Collect carrier names (*_name keys) and tracking numbers.
                ship_type=''
                ship_count=None
                ship_id=None
                
                ship_name_list=[]
                for fkk,fkv in fulfilment_info.items():
                    if fkk.endswith('_name'):
                        ship_name_list.append(fkv)
                    if fkk=='tracking_number_list':
                        ship_id=fkv[0]
                        ship_count=len(fkv)

                if ship_name_list:
                    ship_type='\n'.join(ship_name_list)

                

                for ilist in item_info_list:
                    
                    pitem=ilist['item_list'][0]
                    pname=pitem['name']
                    # Optional 8-char random code embedded in the title as "...|XXXXXXXX|".
                    random_code=None
                    rcode_mat= re.search(r'.*\|(.{8})\|$',pname)
                    if rcode_mat:
                        random_code=rcode_mat.group(1)
                    sku_name=pitem.get('description')
                    pimage=f"{IMG_HOST_MAP[country]}{pitem['image']}"
                    pamount=pitem['amount']
                    itemid=pitem['inner_item_ext_info']['item_id']
                    purl=f"https://{host.replace('seller.','')}/product/{shopid}/{itemid}/"
                    skuid=pitem['inner_item_ext_info']['model_id']
                    ad_paras=itemid_adc_dict.get(str(itemid))
                    ad_cd=None
                    ad_ROAS=None
                    if ad_paras:
                        ad_cd,ad_ROAS=ad_paras
                    cur_order_info={
                        'shopid':str(shopid),
                        'order_id':str(order_id),
                        'order_sn':order_sn,
                        'buyer_name':buyer_name,
                        'buyer_id':str(buyer_id),
                        'ItemID':str(itemid),
                        'skuid':str(skuid),
                        'purl':purl,
                        'pname':pname,
                        'sku_name':sku_name,
                        'pimage':pimage,
                        'amount':pamount,
                        # NOTE(review): API price appears scaled by 100000 — confirm.
                        'total_price':int(total_price/100000),
                        'payment_method':payment_method,
                        'status':order_status,
                        'status_des':order_status_des,
                        'ship_id':ship_id,
                        'ship_type':ship_type,
                        'ship_count':ship_count,
                        'order_date':order_date,
                        'bid':bid,
                        'BName':dpName,
                        'GroupName':gname,
                        'userName':userName,
                        'SJBM':sjbm,
                        'YYBM':yybm,
                        'conversions':ad_cd,
                        'ROAS':ad_ROAS,
                        'random_code':random_code,
                        'country':basic_info['Country']

                    }

                    order_info_list.append(cur_order_info)
                    zds=list(cur_order_info.keys())
                    paras=list(cur_order_info.values())
                    aff=tms.merge_into('ShopeeOrder',zds,paras,['order_id','skuid'],True)
                    suc+=aff
                    #logging.info(f'店铺:《{dpName}》,{bid},订单:《{order_sn}》,skuid:{skuid},随机码:{random_code},存入状态:{aff},共更新:{suc}')
                    

            except Exception as e:
                # One malformed card must not abort the whole batch.
                traceback.print_exc()
                logging.info(f'店铺:《{dpName}》,{bid},解析错误 => {e}')
        
    logging.info(f'店铺:《{dpName}》,{bid},获取 {len(order_info_list)} 个订单信息,保存 {suc} 个')
    return suc

async def update_XPStock(page:Page,basic_info):
    """Refresh stock/price data for every live SKU of the shop.

    Pulls the full product list, upserts one ShopeeStock row per SKU
    (keyed on ItemID+skuid), registers overseas-warehouse SKUs (sku codes
    starting with 'sea') into Purchase_Sales_Warehouse, and pushes current
    sale prices and 3-month ACOS figures into ProductProfit.

    Returns the number of upserted ShopeeStock rows, or None when the
    product list could not be fetched.
    """

    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    userName=basic_info['UserName']
    gname=basic_info['GroupName']
    yybm=basic_info['YYBM']
    sjbm=basic_info['SJBM']
    host=basic_info['host']
    country=basic_info['Country']
    shopid= await js_req_shopid(page,basic_info)
    pros=await js_get_xp_prolist(page,basic_info)

    # js_get_xp_prolist returns None after repeated request failures.
    if pros is None:
        return
    
    aff=0
    aff_hw=0
    aff_profit=0
    camp_count=0
    aff_profit_camp_3m=0
    psku_list=[]
    # NOTE(review): cur_time is currently unused in this function.
    cur_time=datetime.datetime.now()
    camp_list=await get_entry_list(page,basic_info,is_contain_sold_out=True)
    
    # item_id -> rounded direct_cir (ACOS) from the campaign report list.
    pid_acos_map={str(camp['manual_product_ads']['item_id']):round(camp['report']['direct_cir'],2) for camp in camp_list}

    for pro in pros:

        try:
            pro_itemid=str(pro['id'])
            pname=pro['name']
            pimage=f"{IMG_HOST_MAP[country]}{pro['cover_image']}"
            pro_create_time=timestamp_to_sql_datetime(pro['create_time'])
            pro_modify_time=timestamp_to_sql_datetime(pro['modify_time'])
            pro_min_price=int(float(pro['price_detail']['price_min']))
            pro_ph_itemid=pro['parent_sku']
            
            # Push the product's 3-month ACOS into ProductProfit when it has an ad.
            if pid_acos_map.get(pro_itemid) is not None:
                camp_count+=1
                aff_profit_camp_3m+=tms.ExecNoQuery('update ProductProfit set acos_3months_id=? where rucangItemID=?',(pid_acos_map.get(pro_itemid),pro_itemid))
                

            sku_infos=pro['model_list']

            for skuinfo in sku_infos:
                sku_image=None
                origin_price=skuinfo['price_detail']['origin_price']
                promotion_price=skuinfo['price_detail']['promotion_price']
                hwid=skuinfo['sku']
                # Effective price: promotion price when set, origin price otherwise.
                skuprice=int(float(origin_price)) if promotion_price=='0.00' else int(float(promotion_price))
                if skuinfo['image']:
                    sku_image=f"{IMG_HOST_MAP[country]}{skuinfo['image']}"

                # SKU codes starting with 'sea' denote overseas-warehouse stock.
                if hwid.lower().startswith('sea'):

                    cur_hw_pdata={
                        'haiwaicangxitongbianma':hwid,
                        'rucangItemID':pro_itemid,
                        'rucangSKUID':str(skuinfo['id']),
                        'bid':bid,
                        'BName':dpName,
                        'gName':gname,
                        'shopid':shopid,
                        'SJBM':sjbm,
                        #'yunyingbianma':yybm,
                        'pname':pname,
                        'sku_name':skuinfo['name'],
                        'sku_img':sku_image
                        
                    }
                    hw_zds=list(cur_hw_pdata.keys())
                    hw_paras=list(cur_hw_pdata.values())
                    aff_hw+=tms.merge_into('Purchase_Sales_Warehouse',hw_zds,hw_paras,['haiwaicangxitongbianma'],True)

                cur_pro_sku={
                    'shopid':shopid,
                    'ItemID':pro_itemid,
                    'skuid':str(skuinfo['id']),
                    'purl':f"https://{host.replace('seller.','')}/product/{shopid}/{pro_itemid}/",
                    'pname':pname,
                    'sku_name':skuinfo['name'],
                    'pimage':pimage,
                    'sku_image':sku_image,
                    'sold_count':skuinfo['statistics']['sold_count'],
                    'price':skuprice,
                    'stock':skuinfo['stock_detail']['total_available_stock'],
                    'modify_time':pro_modify_time,
                    'create_time':pro_create_time,
                    'ph_itemid':pro_ph_itemid,
                    'ph_skuid':skuinfo['sku'],
                    'bid':bid,
                    'BName':dpName,
                    'GroupName':gname,
                    'userName':userName,
                    'SJBM':sjbm,
                    'YYBM':yybm,
                    'country':country
                }
                zds=list(cur_pro_sku.keys())
                paras=list(cur_pro_sku.values())
                aff+=tms.merge_into('ShopeeStock',zds,paras,['ItemID','skuid'],True)
                psku_list.append(cur_pro_sku)

                # Sync the latest sale price into the profit table.
                ccc_skuid=str(skuinfo['id'])
                aff_profit+=tms.ExecNoQuery('update ProductProfit set sale_price_id=? where rucangSKUID=?',(skuprice,ccc_skuid))

        except Exception as e:
            # One malformed product must not abort the whole sync.
            traceback.print_exc()
            logging.error(f'json解析虾皮产品列表数据错误 => {e}')

    logging.info(f'店铺:《{dpName}》,{bid},共 {len(pros)} 个产品,{len(psku_list)}个sku,更新{aff}个sku库存,\n{aff_hw}个海外仓,{aff_profit}个利润表价格,{camp_count}个广告产品,{aff_profit_camp_3m}个acos')

    return aff
        
async def js_get_xp_adlist_keyword(page:Page,camid):
    """Fetch the keyword list (with recommended prices) for an ad campaign.

    Retries up to three times, reloading the page between failed attempts.

    Args:
        page: logged-in seller page.
        camid: campaign id (int or numeric string).

    Returns:
        List of keyword dicts from the API, or None when every attempt fails.
    """
    spdc,ck=await get_ck_and_spc_cds(page)
    params={'SPC_CDS':spdc,'SPC_CDS_VER':2}
    api_url='https://seller.shopee.co.id/api/pas/v1/product/manual/list_keyword_with_recommended_price/'
    post_data={"campaign_id":int(camid),"need_recommended_price":True,"header":{}}
    kw_list=None
    for jjj in range(3):
        json_data= await make_request(page,api_url,params,post_data)
        if json_data:
            kw_kprs=json_data.get('data')
            if kw_kprs is not None:
                kw_list=[kitem['keyword'] for kitem in kw_kprs]
                break
        else:
            await repeat_reload(page)
            # Bug fix: asyncio.sleep(6) was not awaited, so the coroutine was
            # never executed and the retry back-off never actually happened.
            await asyncio.sleep(6)
    
    return kw_list

async def js_get_adkw_with_rpt(page:Page,basic_info,camid):
    """Collect per-keyword settings plus report metrics for a campaign.

    First reads the campaign's active keywords (bid price, match type,
    state), then merges in report metrics for the default time window.
    Keywords that appear only in the report but generated orders are
    re-added to the campaign (bid 300) via js_mass_edit_kw as a side effect.

    Returns {keyword: info_dict}, or None when either API call fails.
    """

    spdc,ck=await get_ck_and_spc_cds(page)
    params={'SPC_CDS':spdc,'SPC_CDS_VER':2}
    api_url='https://seller.shopee.co.id/api/pas/v1/product/manual/list_keyword_with_recommended_price/'
    post_data={"campaign_id":int(camid),"need_recommended_price":True,"header":{}}
    kw_reports={}
    json_data= await make_request(page,api_url,params,post_data)
    if json_data:
        kw_kprs=json_data.get('data')
        if kw_kprs:
            for kw_kpr in kw_kprs:
                # Only keep keywords that are currently active in the campaign.
                if kw_kpr['keyword']['state']=='active':
                    cur_kw_info={}
                    curkw=kw_kpr['keyword']['keyword']
                    # NOTE(review): monetary values appear scaled by 100000 — confirm.
                    cur_kw_info['bid_price']=kw_kpr['keyword']['bid_price']/100000
                    cur_kw_info['match_type']=kw_kpr['keyword']['match_type']
                    cur_kw_info['state']=kw_kpr['keyword']['state']
                    kw_reports[curkw]=cur_kw_info
    else:
        return

    st_time,ed_time=get_shopee_timestamp()
    api_url='https://seller.shopee.co.id/api/pas/v1/report/get/'
    post_data={"start_time":st_time,"end_time":ed_time,"campaign_type":"product",
            "agg_type":"keyword","filter_params":{"campaign_id":int(camid)},"need_ratio":True,"header":{}}
    json_data= await make_request(page,api_url,params,post_data)
    if json_data:
        # Debug dump for one specific campaign id — left in place deliberately.
        if camid=='311785048':
            with open('测试数据55.json','w',encoding='utf-8') as f:
                json.dump(json_data,f,ensure_ascii=False)
        kw_rs=[]
        kw_zbs=json_data.get('data')
        if kw_zbs:
            for kw_zb in kw_zbs:
                curkw=kw_zb['key']
                if kw_reports.get(curkw):
                    # Merge report metrics into the active keyword's info dict.
                    cur_kw_info=kw_reports[curkw]
                    cur_kw_info['CTR']=kw_zb['metrics']['ctr']
                    cur_kw_info['Clicks']=kw_zb['metrics']['click']
                    cur_kw_info['cost']=kw_zb['metrics']['cost']/100000
                    cur_kw_info['impression']=kw_zb['metrics']['impression']
                    cur_kw_info['direct_gmv']=kw_zb['metrics']['direct_gmv']/100000
                    cur_kw_info['direct_order_amount']=kw_zb['metrics']['direct_order_amount']
                    cur_kw_info['direct_cir']=kw_zb['metrics']['direct_cir']
                    kw_reports[curkw]=cur_kw_info
                else:
                    # Report-only keyword that produced orders: schedule re-add.
                    if kw_zb['metrics']['direct_order_amount']>0:
                        kw_rs.append(['add',curkw,300])

            aff_rs=0
            for krs_item in kw_rs:

                res_rs=await js_mass_edit_kw(page,basic_info,camid,krs_item)
                if res_rs and res_rs['code']==0:
                    logging.info(f'词:《{krs_item[1]}》恢复成功 => {res_rs}')
                    aff_rs+=1
                else:
                    logging.error(f'词:《{krs_item[1]}》恢复错误 => {res_rs}')
            logging.info(f'广告:{camid},需恢复出单词:{len(kw_rs)}个,成功:{aff_rs}个')

                    
    else:
        return

    return kw_reports

async def update_XPcamp(page:Page,basic_info,entry):
    """Roll forward the daily keyword-state counters for one ad campaign.

    Counts the campaign's active exact/broad and deleted keywords, shifts
    the existing day2/day3 counters back by one day and stores today's
    counts as day3. Only updates campaigns that already exist in the
    [campaign] table; returns merge_into's result, or None otherwise.
    """

    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    userName=basic_info['UserName']
    gname=basic_info['GroupName']
    yybm=basic_info['YYBM']
    sjbm=basic_info['SJBM']
    shopid= basic_info['shopid']
    camid=str(entry['campaign']['campaign_id'])
    itemid=str(entry['manual_product_ads']['item_id'])
    
    kw_list=await js_get_xp_adlist_keyword(page,camid)

    if kw_list is None:
        logging.info(f'店铺:《{dpName}》,{bid},获取广告词列表失败')
        return


    # Bucket keywords: active-exact, active-broad, everything else = deleted.
    kw_active_e=[]
    kw_active_b=[]
    kw_deleted=[]
    cur_time=datetime.datetime.now()
    for kitem in kw_list:

        kw_state=kitem['state']
        kw_type=kitem['match_type']
        if kw_state=='active':
            if kw_type=='exact':
                kw_active_e.append(copy.copy(kitem))
            elif kw_type=='broad':
                kw_active_b.append(copy.copy(kitem))
        else:
            kw_deleted.append(copy.copy(kitem))

    # NOTE(review): campaign_id is interpolated into the SQL string; camid is
    # derived from the API response here, but a parameterized query would be
    # safer — confirm ExecQuerydict supports parameters.
    sqlstr=f'select * FROM [campaign] where campaign_id={camid}'
    rs= tms.ExecQuerydict(sqlstr)
    if len(rs)>0:
        old_data=rs[0]
        # Shift the 3-day rolling counters: day1 <- day2, day2 <- day3,
        # day3 <- today's counts.
        pdata_dict={
            'shopid':shopid,
            'gname':gname,
            'SJBM':sjbm,
            'YYBM':yybm,
            'UserName':userName,
            'active_broad_day1':old_data['active_broad_day2'],
            'active_broad_day2':old_data['active_broad_day3'],
            'active_broad_day3':len(kw_active_b),
            'active_exact_day1':old_data['active_exact_day2'],
            'active_exact_day2':old_data['active_exact_day3'],
            'active_exact_day3':len(kw_active_e),
            'active_day1':old_data['active_day2'],
            'active_day2':old_data['active_day3'],
            'active_day3':len(kw_active_b)+len(kw_active_e),
            'deleted_day1':old_data['deleted_day2'],
            'deleted_day2':old_data['deleted_day3'],
            'deleted_day3':len(kw_deleted),
            'kwstate_update_time':cur_time,
            'campaign_id':camid
        }

        zds=list(pdata_dict.keys())
        paras=list(pdata_dict.values())
        aff= tms.merge_into('campaign',zds,paras,['campaign_id'])
        return aff

    
    else:
        logging.info(f'店铺:《{dpName}》,{bid},表《campaign》未保存广告itemid:{itemid},campaign_id:{camid}')

async def get_kw_active(page:Page,cam_id):
    """Return the campaign's keyword dicts whose state is 'active'.

    Args:
        page: logged-in seller page.
        cam_id: campaign id (int or numeric string).

    Returns:
        List of active keyword dicts; [] when the campaign has no keywords
        or the keyword list could not be fetched (previously a failed fetch
        returned None from the helper and this function raised TypeError).
    """
    kw_list=await js_get_xp_adlist_keyword(page,cam_id)
    if not kw_list:
        return []
    return [kitem for kitem in kw_list if kitem['state']=='active']

async def add_kw_to_adtask(page:Page,basic_info):
    """Queue '新增' (add) and '修改' (edit) keyword tasks for every campaign of a shop.

    For each (item_id, campaign_id) pair from get_camps this coroutine:
      1. fetches the campaign's live keyword list and buckets it into
         active-exact, active-broad and deleted keywords;
      2. walks unprocessed rows (task_date IS NULL) of YN_AD_CDC
         (order-producing keywords) and YN_AD_CSV (search-volume keywords),
         merging add/edit rows into ShopeeADTask - new additions are capped
         so a campaign never holds more than 200 active keywords;
      3. stamps each consumed source row with today's date and finally sets
         houtai.ad_task_added=1 for this browser.

    Args:
        page: logged-in seller-center pyppeteer page used for all JS/API calls.
        basic_info: shop identity dict (BrowserID, DpName, UserName, Password,
            GroupName, YYBM, SJBM, shopid, ...).

    Returns:
        dict mapping item_id -> campaign_id, as returned by get_camps.
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    userName=basic_info['UserName']
    password=basic_info['Password']
    gname=basic_info['GroupName']
    yybm=basic_info['YYBM']
    sjbm=basic_info['SJBM']
    shopid= basic_info['shopid']
    if not shopid:
        # shopid may be missing from the DB record; resolve it from the page.
        shopid=await js_req_shopid(page,basic_info)
    campaign_dict=await get_camps(page,basic_info,is_del_task=True)
    today_date=datetime.date.today().strftime('%Y-%m-%d')
    pj=1  # 1-based campaign counter, used only in log messages

    for cur_itemid,cur_campaign_id in campaign_dict.items():

        # Bucket the campaign's current keywords by state / match type.
        kw_list=await js_get_xp_adlist_keyword(page,cur_campaign_id)
        kw_active_e=[]
        kw_active_b=[]
        kw_deleted=[]
        for kitem in kw_list:
            kw_state=kitem['state']
            kw_type=kitem['match_type']
            kw=kitem['keyword']
            if kw_state=='active':
                if kw_type=='exact':
                    kw_active_e.append(kw)
                elif kw_type=='broad':
                    kw_active_b.append(kw)
            else:
                kw_deleted.append(kw)

        # Remaining free slots under the 200-keywords-per-campaign budget.
        need_count=200-len(kw_active_e)-len(kw_active_b)
        logging.info(f'店铺:《{dpName}》,{bid},第 {pj} 个itemid:{cur_itemid},已有精准词:{len(kw_active_e)},广泛词:{len(kw_active_b)},还可新增:{need_count}')
        kw_need_add=[]
        kw_need_edit=[]
        # Template shared by every ShopeeADTask row written for this campaign.
        ad_basic={
            'shopid':shopid,
            'bid':bid,
            'BName':dpName,
            'GroupName':gname,
            'SJBM':sjbm,
            'YYBM':yybm,
            'UserName':userName,
            'password':password,
            'product_id':cur_itemid,
            'campaign_id':cur_campaign_id,
            'ad_url':f'https://seller.shopee.co.id/portal/marketing/pas/product/manual/{cur_campaign_id}?group=last_three_month',
            'purl':f'https://shopee.co.id/product/{shopid}/{cur_itemid}/',
            'task_date':today_date
        }
        # --- Source 1: YN_AD_CDC (order-producing keywords), unprocessed rows only.
        sqlstr1='select Search_Query,Product_Name_Ad_Name,Search_Volume from YN_AD_CDC where Product_ID=? and task_date is null'
        rs1=tms.ExecQuery(sqlstr1,(cur_itemid,))
        aff_add=0
        aff_edit=0
        aff_del=0
        aff_ign=0
        for ys_kw,pname,scount in rs1:
            cur_info=copy.copy(ad_basic)
            # Normalise the keyword: punctuation -> space, collapse whitespace.
            cur_kw=re.sub(r'[%.,/&()+-]',' ',ys_kw)
            cur_kw=re.sub(r'\s+', ' ', cur_kw).strip()
            cur_info['Search_Query']=cur_kw
            cur_info['Product_Name_Ad_Name']=pname
            cur_info['kw_type']='出单词表'
            cur_info['search_count']=scount
            if cur_kw in kw_active_e:
                # Already live as exact: just mark the source row as processed.
                aff_ign+=tms.ExecNoQuery('update YN_AD_CDC set task_date=? where Product_ID=? and Search_Query=?',(today_date,cur_itemid,ys_kw))
            elif cur_kw in kw_active_b:
                # Live as broad: queue a broad -> exact edit task.
                kw_need_edit.append(cur_kw)
                cur_info['action_type']='修改'
                cur_info['action_des']='在出单词表,将匹配方式从 broad => exact'
                zds=list(cur_info.keys())
                params=list(cur_info.values())
                aff_edit0=tms.merge_into('ShopeeADTask',zds,params,['product_id','Search_Query','action_type'])
                if aff_edit0:
                    tms.ExecNoQuery('update YN_AD_CDC set task_date=? where Product_ID=? and Search_Query=?',(today_date,cur_itemid,ys_kw))
                aff_edit+=aff_edit0

            elif cur_kw in kw_deleted:
                # Was deleted on-site before: mark processed, never re-add.
                aff_del+=tms.ExecNoQuery('update YN_AD_CDC set task_date=? where Product_ID=? and Search_Query=?',(today_date,cur_itemid,ys_kw))

            else:
                # Brand-new keyword: queue an add task while slots remain.
                if len(kw_need_add)<need_count:
                    kw_need_add.append(cur_kw)
                    cur_info['action_type']='新增'
                    cur_info['action_des']='在出单词表,新增'
                    zds=list(cur_info.keys())
                    params=list(cur_info.values())
                    aff_add0=tms.merge_into('ShopeeADTask',zds,params,['product_id','Search_Query','action_type'])
                    if aff_add0:
                        tms.ExecNoQuery('update YN_AD_CDC set task_date=? where Product_ID=? and Search_Query=?',(today_date,cur_itemid,ys_kw))
                    aff_add+=aff_add0
        logging.info(f'店铺:《{dpName}》,{bid},第 {pj} 个itemid:{cur_itemid},《YN_AD_CDC》提取,已存在忽略:{aff_ign},需修改:{aff_edit},需新增:{aff_add},删除过忽略:{aff_del}')


        aff_add=0
        aff_edit=0
        aff_del=0
        aff_ign=0

        # --- Source 2: YN_AD_CSV (search-volume keywords).  Fill whatever
        # slots remain with words of Search_volume>=50, preferring tagged
        # sources (kwfrom) and higher volume.
        sqlstr2=f'''SELECT top {need_count-len(kw_need_add)} Search_Query,Product_Name_Ad_Name,wordnum,Search_volume,kwfrom
                FROM YN_AD_CSV where product_id=? 
                and (
                        (Search_volume>=50 and wordnum is null and Clicks is not null)
                        or
                        (Search_volume>=50 and wordnum is not null)
                    )
                and is_del=0
                and task_date is null order by kwfrom desc,Search_volume desc
            '''
        rs2=tms.ExecQuery(sqlstr2,(cur_itemid,))

        for ys_kw,pname,wordnnn,old_scount,kwfrom in rs2:
            # Single-word keywords are too generic - skip them.
            if len(ys_kw.split())==1:
                continue
            cur_info=copy.copy(ad_basic)
            cur_kw=re.sub(r'[%.,/&+-]',' ',ys_kw)
            cur_kw=re.sub(r'\s+', ' ', cur_kw).strip()
            cur_info['Search_Query']=cur_kw
            scount=old_scount
            if wordnnn and not kwfrom:
                # Title-expansion words without a source tag may carry a stale
                # volume: refresh it from the live API before deciding.
                new_search_count=await js_req_search_count(page,cur_kw,cur_itemid)
                if new_search_count is not None:
                    aff_ss=tms.ExecNoQuery('update YN_AD_CSV set Search_volume=? where Search_Query=?',(new_search_count,ys_kw))
                    print(f'关键词《{cur_kw}》搜索量 {old_scount} => {new_search_count},行数：{aff_ss}')
                    if new_search_count<50:
                        continue
                    scount=new_search_count
                else:
                    # Lookup failed - reload the page and leave this row for later.
                    await repeat_reload(page)
                    await asyncio.sleep(6)
                    continue

            cur_info['Product_Name_Ad_Name']=pname
            cur_info['kw_type']='标题拓展词' if wordnnn else '精准匹配关键词'
            if kwfrom:
                cur_info['kw_type']=kwfrom
            cur_info['search_count']=scount
            if cur_kw in kw_active_e:
                aff_ign+=tms.ExecNoQuery('update YN_AD_CSV set task_date=? where Product_ID=? and Search_Query=?',(today_date,cur_itemid,ys_kw))
            elif cur_kw in kw_active_b:
                kw_need_edit.append(cur_kw)
                cur_info['action_type']='修改'
                cur_info['action_des']='在精准匹配关键词表,将匹配方式从 broad => exact'
                zds=list(cur_info.keys())
                params=list(cur_info.values())
                # NOTE(review): the trailing True differs from the CDC section
                # above - its meaning is defined in tms.merge_into; confirm.
                aff_edit0=tms.merge_into('ShopeeADTask',zds,params,['product_id','Search_Query','action_type'],True)
                if aff_edit0:
                    tms.ExecNoQuery('update YN_AD_CSV set task_date=? where Product_ID=? and Search_Query=?',(today_date,cur_itemid,ys_kw))
                aff_edit+=aff_edit0
            elif cur_kw in kw_deleted:
                aff_del+=tms.ExecNoQuery('update YN_AD_CSV set task_date=? where Product_ID=? and Search_Query=?',(today_date,cur_itemid,ys_kw))

            else:
                if len(kw_need_add)<need_count:
                    kw_need_add.append(cur_kw)
                    cur_info['action_type']='新增'
                    cur_info['action_des']='标题拓展词,搜索量>=50' if wordnnn else '精准匹配关键词,搜索量>=50'
                    zds=list(cur_info.keys())
                    params=list(cur_info.values())
                    aff_add0=tms.merge_into('ShopeeADTask',zds,params,['product_id','Search_Query','action_type'],True)
                    if aff_add0:
                        tms.ExecNoQuery('update YN_AD_CSV set task_date=? where Product_ID=? and Search_Query=?',(today_date,cur_itemid,ys_kw))
                    aff_add+=aff_add0

        logging.info(f'店铺:《{dpName}》,{bid},第 {pj} 个itemid:{cur_itemid},《YN_AD_CSV》提取,已存在忽略:{aff_ign},需修改:{aff_edit},需新增:{aff_add},删除过忽略:{aff_del}')


        # Cross-check: how many task rows actually landed today for this item.
        rs_ttt= tms.ExecQuery('select count(*) from ShopeeADTask where product_id=? and task_date=CAST(GETDATE() as date)',(cur_itemid,))
        logging.info(f'店铺:《{dpName}》,{bid},第 {pj} 个itemid:{cur_itemid},存入任务表,需新增词:{len(kw_need_add)},实际:{rs_ttt[0][0]},需修改词:{len(kw_need_edit)}')

        pj+=1


    tms.ExecNoQuery('update houtai set ad_task_added=1 where BrowserID=?',(bid,))
    return campaign_dict

async def check_adtask(page:Page,basic_info,pid,campid,action_type='新增'):
    """Verify queued ShopeeADTask rows against the live keyword list of *campid*.

    First pass over the live keywords repairs on-site errors immediately via
    js_mass_edit_kw: every active broad keyword gets an 'edit_ktype' fix and
    every bid price above 1500 gets an 'edit_price' fix (both target 300).

    Second pass re-reads the pending (ck_state=0) ShopeeADTask rows for *pid*
    according to *action_type* and stamps gt_state=1,ck_state=1 on the rows
    whose effect is visible on-site; rows that did not take effect are either
    converted/reset for retry or counted as failures.

    Args:
        page: logged-in seller-center page.
        basic_info: shop identity dict (BrowserID/DpName used for logging).
        pid: product (item) id whose tasks are verified.
        campid: campaign id to inspect.
        action_type: task family to verify - '新增' (also re-checks pending
            edits that have no target price), '删除' or '修改'.
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']

    kw_list=await js_get_xp_adlist_keyword(page,campid)
    kw_active_e=[]
    kw_active_b=[]
    kw_deleted=[]
    kw_active_with_price={}  # keyword -> [match_type, bid_price]
    error_price_list=[]  # [fix_kind, keyword, target_price] triples
    for kitem in kw_list:
        kw_state=kitem['state']
        kw_type=kitem['match_type']
        kw=kitem['keyword']
        # API bid_price is scaled by 100000; convert to display units.
        bid_price=int(kitem['bid_price']/100000)
        if kw_state=='active':
            if kw_type=='exact':
                kw_active_e.append(kw)
            elif kw_type=='broad':
                kw_active_b.append(kw)
                # Active broad keywords are treated as errors: queue a type fix.
                error_price_list.append(['edit_ktype',kw,300])
            kw_active_with_price[kw]=[kw_type,bid_price]
            if bid_price>1500:
                # Price runaway: queue a price fix back to 300.
                error_price_list.append(['edit_price',kw,300])

        else:
            kw_deleted.append(kw)

    logging.info(f'店铺:《{dpName}》,广告:{campid},错误增加队列:{len(error_price_list)} 个,开始修正...')
    xz=0
    for ekitem in error_price_list:
        res_xz=await js_mass_edit_kw(page,basic_info,campid,ekitem)
        if res_xz['code']==0:
            xz+=1

    logging.info(f'店铺:《{dpName}》,广告:{campid},错误增加队列:{len(error_price_list)} 个,修正完成,共修正{xz}个错误')

    suc=0
    fail=0
    cur_time=datetime.datetime.now()
    if action_type=='新增':
        rs= tms.ExecQuery('select Search_Query,action_type from ShopeeADTask where product_id=? and ck_state=0 and target_price is null and action_type in (?,?)',(pid,'新增','修改'))

        for kw,aty in rs:
            aff=0
            if kw in kw_deleted:
                # Keyword was deleted on-site: close the task as handled.
                aff=tms.ExecNoQuery(f'update ShopeeADTask set gt_state=1,ck_state=1,update_time=?,action_des=? where product_id=? and Search_Query=? and action_type=?',(cur_time,'在删除列表',pid,kw,'新增'))
                suc+=1
            else:
                if aty=='新增':

                    if kw in kw_active_e:
                        # Landed as exact: accept only the expected price points.
                        wy_bid_price=kw_active_with_price[kw][1]
                        if wy_bid_price in [300,400]:
                            aff=tms.ExecNoQuery(f'update ShopeeADTask set gt_state=1,ck_state=1,update_time=? where product_id=? and Search_Query=? and action_type=?',(cur_time,pid,kw,'新增'))
                            suc+=aff
                        else:
                            # Wrong price: convert the row into an edit task at 300.
                            aff00=tms.ExecNoQuery(f'update ShopeeADTask set target_price=300,action_type=?,update_time=? where product_id=? and Search_Query=? and action_type=?',('修改',cur_time,pid,kw,'新增'))
                            logging.info(f'店铺:《{dpName}》,{bid},pid:{pid},词《{kw}》,价格出错加入修改任务:{aff00}')
                            fail+=aff00
                    elif kw in kw_active_b:
                        # Landed as broad instead of exact: re-queue as an edit.
                        aff=tms.ExecNoQuery(f'update ShopeeADTask set action_type=?,update_time=? where product_id=? and Search_Query=? and action_type=?',('修改',cur_time,pid,kw,'新增'))
                        fail+=aff
                    else:
                        # Not on-site at all: reset flags so the add is retried.
                        aff=tms.ExecNoQuery(f'update ShopeeADTask set gt_state=0,ck_state=0 where product_id=? and Search_Query=? and action_type=?',(pid,kw,'新增'))
                        fail+=1

                elif aty=='修改':
                    if kw in kw_active_e:
                        aff=tms.ExecNoQuery(f'update ShopeeADTask set gt_state=1,ck_state=1,update_time=? where product_id=? and Search_Query=? and action_type=?',(cur_time,pid,kw,'修改'))
                        suc+=aff
                    else:
                        fail+=1

        logging.info(f'店铺:《{dpName}》,{bid},pid:{pid},《新增任务验证》，需验证:{len(rs)}个,验证执行成功:{suc},验证执行失败:{fail}')

    elif action_type=='删除':
        rs= tms.ExecQuery('select Search_Query from ShopeeADTask where product_id=? and ck_state=0 and action_type=?',(pid,'删除'))

        for r in rs:
            del_kw=r[0]
            # A delete is verified once the keyword left the active map.
            if del_kw not in kw_active_with_price:
                aff=tms.ExecNoQuery(f'update ShopeeADTask set gt_state=1,ck_state=1,update_time=? where product_id=? and Search_Query=? and action_type=?',(cur_time,pid,del_kw,'删除'))
                suc+=aff
            else:
                fail+=1

        logging.info(f'店铺:《{dpName}》,{bid},pid:{pid},《删除任务验证》，需验证:{len(rs)}个,验证执行成功:{suc},验证执行失败:{fail}')

    elif action_type=='修改':
        rs= tms.ExecQuery('select Search_Query,target_price from ShopeeADTask where product_id=? and ck_state=0 and action_type=?',(pid,'修改'))
        for edit_kw,target_price in rs:
            if edit_kw in kw_active_with_price:
                wy_ad_type,wy_bid_price=kw_active_with_price[edit_kw]
                is_yz=False
                # Verified when the live (type, price) matches the target, or
                # when no target price was recorded and the type is exact.
                if target_price is not None:
                    if (wy_ad_type,wy_bid_price)==('exact',target_price):
                        is_yz=True
                else:
                    if wy_ad_type=='exact':
                        is_yz=True
                if is_yz:
                    aff=tms.ExecNoQuery(f'update ShopeeADTask set gt_state=1,ck_state=1,update_time=? where product_id=? and Search_Query=? and action_type=?',(cur_time,pid,edit_kw,'修改'))
                    suc+=aff
                else:
                    fail+=1
            else:
                fail+=1

        logging.info(f'店铺:《{dpName}》,{bid},pid:{pid},《修改任务验证》，需验证:{len(rs)}个,验证执行成功:{suc},验证执行失败:{fail}')
    
async def del_kw_to_adtask(page:Page,basic_info):
    """Queue '删除' (delete) keyword tasks from 3-month performance reports.

    For every campaign of the shop, fetches per-keyword report data via
    js_get_adkw_with_rpt and flags under-performers.  Keywords priced above
    300 or with at least one direct order are exempt.  A keyword is queued
    for deletion when any rule matches:
      - CTR<0.05, broad match, Clicks>=2, GMV=0
      - CTR<0.05, exact match, Clicks>=4, GMV=0
      - cost>=30000, GMV=0, bid_price=300
      - impression>=100, Clicks=0
      - cost>=100000, GMV=0, bid_price>300
    Matching keywords are merged into ShopeeADTask.  Only when every report
    fetch succeeded is houtai.ad_task_deleted set to 1 for this browser.

    Args:
        page: logged-in seller-center page.
        basic_info: shop identity dict.

    Returns:
        dict mapping item_id -> campaign_id, as returned by get_camps.
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    userName=basic_info['UserName']
    password=basic_info['Password']
    gname=basic_info['GroupName']
    yybm=basic_info['YYBM']
    sjbm=basic_info['SJBM']
    shopid=await js_req_shopid(page,basic_info)
    campaign_dict=await get_camps(page,basic_info,is_del_task=True)

    today_date=datetime.date.today().strftime('%Y-%m-%d')
    pj=1  # 1-based campaign counter, used only in log messages
    all_success=True
    for cur_itemid,cur_campaign_id in campaign_dict.items():

        kw_reports=await js_get_adkw_with_rpt(page,basic_info,cur_campaign_id)
        if kw_reports is None:
            # Report fetch failed: skip this campaign and do not mark done.
            all_success=False
            continue

        # Enrich reports with the keyword's age in days since upload,
        # normalising the stored keyword the same way the reports do.
        cur_time=int(time.time())
        rs_ttt=tms.ExecQuery('select Search_Query,uploadTime from YN_AD_Info where Product_ID=?',(cur_itemid,))
        for ccc_kw,upload_time in rs_ttt:
            ccc_kw=re.sub(r'[%.,/&()+-]',' ',ccc_kw)
            ccc_kw=re.sub(r'\s+', ' ', ccc_kw).strip()
            if kw_reports.get(ccc_kw):
                kw_reports[ccc_kw]['stay_time']=(cur_time-upload_time)/86400

        # with open(f'JSONDATA/{cur_campaign_id}_广告词参数.json','w',encoding='utf-8') as f:
        #     json.dump(kw_reports,f,ensure_ascii=False,indent=4)

        kw_del=[]
        # Template shared by every delete-task row for this campaign.
        ad_basic={
            'shopid':shopid,
            'bid':bid,
            'BName':dpName,
            'GroupName':gname,
            'SJBM':sjbm,
            'YYBM':yybm,
            'UserName':userName,
            'password':password,
            'product_id':cur_itemid,
            'campaign_id':cur_campaign_id,
            'ad_url':f'https://seller.shopee.co.id/portal/marketing/pas/product/manual/{cur_campaign_id}?group=last_three_month',
            'purl':f'https://shopee.co.id/product/{shopid}/{cur_itemid}/',
            'task_date':today_date,
            'action_type':'删除',
            'kw_type':'网页三个月数据'
        }
        aff_del=0
        for rpkw,rpcs in kw_reports.items():
            is_del=False
            cur_info=copy.copy(ad_basic)
            cur_info['Search_Query']=rpkw
            # Keywords priced above 300 are exempt from deletion.
            if rpcs['bid_price']>300:
                continue
            # Keywords with any direct orders are kept.
            if rpcs.get('direct_order_amount') and rpcs.get('direct_order_amount')>0:
                continue
            if rpcs.get('CTR') is not None:
                kw_gmv=rpcs['direct_gmv']
                if rpcs['CTR']<0.05 and rpcs['match_type']=='broad' and rpcs['Clicks']>=2 and kw_gmv==0:
                    kw_del.append(rpkw)
                    cur_info['action_des']='CTR<0.05 且 Match Type=broad 且 Clicks>=2 且 GMV=0'
                    is_del=True


                elif rpcs['CTR']<0.05 and rpcs['match_type']=='exact' and rpcs['Clicks']>=4 and kw_gmv==0:
                    kw_del.append(rpkw)
                    cur_info['action_des']='CTR<0.05 且 Match Type=exact 且 Clicks>=4 且 GMV=0'
                    is_del=True

                elif rpcs['cost']>=30000 and kw_gmv==0 and rpcs['bid_price']==300:
                    kw_del.append(rpkw)
                    cur_info['action_des']='Expense>=30000  且 GMV=0 且 bid_price=300'
                    is_del=True

                elif rpcs['impression']>=100 and rpcs['Clicks']==0:
                    kw_del.append(rpkw)
                    cur_info['action_des']='impression>=100  且 Clicks=0'
                    is_del=True

                # elif rpcs['Clicks']==0 and rpcs.get('stay_time') and rpcs.get('stay_time')>=15:
                #     kw_del.append(rpkw)
                #     cur_info['action_des']='Clicks=0  且 关键词添加时间大于等于15天'
                #     is_del=True

                # NOTE(review): this branch looks unreachable - keywords with
                # bid_price>300 were skipped by the 'continue' above; confirm
                # the intended behaviour.
                elif rpcs['cost']>=100000 and kw_gmv==0 and rpcs['bid_price']>300:
                    kw_del.append(rpkw)
                    cur_info['action_des']='Expense>=100000  且 GMV=0 且 bid_price>=300'
                    is_del=True
            if is_del:
                zds=list(cur_info.keys())
                params=list(cur_info.values())
                aff=tms.merge_into('ShopeeADTask',zds,params,['product_id','Search_Query','action_type'],True)
                aff_del+=aff

        logging.info(f'店铺:《{dpName}》,{bid},第 {pj} 个itemid:{cur_itemid},存入任务表,需删除:{len(kw_del)},实际添加删除任务:{aff_del}')
        pj+=1

    if all_success:
        aff_bj=tms.ExecNoQuery('update houtai set ad_task_deleted=1 where BrowserID=?',(bid,))
        logging.info(f'店铺:《{dpName}》,{bid},所有广告都已成功添加删除任务,标记状态:{aff_bj}')
    else:
        logging.info(f'店铺:《{dpName}》,{bid},存在广告未添加删除任务')
    return campaign_dict

async def edit_kw_to_adtask(page:Page,basic_info):
    """Queue '修改' (edit) keyword tasks from 3-month performance reports.

    For every campaign keyword that has report data:
      - bid prices above 800 are corrected to 300 on the spot via
        js_mass_edit_kw ('edit_price' fix);
      - converting keywords (>=1 direct order) are re-priced by ACOS band:
          ACOS < 0.3         -> 300 + 100*conversions, capped at 800, exact
          0.3 <= ACOS <= 0.5 -> 300 + 50*conversions, capped at 550, exact
          ACOS > 0.5         -> 200, exact
        and an edit task is merged into ShopeeADTask unless the keyword is
        already at the target (price, match type);
      - non-converting broad keywords are queued to flip broad -> exact.
    Marks houtai.ad_task_edited=1 once every campaign's report was fetched.

    Args:
        page: logged-in seller-center page.
        basic_info: shop identity dict.

    Returns:
        dict mapping item_id -> campaign_id, as returned by get_camps.
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    userName=basic_info['UserName']
    password=basic_info['Password']
    gname=basic_info['GroupName']
    yybm=basic_info['YYBM']
    sjbm=basic_info['SJBM']
    shopid=await js_req_shopid(page,basic_info)
    campaign_dict=await get_camps(page,basic_info,is_del_task=True)
    today_date=datetime.date.today().strftime('%Y-%m-%d')
    pj=1  # 1-based campaign counter, used only in log messages
    all_success=True

    for cur_itemid,cur_campaign_id in campaign_dict.items():

        kw_reports=await js_get_adkw_with_rpt(page,basic_info,cur_campaign_id)


        if kw_reports is None:
            # Report fetch failed: skip this campaign and do not mark done.
            all_success=False
            continue
        error_price_list=[]  # [fix_kind, keyword, target_price] triples


        # with open(f'JSONDATA/{cur_campaign_id}_广告词参数.json','w',encoding='utf-8') as f:
        #     json.dump(kw_reports,f,ensure_ascii=False,indent=4)

        # Template shared by every edit-task row for this campaign.
        ad_basic={
            'shopid':shopid,
            'bid':bid,
            'BName':dpName,
            'GroupName':gname,
            'SJBM':sjbm,
            'YYBM':yybm,
            'UserName':userName,
            'password':password,
            'product_id':cur_itemid,
            'campaign_id':cur_campaign_id,
            'ad_url':f'https://seller.shopee.co.id/portal/marketing/pas/product/manual/{cur_campaign_id}?group=last_three_month',
            'purl':f'https://shopee.co.id/product/{shopid}/{cur_itemid}/',
            'task_date':today_date,
            'action_type':'修改',
            'kw_type':'网页三个月数据',
            'gt_state':0,
            'ck_state':0
        }
        aff_edit=0
        ign_count=0
        kws_edit={}  # keyword -> [target bid_price or None, target match type]
        for rpkw,rpcs in kw_reports.items():

            cur_info=copy.copy(ad_basic)
            cur_info['Search_Query']=rpkw
            is_xg=False
            kw_type=rpcs['match_type']

            if rpcs.get('bid_price') and int(rpcs['bid_price'])>800:
                # Price above the 800 ceiling: fix it to 300 immediately.
                error_price_list.append(['edit_price',rpkw,300])


            if rpcs.get('CTR') is not None:
                kw_conversions=rpcs['direct_order_amount']
                # direct_cir is read as the keyword's ACOS here.
                cur_acos=rpcs['direct_cir']
                if kw_conversions>=1:
                    # Converting keyword: pick a target price by ACOS band.
                    if cur_acos<0.3:
                        bid_price=300+kw_conversions*100
                        if bid_price>800:
                            bid_price=800
                        ad_type='Exact match'

                    elif 0.3<=cur_acos<=0.5:
                        bid_price=300+kw_conversions*50
                        if bid_price>550:
                            bid_price=550
                        ad_type='Exact match'
                    else:
                        bid_price=200
                        ad_type='Exact match'

                    old_bid_price=int(rpcs['bid_price'])
                    old_ad_type='Exact match' if rpcs['match_type']=='exact' else 'Broad match'
                    if (bid_price,ad_type)==(old_bid_price,old_ad_type):
                        # Already at the target price/type: nothing to change.
                        #print('忽略修改队列',cur_itemid,cur_kw,cur_kw_price_item)
                        ign_count+=1
                    else:
                        cur_info['target_price'] = bid_price
                        bid_pirce_str='' if bid_price==old_bid_price  else f'bid_price:{old_bid_price}=>{bid_price}'
                        match_str='' if ad_type==old_ad_type else f'match_type:{old_ad_type}=>{ad_type}'
                        cur_info['action_des']=f'conversions:{kw_conversions},ACOS:{cur_acos} | {bid_pirce_str} {match_str}'
                        kws_edit[rpkw]=[bid_price,ad_type]

                        zds=list(cur_info.keys())
                        params=list(cur_info.values())
                        aff=tms.merge_into('ShopeeADTask',zds,params,['product_id','Search_Query','action_type'],True)
                        aff_edit+=aff

                elif kw_type=='broad':
                    is_xg=True
            elif kw_type=='broad':
                is_xg=True

            if is_xg:
                # Non-converting broad keyword: queue a broad -> exact flip.
                cur_info['action_des']='创建广告初始词,broad => exact'
                zds=list(cur_info.keys())
                params=list(cur_info.values())
                kws_edit[rpkw]=[None,'Broad match']
                aff=tms.merge_into('ShopeeADTask',zds,params,['product_id','Search_Query','action_type'],True)
                aff_edit+=aff

        # Apply the on-the-spot price corrections collected above.
        xz=0
        for ekitem in error_price_list:
            res_xz=await js_mass_edit_kw(page,basic_info,cur_campaign_id,ekitem)
            if res_xz['code']==0:
                xz+=1
        logging.info(f'店铺:《{dpName}》,广告:{cur_campaign_id},错误增加队列:{len(error_price_list)} 个,修正完成,共修正{xz}个错误')

        logging.info(f'店铺:《{dpName}》,{bid},第 {pj} 个itemid:{cur_itemid},存入任务表,需添加修改任务:{len(kws_edit.keys())},实际添加修改任务:{aff_edit},忽略添加修改任务:{ign_count}')
        pj+=1

    if all_success:
        aff_bj=tms.ExecNoQuery('update houtai set ad_task_edited=1 where BrowserID=?',(bid,))
        logging.info(f'店铺:《{dpName}》,{bid},所有广告都已成功添加修改任务,标记状态:{aff_bj}')
    else:
        logging.info(f'店铺:《{dpName}》,{bid},存在广告未添加修改任务')
    return campaign_dict

# Extracts the trailing numeric id from Shopee category URLs such as
# ".../cat.11043145.11043151" -> "11043151".
pat_phlm=re.compile(r'cat(?:\.\d+)*\.(\d+)')
async def js_get_ph_plist(page:Page,basic_info,lm_url,price_range=None):
    """Page through a Shopee PH category listing and dump each page to disk.

    Calls the public search_items API (60 products per request, sorted by
    sales desc, Overseas stock, rating filter 4) until the reported
    total_count is reached, writing every page to
    JSONDATA/菲律宾类目{lmid}_{newest}.json.

    Args:
        page: pyppeteer page the fetch is evaluated on.
        basic_info: shop identity dict (DpName used for logging).
        lm_url: category URL containing a 'cat.<id>...' segment.
        price_range: optional (price_min, price_max) tuple; defaults to
            (800, 3000).

    Returns:
        None.  Returns early when the URL carries no category id or a
        request fails.
    """

    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    mat_lmid=pat_phlm.search(lm_url)
    if not mat_lmid:
        return
    lmid=mat_lmid.group(1)
    url='https://shopee.ph/api/v4/search/search_items'
    price_min=800
    price_max=3000
    if price_range:
        price_min,price_max=price_range
    newest=0  # pagination offset, advances 60 per fetched page
    # Provisional cap until the first response reveals total_count.
    max_pcount=420
    while True:
        if newest>=max_pcount:
            break
        params = {
            'by': 'sales',
            'limit': 60,
            'locations': 'Overseas',
            'match_id': lmid,
            'newest': newest,
            'order': 'desc',
            'page_type': 'search',
            'price_max': price_max,
            'price_min': price_min,
            'rating_filter': 4,
            'scenario': 'PAGE_CATEGORY',
            'shippings': 5,
            'source': 'SRP',
            'version': 2,
            #'view_session_id': '120b8de4-0a63-42d3-aed7-3cbb7fe869a3'
        }

        js_data=await make_request(page,url,params)
        if js_data:
            print(js_data)
            if newest==0:
                # First page carries the real total for the loop bound.
                max_pcount=js_data['total_count']
                logging.info(f'店铺:《{dpName}》,获取类目产品总数量:{max_pcount}')
            with open(f'JSONDATA/菲律宾类目{lmid}_{newest}.json','w',encoding='utf-8') as f:
                json.dump(js_data,f,ensure_ascii=False,indent=4)
            newest+=60
        else:
            # Request failed (make_request exhausted retries): give up.
            return

async def js_get_listkw(page,basic_info,kw_info,j,kj):
    """Expand a seed keyword through Shopee's keyword-hint (autocomplete) API.

    Probes the seed keyword suffixed with each of a-z and 0-9, keeps the
    suggestions that contain every word of the seed, and inserts those not
    yet present (and at most 10 words long) into ShopeeeADTZKW.  When at
    least one probe succeeded, the seed row is marked is_cj_kw=1.

    Args:
        page: logged-in seller-center page.
        basic_info: shop identity dict (BrowserID/DpName for logging).
        kw_info: (id, product_id, seed_keyword, product_title, source_kw,
            source_type) tuple.
        j, kj: ad index / keyword index, used only in log output.

    Returns:
        Number of rows inserted when any probe succeeded, otherwise None.
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    kid,pid,ykw,ptit,source_kw,source_type=kw_info
    new_source_type=source_type
    # Lower-cased seed words; suggestions must contain all of them.
    ys_words=[k.lower() for k in ykw.split()]
    url='https://seller.shopee.co.id/api/pas/v1/setup_helper/list_keyword_hint/'
    spdc,ck=await get_ck_and_spc_cds(page)

    params={
        'SPC_CDS':spdc,
        'SPC_CDS_VER': 2
    }
    aff=0      # rows inserted into ShopeeeADTZKW
    qj_suc=0   # successful API probes
    # Probe the seed with every letter and digit suffix (36 requests).
    for letter in list(string.ascii_lowercase)+[str(sz) for sz in range(10)]:
        kw=f'{ykw} {letter}'
        data={
            'item_id': int(pid), 
            'keyword': kw,
            'placement': 0,
            'suggest_log_data': {'page': 'suggest_creation'}
        }


        js_data = await make_request(page,url,params,data)
        cur_time=datetime.datetime.now()

        if js_data:
            if js_data['msg']=='OK':
                qj_suc+=1
                listdata=js_data['data']
                #print(f'店铺:《{dpName}》,{bid},第{j}个广告,第{kj}个词:《{kw}》,下拉词共采集{len(listdata)}个')

                for item in listdata:
                    cur_kw=item['keyword']
                    cur_fg_wrods=[k.lower() for k in  cur_kw.split()]
                    cur_kw_num=len(cur_fg_wrods)

                    # Keep only suggestions that contain every seed word.
                    if not set(ys_words)<=set(cur_fg_wrods):
                        #print(f'店铺:《{dpName}》,{bid},第{j}个广告,第{kj}个词:《{kw}》,采集的:《{cur_kw}》,词不完全包含原始词')
                        continue

                    rs=tms.ExecQuery('select count(*) from ShopeeeADTZKW where kw=? and ProductID=?',(cur_kw,pid))

                    if rs[0][0]>0:
                        # Already collected for this product - skip.
                        #print(f'店铺:《{dpName}》,{bid},第{j}个广告,第{kj}个词:《{kw}》,采集的:《{cur_kw}》,数据表已存在')
                        pass
                    else:
                        pdata={
                            'ProductID':pid,
                            'PTitle':ptit,
                            'source_kw':ykw,
                            'source_kw_type':new_source_type,
                            'kw':cur_kw,
                            'WordNum':cur_kw_num,
                            'YN_kw':ykw,
                            'update_time':cur_time,
                            'upload_time':cur_time
                        }
                        # Only store suggestions of at most 10 words.
                        if cur_kw_num<=10:
                            zdstr=','.join(pdata.keys())
                            paras=list(pdata.values())
                            wstr=','.join(['?' for _ in paras])
                            aff+=tms.ExecNoQuery(f'insert into ShopeeeADTZKW({zdstr}) values({wstr})',paras)

            else:
                print(js_data)
    if qj_suc>0:
        # Mark the seed as collected so it is not probed again.
        aff2=tms.ExecNoQuery('update ShopeeeADTZKW set is_cj_kw=1,update_time=? where id=? and is_cj_kw=0',(cur_time,kid))
        print(f'店铺:《{dpName}》,{bid},第{j}个广告,第{kj}个词:《{ykw}》,请求成功次数:{qj_suc},成功保存:{aff}个长尾词,更改采集状态:{aff2}')

        return aff

async def js_get_related_kw(page:Page,basic_info,kitem,j,kj):
    """Fetch Shopee's related-keyword suggestions for a seed keyword and store new ones.

    Calls the search_keyword setup-helper API for *kitem*'s keyword, inserts
    every suggestion not already present in ShopeeeADTZKW for the product
    (tagged source_kw_type='关联词'), and marks the seed row in
    ShopeeADKWInfo as processed (is_tz=1).

    Args:
        page: logged-in seller-center page.
        basic_info: shop identity dict (BrowserID/DpName used for logging).
        kitem: (id, product_id, keyword, product_title, campaign_id) tuple.
        j, kj: ad index / keyword index, for log output only.

    Returns:
        Number of rows inserted when the request succeeded, otherwise None.
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    kid,pid,kw,ptitle,camid=kitem
    api_url='https://seller.shopee.co.id/api/pas/v1/setup_helper/search_keyword/'
    spdc,ck=await get_ck_and_spc_cds(page)
    params={
        'SPC_CDS':spdc,
        'SPC_CDS_VER': 2
    }

    data={
            "keyword":kw,
            "campaign_type":"product",
            "item_id":int(pid),
            "suggest_log_data":{"campaign_id":int(camid),"page":"suggest_after_creation"}
    }

    qj_suc=0
    aff=0
    js_data=await make_request(page,api_url,params,data)
    cur_time=datetime.datetime.now()
    # BUG FIX: make_request can return None once its retries are exhausted;
    # the original dereferenced js_data['msg'] unconditionally and raised
    # TypeError instead of falling through to the failure log below.
    if js_data and js_data.get('msg')=='OK':
        qj_suc+=1
        listdata=js_data['data']

        for item in listdata:
            # Skip the seed keyword itself.
            if item['keyword']==kw:
                continue
            cur_kw=item['keyword']

            cur_kw_num=len(cur_kw.split())
            rs=tms.ExecQuery('select count(*) from ShopeeeADTZKW where kw=? and ProductID=?',(cur_kw,pid))

            if rs[0][0]>0:
                # Already collected for this product - nothing to do.
                pass
            else:
                pdata={
                    'ProductID':pid,
                    'PTitle':ptitle,
                    'source_kw':kw,
                    'source_kw_type':'关联词',
                    'kw':cur_kw,
                    'SearchCount':item['search_volume'],
                    'WordNum':cur_kw_num,
                    'YN_kw':kw,
                    'update_time':cur_time,
                    'upload_time':cur_time
                }

                zdstr=','.join(pdata.keys())
                paras=list(pdata.values())
                wstr=','.join(['?' for _ in paras])
                aff+=tms.ExecNoQuery(f'insert into ShopeeeADTZKW({zdstr}) values({wstr})',paras)

        if qj_suc>0:
            # Mark the seed keyword as expanded so it is not re-processed.
            aff2=tms.ExecNoQuery('update ShopeeADKWInfo set is_tz=1,tz_time=? where id=? and is_tz=0',(cur_time,kid))
            print(f'店铺:《{dpName}》,{bid},第{j}个广告,第{kj}个词:《{kw}》,关联词列表长度:{len(listdata)},保存:{aff}个,更改采集状态:{aff2}')

            return aff

    else:
        logging.info(js_data)

async def js_update_shoppe_adkw(page:Page,basic_info,fail_items=None):
    """For each campaign of this shop, fetch its active keywords (bid /
    recommended price) plus today's per-keyword report metrics and upsert
    them into the ShopeeADKWInfo table.

    Args:
        page: pyppeteer page logged into the Shopee seller portal.
        basic_info: shop metadata dict ('BrowserID','DpName','UserName',
            'Password','GroupName','YYBM','SJBM','shopid', ...).
        fail_items: optional {product_id: [campaign_id, title]} mapping to
            retry; when None, the full campaign list is fetched.

    Returns:
        dict with 'suc' (products updated), 'fail' (products whose API
        calls failed) and 'fail_items' (mapping usable as `fail_items`
        on a retry call).
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    userName=basic_info['UserName']
    password=basic_info['Password']
    gname=basic_info['GroupName']
    yybm=basic_info['YYBM']
    sjbm=basic_info['SJBM']
    shopid= basic_info['shopid']
    if not shopid:
        # shopid missing from basic_info -> resolve it from the portal
        shopid=await js_req_shopid(page,basic_info)
    spdc,ck=await get_ck_and_spc_cds(page)
    params={'SPC_CDS':spdc,'SPC_CDS_VER':2}
    if fail_items:
        camp_dict=fail_items
    else:
        camp_dict=await get_camps_with_tit(page,basic_info)
    res={'suc':0,'fail':0,'fail_items':{}}
    pj=1
    for pid,camid_info in camp_dict.items():
        camid,ptit=camid_info
        # step 1: keyword list with bid / recommended prices (active only)
        api_url='https://seller.shopee.co.id/api/pas/v1/product/manual/list_keyword_with_recommended_price/'
        post_data={"campaign_id":int(camid),"need_recommended_price":True,"header":{}}
        kw_reports={}

        json_data= await make_request(page,api_url,params,post_data)
        if json_data:
            kw_kprs=json_data.get('data')
            if kw_kprs:
                for kw_kpr in kw_kprs:
                    if kw_kpr['keyword']['state']=='active':
                        cur_kw_info={}
                        curkw=kw_kpr['keyword']['keyword']
                        # prices come back in micro-units; /100000 -> currency
                        cur_kw_info['bid_price']=int(kw_kpr['keyword']['bid_price']/100000)
                        cur_kw_info['match_type']=kw_kpr['keyword']['match_type']
                        cur_kw_info['state']=kw_kpr['keyword']['state']
                        cur_kw_info['recommended_price']=int(kw_kpr['keyword']['recommended_price']/100000)
                        kw_reports[curkw]=cur_kw_info
        else:
            # request failed -> remember this product for a retry pass
            res['fail_items'][pid]=[camid,ptit]
            res['fail']+=1
            continue

        # step 2: per-keyword performance report for the current window
        st_time,ed_time=get_shopee_timestamp()
        api_url='https://seller.shopee.co.id/api/pas/v1/report/get/'
        post_data={"start_time":st_time,"end_time":ed_time,"campaign_type":"product",
                "agg_type":"keyword","filter_params":{"campaign_id":int(camid)},"need_ratio":True,"header":{}}
        json_data= await make_request(page,api_url,params,post_data)
        if json_data:
            kw_zbs=json_data.get('data')
            if kw_zbs:
                for kw_zb in kw_zbs:
                    curkw=kw_zb['key']
                    # only merge metrics for keywords kept in step 1 (active)
                    if kw_reports.get(curkw):
                        cur_kw_info=kw_reports[curkw]
                        cur_kw_info['CTR']=kw_zb['metrics']['ctr']
                        cur_kw_info['Clicks']=kw_zb['metrics']['click']
                        cur_kw_info['cost']=kw_zb['metrics']['cost']/100000
                        cur_kw_info['impression']=kw_zb['metrics']['impression']
                        cur_kw_info['direct_gmv']=kw_zb['metrics']['direct_gmv']/100000
                        cur_kw_info['direct_order_amount']=kw_zb['metrics']['direct_order_amount']
                        cur_kw_info['direct_cir']=kw_zb['metrics']['direct_cir']
                        kw_reports[curkw]=cur_kw_info
        else:
            res['fail_items'][pid]=[camid,ptit]
            res['fail']+=1
            continue
        # step 3: upsert one row per keyword (merge key: Product_ID + KW)
        suc=0
        for kw,kwinfo in kw_reports.items():

            kdata={
                'Product_ID':pid,
                'ShopID':shopid,
                'Campaign_ID':camid,
                'KW':kw,
                'bid_price':kwinfo['bid_price'],
                'recommended_price':kwinfo['recommended_price'],
                'Match_Type':kwinfo['match_type'],
                'KW_State':kwinfo['state'],
                # .get(): report metrics may be missing for a keyword
                'CTR':kwinfo.get('CTR'),
                'ACOS':kwinfo.get('direct_cir'),
                'GMV':kwinfo.get('direct_gmv'),
                'click':kwinfo.get('Clicks'),
                'expense':kwinfo.get('cost'),
                'impression':kwinfo.get('impression'),
                'conversion':kwinfo.get('direct_order_amount'),
                'bid':bid,
                'bname':dpName,
                'gName':gname,
                'Password':password,
                'UserName':userName,
                'SJBM':sjbm,
                'YYBM':yybm,
                'ptitle':ptit
            }
            zds=list(kdata.keys())
            paras=list(kdata.values())
            aff=tms.merge_into('ShopeeADKWInfo',zds,paras,['Product_ID','KW'],True)
            suc+=aff
        res['suc']+=1
        logging.info(f'店铺:《{dpName}》,{bid},第{pj}个pid:{pid},成功更新:{suc}个广告词')
        pj+=1
    return res

async def js_get_pro_trait_list(page:Page,basic_info,is_only_one=False):
    """Page through the PAS product selector and split the shop's products
    into those without any ad and those that already have one.

    A product counts as "no ad" when it has no trait_list, or when its only
    trait entry mentions 'npb_added'.

    Args:
        page: pyppeteer page logged into the seller portal.
        basic_info: shop metadata dict ('BrowserID','DpName','shopid','host',...).
        is_only_one: when True, stop after the first page of 50 entries.

    Returns:
        (p_no_ad, p_has_ad): two lists of raw product entry dicts.
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    shopid= basic_info['shopid']
    if not shopid:
        shopid=await js_req_shopid(page,basic_info)
    url=f'https://{basic_info["host"]}/api/pas/v1/setup_helper/product_selector/query/'
    params= await get_params(page,basic_info)
    random_uuid = str(uuid.uuid4())
    st_time=get_yn_0timestamp()
    p_no_ad=[]
    p_has_ad=[]
    # FIX: use a set (was a list) so the per-entry dedup check is O(1)
    itemids_isadd=set()
    ppp=1
    last_token=''
    while True:
        data = {"pagination":{"last_token":last_token,"limit":50},
                "order":None,"show_all_item":False,
                "filter":{"recommendation_type":None},
                "ads_information":{"product_placement":"all","end_time":0,"start_time":st_time,"bidding_strategy":"roi_two"},
                "reference_id":random_uuid,
                "header":{}
                }

        js_data = await make_request(page,url,params,data)
        if js_data:
            if js_data['code']==0:
                entry_list=js_data['data']['entry_list']
                for entry in entry_list:
                    cur_itemid= entry['item_id']
                    if cur_itemid not in itemids_isadd:
                        trait_list=entry.get('trait_list')
                        if not trait_list:
                            p_no_ad.append(entry)
                        else:
                            # a lone 'npb_added' trait still means "no real ad"
                            if 'npb_added' in trait_list[0] and len(trait_list)==1:
                                p_no_ad.append(entry)
                            else:
                                p_has_ad.append(entry)
                        itemids_isadd.add(cur_itemid)
                next_token=js_data['data']['next_page_token']
                logging.info(f'店铺:《{dpName}》,{bid},第{ppp}页,加载 {len(entry_list)} 个产品')
                if not next_token:
                    break
                if is_only_one:
                    break
                last_token=next_token
            else:
                # non-zero API code: stop paging, return what we have
                break
        else:
            # request failed entirely: stop paging, return what we have
            break

        ppp+=1

    logging.info(f'店铺:《{dpName}》,{bid},共{ppp}页,当前共{len(p_no_ad)+len(p_has_ad)}个广告产品,其中还没有广告的产品 {len(p_no_ad)} 个')
    return p_no_ad,p_has_ad

async def update_CreAD_Task(page:Page,basic_info,rw_msg,status='后'):
    """Refresh this shop's row in ShopeeCreadTask with the counts of
    products that still need an ad vs. already have one.

    Args:
        page: pyppeteer page logged into the seller portal.
        basic_info: shop metadata dict; 'ads_target_quantity' caps how many
            ads should be running.
        rw_msg: task status text; overridden when products remain.
        status: '前' (pre-run columns) or '后' (post-run columns).

    Returns:
        The list of products without ads, [] when the pre-run ad quota is
        already reached, or None when the ad listing could not be fetched.

    Raises:
        ValueError: if status is neither '前' nor '后' (previously a
        NameError further down).
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    userName=basic_info['UserName']
    password=basic_info['Password']
    gname=basic_info['GroupName']
    yybm=basic_info['YYBM']
    sjbm=basic_info['SJBM']
    max_count=basic_info['ads_target_quantity']
    shopid=await js_req_shopid(page,basic_info)
    res_ad=await js_get_pro_trait_list(page,basic_info)
    oning_count=await get_camp_info(page,basic_info,True)
    if res_ad:
        p_no_ad,p_has_ad=res_ad

        if status=='前':
            need_zd=['need_create_before','has_created_before']
            if len(p_no_ad)>0:
                rw_msg='正在运行'
        elif status=='后':
            need_zd=['need_create_after','has_created_after']
            if len(p_no_ad)>0:
                rw_msg=f'还有 {len(p_no_ad)} 个未创建'
        else:
            raise ValueError(f'unknown status: {status}')
        zds=['shopid','bid','BName','GroupName','userName','password','SJBM','YYBM','rw_msg']+need_zd
        paras=[shopid,bid,dpName,gname,userName,password,sjbm,yybm,rw_msg,len(p_no_ad),len(p_has_ad)]
        aff0=tms.merge_into('ShopeeCreadTask',zds,paras,['bid'],True)
        logging.info(f'店铺:《{dpName}》,{bid},更新创建{status}任务:{aff0}')
        if status=='前' and max_count and oning_count>=max_count:
            return []
        return p_no_ad
    else:
        zds=['shopid','bid','BName','GroupName','userName','password','SJBM','YYBM','rw_msg']
        paras=[shopid,bid,dpName,gname,userName,password,sjbm,yybm,'获取广告信息错误']
        # BUG FIX: this failure record was built but never written to the DB
        aff0=tms.merge_into('ShopeeCreadTask',zds,paras,['bid'],True)
        logging.info(f'店铺:《{dpName}》,{bid},更新创建{status}任务:{aff0}')

async def update_adtype(page:Page,basic_info,camp_id):
    """Switch one campaign's product placement to 'search_product'.

    Returns 1 on success, 0 when the API answers with a non-zero code,
    and None when the request itself fails.
    """
    browser_id=basic_info['BrowserID']
    shop_name=basic_info['DpName']
    edit_url=f'https://{basic_info["host"]}/api/pas/v1/product/edit/'
    query=await get_params(page,basic_info)
    payload={
            "campaign_id":int(camp_id),
            "type":"change_product_placement",
            "change_product_placement":{"product_placement":"search_product"},
            "header":{}
        }

    resp=await make_request(page,edit_url,query,payload)
    if not resp:
        return
    if resp['code']!=0:
        logging.info(resp)
        logging.info(f'店铺:《{shop_name}》,{browser_id},广告:{camp_id},更改广告类型失败=>{resp}')
        return 0
    logging.info(f'店铺:《{shop_name}》,{browser_id},广告:{camp_id},更改广告类型成功')
    return 1

async def update_all_adtype(page:Page,basic_info):
    """Walk every campaign and, for those still on placement 'all' that
    were created within the last 7 days, switch them to 'search_product'.

    Returns the number of campaigns successfully changed.
    """
    browser_id=basic_info['BrowserID']
    shop_name=basic_info['DpName']
    entries=await get_entry_list(page,basic_info)
    changed=0
    failed=0
    now_ts=int(time.time())
    for camp in entries:
        if camp['manual_product_ads']['product_placement']!='all':
            continue
        campaign_id=camp['campaign']['campaign_id']
        item_id=str(camp['manual_product_ads']['item_id'])  # kept for parity; unused below
        # ceil of elapsed seconds / 86400 -> campaign age in whole days
        age_days=math.ceil((now_ts-camp['campaign']['start_time'])/86400)
        if age_days>7:
            continue
        result=await update_adtype(page,basic_info,campaign_id)
        if result:
            changed+=result
        else:
            failed+=1

    logging.info(f'店铺:《{shop_name}》,{browser_id},更改广告至search_product,成功:{changed},失败:{failed}')

    return changed

async def js_get_ads_credit(page:Page,basic_info):
    """Fetch the shop's remaining ads credit.

    The API returns micro-units; the result is divided by 100000 and
    rounded to 2 decimals. Returns None when the request fails.
    """
    browser_id=basic_info['BrowserID']
    shop_name=basic_info['DpName']
    meta_url=f'https://{basic_info["host"]}/api/pas/v1/meta/get_ads_data/'
    query=await get_params(page,basic_info)
    payload={"info_type_list":["ads_credit","ads_account","ads_expense"]}

    resp=await make_request(page,meta_url,query,payload)
    if not resp:
        return
    credit=round(resp['data']['ads_credit']['total']/100000,2)
    logging.info(f'店铺:《{shop_name}》,{browser_id},获取广告余额:{credit}')
    return credit

async def js_change_daily(page:Page,basic_info,campid_list):
    """Mass-edit the daily budget of the given campaigns to the fixed
    value 1100000000 micro-units (i.e. 11000) via the homepage mass_edit
    endpoint.

    Args:
        page: pyppeteer page logged into seller.shopee.co.id.
        basic_info: shop metadata dict with 'BrowserID' and 'DpName'.
        campid_list: sequence of campaign ids (str or int).

    Returns:
        True when the API answered with code==0, False otherwise.
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    spdc,ck=await get_ck_and_spc_cds(page)

    url='https://seller.shopee.co.id/api/pas/v1/homepage/mass_edit/'

    params={
        'SPC_CDS':spdc,
        'SPC_CDS_VER': 2
    }
    budget_log_key=f'budget_log_{uuid.uuid4()}'
    data = {"campaign_id_list":[int(campid) for campid in campid_list],
            "type":"change_budget",
            "change_budget":{
                "page":"page_homepage",
                "daily_budget":1100000000,
                "budget_log_key":budget_log_key
            }
            }

    js_data=await make_request(page,url,params,data)
    suc=False
    if js_data:
        if js_data['code']==0:
            logging.info(f'店铺:《{dpName}》,{bid},成功更正{len(campid_list)} 个广告预算')
            suc=True
        else:
            # BUG FIX: non-zero API codes used to be completely silent
            logging.info(f'店铺:《{dpName}》,{bid},更正广告预算失败=>{js_data}')
    else:
        # BUG FIX: original printed None and logged the contradictory
        # message "成功更正预算失败" here
        logging.info(f'店铺:《{dpName}》,{bid},更正广告预算失败,接口无响应')

    return suc

def fix_excel_xml(input_path, output_path):
    """Repair an .xlsx whose worksheet <pane> elements carry an invalid
    ``activePane`` attribute (strict readers reject unknown values).

    The workbook is unzipped to a temp dir, every xl/worksheets/*.xml is
    rewritten with invalid activePane values reset to 'topLeft', and the
    tree is re-zipped to ``output_path`` (which may equal ``input_path``).
    """
    ns_main = 'http://schemas.openxmlformats.org/spreadsheetml/2006/main'
    # Keep spreadsheetml as the default namespace on rewrite; otherwise
    # ElementTree emits ns0: prefixes on every element.
    ET.register_namespace('', ns_main)
    valid_panes = {'bottomLeft', 'topRight', 'topLeft', 'bottomRight'}
    with TemporaryDirectory() as tmpdir:
        # unpack the original workbook
        with zipfile.ZipFile(input_path, 'r') as zip_ref:
            zip_ref.extractall(tmpdir)

        # sanitize every worksheet XML
        worksheets_dir = os.path.join(tmpdir, 'xl', 'worksheets')
        if os.path.isdir(worksheets_dir):  # guard: some archives have no sheets
            for sheet_file in os.listdir(worksheets_dir):
                if not sheet_file.endswith('.xml'):
                    continue
                sheet_path = os.path.join(worksheets_dir, sheet_file)
                tree = ET.parse(sheet_path)
                for pane in tree.getroot().findall('.//ns:pane', {'ns': ns_main}):
                    if pane.get('activePane') not in valid_panes:
                        pane.set('activePane', 'topLeft')  # default valid value
                tree.write(sheet_path, encoding='UTF-8', xml_declaration=True)

        # repack; BUG FIX: compress like a real xlsx (entries were written
        # STORED/uncompressed before)
        with zipfile.ZipFile(output_path, 'w', compression=zipfile.ZIP_DEFLATED) as zip_out:
            # renamed from 'root' to avoid shadowing the XML root element name
            for dirpath, _dirs, files in os.walk(tmpdir):
                for file in files:
                    file_path = os.path.join(dirpath, file)
                    arcname = os.path.relpath(file_path, tmpdir)
                    zip_out.write(file_path, arcname)

async def js_downlond_mb(page:Page,basic_info,mbid):
    """Download the official mass-upload Excel template for the category
    mapped to ``mbid``, save it under 官方模版表/ and repair its XML.

    Returns the local path of the saved template file.
    """
    browser_id=basic_info['BrowserID']
    shop_name=basic_info['DpName']
    spdc,ck=await get_ck_and_spc_cds(page)
    query={
        'category_id':MBID_CatID_dict.get(str(mbid)),
        'timestamp':int(time.time()*1000),
        'SPC_CDS':spdc,
        'SPC_CDS_VER':2
        }
    download_url='https://seller.shopee.co.id/api/tool/mass_product/download_template/'
    byte_values=await js_get_cot(page,download_url,query)
    logging.info(f'店铺:《{shop_name}》,bid:{browser_id},模版:{mbid},下载长度{len(byte_values)}')
    execel_path=f'官方模版表/{browser_id}_{mbid}.xlsx'
    # persist the raw bytes, then sanitize the workbook XML in place
    with open(execel_path,'wb') as f:
        f.write(bytes(byte_values))
    fix_excel_xml(execel_path,execel_path)
    return execel_path

async def js_downlond_basic_mb(page:Page,basic_info):
    """Download the generic (basic) mass-upload Excel template, save it
    under 官方模版表/ and repair its XML.

    Returns the local path, or None when the download failed.
    """
    browser_id=basic_info['BrowserID']
    shop_name=basic_info['DpName']
    spdc,ck=await get_ck_and_spc_cds(page)
    query={
        'timestamp':int(time.time()*1000),
        'SPC_CDS':spdc,
        'SPC_CDS_VER':2
        }
    download_url='https://seller.shopee.co.id/api/tool/mass_product/download_basic_template/'
    byte_values=await js_get_cot(page,download_url,query)
    if not byte_values:
        return
    logging.info(f'店铺:《{shop_name}》,{browser_id},通用模版,下载长度{len(byte_values)}')
    execel_path=f'官方模版表/{browser_id}_通用模版.xlsx'
    # persist the raw bytes, then sanitize the workbook XML in place
    with open(execel_path,'wb') as f:
        f.write(bytes(byte_values))
    fix_excel_xml(execel_path,execel_path)
    return execel_path

async def get_params(page,basic_info):
    """Build the common query-string params for seller-portal API calls.

    Cross-border shops ('is_KJ'==1) additionally need the shop id and a
    two-letter region code derived from the shop's country.
    """
    spdc,ck=await get_ck_and_spc_cds(page)
    country=basic_info['Country']
    query={
        'SPC_CDS': spdc,
        'SPC_CDS_VER': 2,
    }
    if basic_info['is_KJ']==1:
        region_by_country={
            '印尼':'id',
            '新加坡':'sg',
            '泰国':'th',
            '越南':'vn',
            '马来西亚':'my'
        }
        shopid=await js_req_shopid(page,basic_info)
        query['cnsc_shop_id']=int(shopid)
        query['cbsc_shop_region']=region_by_country.get(country)
    return query

async def js_get_rec_kw(page:Page,basic_info,item_id,ptit):
    """Fetch Shopee's recommended keywords for a product and keep only the
    ones an external matcher (get_gdppkw) judges relevant to the title.

    Args:
        page: pyppeteer page logged into the seller portal.
        basic_info: shop metadata dict ('BrowserID','DpName','host',...).
        item_id: product (item) id to get recommendations for.
        ptit: product title the keywords are matched against.

    Returns:
        List of {"bid_price","keyword","match_type"} dicts ready for the
        publish API (bid_price in micro-units), an empty list when nothing
        matched, or None when the recommendation API call failed.
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    url=f'https://{host}/api/pas/v1/setup_helper/list_recommended_keyword/'
    bid_price=300
    params=await get_params(page,basic_info)

    data={
            "campaign_type":"product",
            "item_id":int(item_id),
            "suggest_log_data":{"page":"suggest_creation","campaign_id":None}
        }

    json_data=await make_request(page,url,params,data)

    if not json_data:
        return
    keyword_list=[]
    is_pp=0
    no_pp=0
    # NOTE(review): this shadows the module-level `session`; a fresh pooled
    # session is built per call for the threaded matcher requests below
    session=requests.session()
    session.headers=DS_Headers
    adapter = HTTPAdapter(pool_connections=50, pool_maxsize=50)
    session.mount('https://', adapter)
    session.mount('http://', adapter)
    # first pass requires >=2-word keywords; if none match, retry accepting
    # single-word keywords as well
    for word_num in range(2,0,-1):


        with thddd.ThreadPoolExecutor(max_workers=20) as t:
            tasks=[]
            kj=1
            for kwinfo in json_data['data']:
                ad_kw=kwinfo['keyword']
                if len(ad_kw.split())<word_num:
                    logging.info(f'第 {kj}个 关键词 {ad_kw},单词数量小于 {word_num},跳过')
                    kj+=1
                    continue
                ques=f'关键词 {ad_kw} 产品标题 {ptit} 关键词跟标题的匹配度高不高，适不适合用来做广告关键词？只需要回答：高度匹配或者比较匹配或者不匹配'
                tasks.append(t.submit(get_gdppkw,session,ques,kj,ad_kw))
                kj+=1
            # collect matcher verdicts as they complete (order not preserved)
            for t0 in thddd.as_completed(tasks):
                res_kj,res_kw=t0.result()
                if res_kw:
                    logging.info(f'第 {res_kj}个 关键词 {res_kw},与标题高度匹配,符合')
                    keyword_list.append({"bid_price":bid_price*100000,"keyword":res_kw,"match_type":"exact"})
                    is_pp+=1
                else:
                    logging.info(f'第 {res_kj}个关键词,与标题不匹配')
                    no_pp+=1

        if keyword_list:
            break

    logging.info(f"店铺:《{dpName}》,{bid},共 {len( json_data['data'])} 个推荐词,其中符合匹配:{is_pp} 个,不符合匹配:{no_pp} 个")

    return keyword_list
    
async def js_publish_ad(page:Page,basic_info,item_id,pname):
    """Create a manual-bidding product ad for ``item_id`` using the
    AI-matched recommended keywords from js_get_rec_kw.

    Returns the raw publish-API response, or a synthetic
    {'code': ..., 'msg': ...} dict when keyword lookup / publishing fails.
    """
    browser_id=basic_info['BrowserID']
    shop_name=basic_info['DpName']
    publish_url=f'https://{basic_info["host"]}/api/pas/v1/product/publish/'
    query=await get_params(page,basic_info)
    ref_id=str(uuid.uuid4())
    start_time=get_yn_0timestamp()
    keyword_list=await js_get_rec_kw(page,basic_info,item_id,pname)
    daily_budget=11000     # currency units; API expects micro-units (*100000)
    dis_bid_price=150      # display-location bid, same unit convention
    if keyword_list is None:
        return {'code':-1,'msg':'获取推荐词接口错误'}
    if not keyword_list:
        return {'code':-2,'msg':'无推荐词'}
    logging.info(f'店铺:《{shop_name}》,{browser_id},获取推荐词:{len(keyword_list)} 个')
    payload={
            "reference_id":ref_id,
            "campaign":{
                "daily_budget":daily_budget*100000,
                "product_placement":"all",
                "ecpc":False,
                "keyword_list":keyword_list,
                "display_location":{
                    "daily_discover":{"state":"inactive","bid_price":dis_bid_price*100000},
                    "you_may_also_like":{"state":"active","bid_price":dis_bid_price*100000}
                },
                "bidding_strategy":"manual",
                "product_selection":"manual",
                "start_time":start_time,
                "end_time":0,
                "new_product_boost_toggle":False
            },
            "ads_list":[{"item_id":int(item_id)}],
            "header":{}
            }
    resp=await make_request(page,publish_url,query,payload)
    return resp if resp else {'code':-1,'msg':'发布广告接口错误'}

async def js_update_kw_report(page:Page,basic_info,camp):
    """Pull one campaign's active keywords with yesterday ('yd') and
    last-3-month ('3m') performance metrics and upsert them into
    ShopeeKWReport, tracking historical maxima per keyword.

    Args:
        page: pyppeteer page logged into the seller portal.
        basic_info: shop metadata dict.
        camp: (product_id, campaign_id, product_title).

    Returns:
        List of the per-keyword row dicts that were upserted, or None when
        any upstream API call failed.
    """
    # FIX: removed duplicated bid/dpName assignments present in the original
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    userName=basic_info['UserName']
    password=basic_info['Password']
    gname=basic_info['GroupName']
    yybm=basic_info['YYBM']
    sjbm=basic_info['SJBM']
    host=basic_info['host']
    pid,camid,ptit=camp
    shopid=await js_req_shopid(page,basic_info)
    params=await get_params(page,basic_info)
    # step 1: bid / recommended prices; keep active keywords only
    api_url=f'https://{host}/api/pas/v1/product/manual/list_keyword_with_recommended_price/'
    post_data={"campaign_id":int(camid),"need_recommended_price":True,"header":{}}
    kw_reports={}
    json_data= await make_request(page,api_url,params,post_data)
    if json_data:
        kw_kprs=json_data.get('data')
        if kw_kprs:
            for kw_kpr in kw_kprs:
                if kw_kpr['keyword']['state']=='active':
                    cur_kw_info={}
                    curkw=kw_kpr['keyword']['keyword']
                    # prices arrive in micro-units; /100000 -> currency
                    cur_kw_info['bid_price']=kw_kpr['keyword']['bid_price']/100000
                    cur_kw_info['match_type']=kw_kpr['keyword']['match_type']
                    cur_kw_info['state']=kw_kpr['keyword']['state']
                    cur_kw_info['recommended_price']=kw_kpr['keyword']['recommended_price']/100000
                    kw_reports[curkw]=cur_kw_info
        # FIX: len(None) raised TypeError here when 'data' was missing
        logging.info(f'店铺:《{dpName}》,{bid},广告:{camid},成功获取 {len(kw_kprs) if kw_kprs else 0} 个 出价数据,其中active:{len(kw_reports.keys())} 个')
    else:
        return

    # step 2: per-keyword metrics for both reporting windows
    api_url=f'https://{host}/api/pas/v1/report/get/'

    for yyy in ['yesterday','last_three_month']:

        hz='yd' if yyy=='yesterday' else '3m'
        st_time,ed_time=get_shopee_timestamp(yyy)
        post_data={
                    "start_time":st_time,
                    "end_time":ed_time,
                    "campaign_type":"product",
                    "agg_type":"keyword",
                    "filter_params":{"campaign_id":int(camid)},
                    "need_ratio":True,"header":{}
                }
        json_data =await make_request(page,api_url,params,post_data)
        if json_data:
            kw_zbs=json_data.get('data')
            if kw_zbs:
                for kw_zb in kw_zbs:
                    curkw=kw_zb['key']
                    # only merge metrics for keywords kept in step 1
                    if kw_reports.get(curkw):
                        cur_kw_info=kw_reports[curkw]
                        cur_kw_info[f'CTR_{hz}']=kw_zb['metrics']['ctr']
                        cur_kw_info[f'Clicks_{hz}']=kw_zb['metrics']['click']
                        cur_kw_info[f'cost_{hz}']=kw_zb['metrics']['cost']/100000
                        cur_kw_info[f'impression_{hz}']=kw_zb['metrics']['impression']
                        cur_kw_info[f'direct_gmv_{hz}']=kw_zb['metrics']['direct_gmv']/100000
                        cur_kw_info[f'direct_order_amount_{hz}']=kw_zb['metrics']['direct_order_amount']
                        cur_kw_info[f'direct_cir_{hz}']=kw_zb['metrics']['direct_cir']
                        cur_kw_info[f'impression_change_{hz}']=kw_zb['ratio']['impression']
                        kw_reports[curkw]=cur_kw_info
                logging.info(f'店铺:《{dpName}》,{bid},广告:{camid},成功获取 {yyy},{len(kw_zbs)} 个 报表数据')
        else:
            return

    # step 3: upsert rows; mark everything '正在更新' first so stale rows can
    # be recognized, then delete non-active leftovers at the end
    kw_info_list=[]
    # FIX: parameterized (was an f-string interpolating pid into the SQL)
    aff_ck_state=tms.ExecNoQuery("update ShopeeKWReport set KW_State='正在更新' where Product_ID=? ",(pid,))
    logging.info(f'店铺:《{dpName}》,{bid},广告:{camid},将 {aff_ck_state} 个词状态标记为正在更新')
    aff=0
    aff_up_max=0
    for kw,kwinfo in kw_reports.items():

        kdata={
            'Product_ID':pid,
            'ShopID':shopid,
            'Campaign_ID':camid,
            'pname':ptit,
            'purl':f"https://{host.replace('seller.','')}/{shopid}/{pid}/",
            'ad_url':f'https://seller.shopee.co.id/portal/marketing/pas/product/manual/{camid}',
            'KW':kw,
            'bid_price':kwinfo['bid_price'],
            'Match_Type':kwinfo['match_type'],
            'KW_State':kwinfo['state'],
            'CTR_yd':kwinfo.get('CTR_yd'),
            'ACOS_yd':kwinfo.get('direct_cir_yd'),
            'GMV_yd':kwinfo.get('direct_gmv_yd'),
            'click_yd':kwinfo.get('Clicks_yd'),
            'expense_yd':kwinfo.get('cost_yd'),
            'impression_yd':kwinfo.get('impression_yd'),
            'conversion_yd':kwinfo.get('direct_order_amount_yd'),
            'impression_change_yd':kwinfo.get('impression_change_yd'),
            'CTR_3m':kwinfo.get('CTR_3m'),
            'ACOS_3m':kwinfo.get('direct_cir_3m'),
            'GMV_3m':kwinfo.get('direct_gmv_3m'),
            'click_3m':kwinfo.get('Clicks_3m'),
            'expense_3m':kwinfo.get('cost_3m'),
            'impression_3m':kwinfo.get('impression_3m'),
            'conversion_3m':kwinfo.get('direct_order_amount_3m'),
            'impression_change_3m':kwinfo.get('impression_change_3m'),
            'bid':bid,
            'bname':dpName,
            'gName':gname,
            'Password':password,
            'UserName':userName,
            'SJBM':sjbm,
            'YYBM':yybm,
            'PingTai':'印尼虾皮'
        }
        rs=tms.ExecQuerydict('select * from ShopeeKWReport where Product_ID=? and KW=?',(pid,kw))

        if len(rs)==0:
            # first sighting: seed the historical maxima with today's values
            # (conversion_max intentionally tracks the 3-month figure)
            kdata['bid_price_max']=kdata['bid_price']
            kdata['CTR_max']=kdata['CTR_yd']
            kdata['ACOS_max']=kdata['ACOS_yd']
            kdata['GMV_max']=kdata['GMV_yd']
            kdata['click_max']=kdata['click_yd']
            kdata['expense_max']=kdata['expense_yd']
            kdata['impression_max']=kdata['impression_yd']
            kdata['conversion_max']=kdata['conversion_3m']
            kdata['impression_change_max']=kdata['impression_change_yd']

        else:
            old_kdata=rs[0]
            # refresh the max snapshot when yesterday's impressions beat it
            old_imp_max=old_kdata['impression_max']
            yd_imp_max=kdata['impression_yd']
            if old_imp_max is None or (yd_imp_max is not None and yd_imp_max>old_imp_max):
                kdata['bid_price_max']=kdata['bid_price']
                kdata['CTR_max']=kdata['CTR_yd']
                kdata['ACOS_max']=kdata['ACOS_yd']
                kdata['GMV_max']=kdata['GMV_yd']
                kdata['click_max']=kdata['click_yd']
                kdata['expense_max']=kdata['expense_yd']
                kdata['impression_max']=kdata['impression_yd']
                kdata['impression_change_max']=kdata['impression_change_yd']
            old_cd_max=old_kdata['conversion_max']
            cd_3m_max=kdata['conversion_3m']
            if old_cd_max is None or (cd_3m_max is not None and cd_3m_max>old_cd_max):
                kdata['conversion_max']=kdata['conversion_3m']

            add_task_time=old_kdata['add_task_time']
            if add_task_time:
                # BUG FIX: the operands were swapped (add_task_time - now),
                # which made the elapsed time negative and kept is_add_task
                # stuck at 1 forever; expire the flag after 72000s (20h)
                time_jg=time.time()-add_task_time.timestamp()
                if time_jg>72000:
                    kdata['is_add_task']=0
                else:
                    kdata['is_add_task']=1
            else:
                kdata['is_add_task']=0
        zds=list(kdata.keys())
        paras=list(kdata.values())
        aff+=tms.merge_into('ShopeeKWReport',zds,paras,['Product_ID','KW'],True)
        kw_info_list.append(kdata)
    # FIX: parameterized (was an f-string interpolating pid into the SQL)
    aff_del=tms.ExecNoQuery("delete from ShopeeKWReport  where Product_ID=? and KW_State!='active' ",(pid,))
    logging.info(f'店铺:《{dpName}》,{bid},广告:{camid},更新上传{aff}个活跃词,删除了 {aff_del} 个非活跃词')
    return kw_info_list

async def js_mass_edit_kw(page:Page,basic_info,camp_id,kw_item):
    """Apply one keyword edit to a campaign via mass_edit_keyword.

    Args:
        page: pyppeteer page logged into the seller portal.
        basic_info: shop metadata dict ('BrowserID','DpName','host',...).
        camp_id: campaign id (str or int).
        kw_item: (action, keyword, bid_price) where action is one of
            'add', 'del', 'edit_price', 'edit_ktype', 'rs'.

    Returns:
        The raw API JSON response, or None when the request failed.

    Raises:
        ValueError: on an unknown action (previously this crashed later
        with an unbound-local NameError).
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    host=basic_info['host']
    action,kw,bid_price=kw_item
    url=f'https://{host}/api/pas/v1/product/manual/mass_edit_keyword/'
    params=await get_params(page,basic_info)

    if action=='add':
        keyword_edit_list=[{"keyword":kw,"type":"add","add":{"bid_price":bid_price*100000,"match_type":"exact"}}]
    elif action=='del':
        keyword_edit_list=[{"keyword":kw,"type":"delete"}]
    elif action=='edit_price':
        keyword_edit_list=[{"keyword":kw,"type":"change_bid_price","change_bid_price":{"price":int(float(bid_price)*100000)}}]
    elif action=='edit_ktype':
        keyword_edit_list=[{"keyword":kw,"type":"change_match_type","change_match_type":{"match_type":"exact"}}]
    elif action=='rs':
        # 30000000 micro-units == bid price of 300
        keyword_edit_list=[{"keyword":kw,"type":"restore","restore":{"price":30000000}}]
    else:
        # BUG FIX: unknown actions previously fell through and raised
        # NameError on keyword_edit_list below
        raise ValueError(f'unknown keyword action: {action}')

    data={
            "campaign_id":int(camp_id),
            "keyword_edit_list":keyword_edit_list,
            "header":{}
        }

    json_data=await make_request(page,url,params,data)

    return json_data

async def update_campaign(page:Page,basic_info):
    """Fetch up to 500 of this shop's manual product campaigns and upsert
    one row per product into the campaign_YN table (merge key: campaign_id).

    Campaigns with placement 'targeting' or state 'closed' are skipped, and
    only the first campaign seen per item_id is recorded.

    Returns:
        Number of rows written, or None when the listing API call failed.
    """
    bid=basic_info['BrowserID']
    dpName=basic_info['DpName']
    shopid=await js_req_shopid(page,basic_info)
    # NOTE(review): dpName is re-assigned here, duplicating the line above
    dpName=basic_info['DpName']
    userName=basic_info['UserName']
    gname=basic_info['GroupName']
    sjbm=basic_info['SJBM']
    yybm=basic_info['YYBM']
    spdc,ck=await get_ck_and_spc_cds(page)
    st_time,ed_time=get_shopee_timestamp()
    offset=0
    limit=500
    campaign_dict={}
    api_url='https://seller.shopee.co.id/api/pas/v1/homepage/query/'
    params={
        'SPC_CDS':spdc,
        'SPC_CDS_VER': 2
    }

    data={
        "start_time":st_time,
        "end_time":ed_time,
        "filter":{"campaign_type":"new_cpc_homepage","state":"all","search_term":""},
        "offset":offset,
        "limit":limit
    }

    json_data=await make_request(page,api_url,params,data)

    if not json_data:
        return

    total_count=json_data['data']['total']
    entry_list=json_data['data']['entry_list']
    aff=0
    for entry in entry_list:
        ad_state=entry['state']
        product_placement=entry['manual_product_ads']['product_placement']
        if product_placement!='targeting' and ad_state!='closed':
            campaign_id=str(entry['campaign']['campaign_id'])
            item_id=str(entry['manual_product_ads']['item_id'])
            start_time=entry['campaign']['start_time']
            end_time=entry['campaign']['end_time']
            # monetary values arrive in micro-units; /100000 -> currency
            daily_budget=round(entry['campaign']['daily_budget']/100000)
            now_time=int(time.time())
            trait_list=entry['trait_list']
            rp_data_dict=entry['report']
            ptitle=entry['title']
            # extract the trailing 8-char code from titles like '...|ABCD1234|'
            rcode=None
            rcode_mat= re.search(r'.*\|(.{8})\|$',ptitle)
            if rcode_mat:
                rcode=rcode_mat.group(1)
            is_sold_out=0
            # only record the first (most recent) campaign per product
            if not campaign_dict.get(item_id):
                if 'item_sold_out' in trait_list:
                    is_sold_out=1
                # campaign age in whole days, rounded up
                create_days=math.ceil((now_time-start_time)/86400)
                campaign_dict[item_id]=copy.copy(entry)

                edata={
                    'bid':bid,
                    'shopid':shopid,
                    'bname':dpName,
                    'gname':gname,
                    'campaign_id':campaign_id,
                    'product_id':item_id,
                    'product_placement':product_placement,
                    'title':ptitle,
                    'random_code':rcode,
                    'daily_budget':daily_budget,
                    'impressions':rp_data_dict['impression'],
                    'clicks':rp_data_dict['click'],
                    'CTR':rp_data_dict['ctr'],
                    'expense':round(rp_data_dict['cost']/100000),
                    'GMV':round(rp_data_dict['broad_gmv']/100000),
                    'ROAS':rp_data_dict['broad_roi'],
                    'ACOS':rp_data_dict['broad_cir'],
                    'conversions':rp_data_dict['broad_order'],
                    'items_sold':rp_data_dict['broad_order_amount'],
                    'conversion_rate':rp_data_dict['cr'],
                    'cost_per_conversion':round(rp_data_dict['cpc']/100000),
                    'direct_GMV':round(rp_data_dict['direct_gmv']/100000),
                    'direct_ROAS':rp_data_dict['direct_roi'],
                    'direct_ACOS':rp_data_dict['direct_cir'],
                    'direct_conversions':rp_data_dict['direct_order'],
                    'direct_items_sold':rp_data_dict['direct_order_amount'],
                    'direct_conversion_rate':rp_data_dict['direct_cr'],
                    'cost_per_direct_conversion':round(rp_data_dict['cpdc']/100000),
                    'start_time':start_time,
                    'end_time':end_time,
                    'create_days':create_days,
                    'image':f'https://down-id.img.susercontent.com/file/{entry["image"]}',
                    'state':ad_state,
                    'SJBM':sjbm,
                    'YYBM':yybm,
                    'UserName':userName,
                    'is_sold_out':is_sold_out
                }

                zds=list(edata.keys())
                paras=list(edata.values())
                aff+=tms.merge_into('campaign_YN',zds,paras,['campaign_id'],True)

    logging.info(f'店铺:《{dpName}》,{bid},共有 {len(campaign_dict.keys())} 个广告,成功更新:{aff}个')

    return aff

async def js_xp_del_pro(page:Page,basic_info,itemid_list):
    """Delete a batch of products via Shopee's v3 delete_product API.

    Args:
        page: pyppeteer Page already logged in to the seller portal; the
            request is executed in the page's browser context.
        basic_info: session metadata dict, passed to get_params to build the
            authenticated query parameters.
        itemid_list: list of product ids to delete in one call.

    Returns:
        Parsed JSON response from the endpoint, or None if make_request
        exhausts its retries.
    """
    # Note: unused locals bid/dpName from the original were removed.
    params=await get_params(page,basic_info)
    params['version']='3.1.0'
    api_url='https://seller.shopee.co.id/api/v3/product/delete_product/'
    data={"product_id_list":itemid_list}
    # Supplying data makes make_request issue a POST with a JSON body.
    json_data=await make_request(page,api_url,params,data)

    return json_data

async def js_xp_get_affiliate(page:Page,basic_info,citem):
    """Crawl Shopee affiliate creators (KOLs) for one category into ShopeeKOL.

    Enumerates every 3-character keyword over [0-9a-z] starting at the resume
    index kw_ind, pages the creator-list API 500 rows at a time, and
    merge-inserts every previously unseen creator. After each keyword that
    saved rows, the resume index (cj_ind) is checkpointed in
    ShopeeKOLCategory so a restart can continue where it left off. The crawl
    aborts once 30 errors have accumulated.

    Args:
        page: pyppeteer Page logged in on the affiliate platform host.
        basic_info: dict providing at least 'host' and 'PingTai'.
        citem: (category_id, category_name, keyword_resume_index) tuple.

    Returns:
        int: creators saved for the last keyword processed (0 if none ran).
    """
    # Unused locals bid/dpName from the original were removed.
    host=basic_info['host']
    PingTai=basic_info['PingTai']
    params=await get_params(page,basic_info)
    params['version']='3.1.0'
    api_url=f'https://{host}/api/v3/affiliateplatform/creator/list'

    # Index 0 of both lookup lists maps the API's 0/"unset" value to None.
    audience_age_range_list=[None,'<13','13-17','18-22','23-32','33-42','43-52','>52']
    gender_list=[None,'男','女']

    charset = '0123456789abcdefghijklmnopqrstuvwxyz'
    kws = [''.join(p) for p in itertools.product(charset, repeat=3)]
    err_count=0
    # Fixed: pre-initialize so the final return cannot raise NameError when
    # kw_ind already points past the end of kws.
    save_count=0
    cat_id,cat_name,kw_ind=citem
    logging.info(f'类目:{cat_name},还剩 {len(kws[kw_ind:])} 个 词待采集...')
    for kw in kws[kw_ind:]:
        random_uuid = str(uuid.uuid4())
        save_count=0
        p=1


        while True:
            data={
                    "offset":(p-1)*500,
                    "page_type":"ams_kol_marketplace",
                    "limit":500,
                    "request_id":random_uuid,
                    "is_liked_kol":False,
                    "affiliate_name":kw,
                    "category_id_list":[cat_id],
                    "order_by":1
                }
            json_data=await make_request(page,api_url,params,data)
            if json_data and json_data['code']==0:
                try:

                    kol_list=json_data['data']['list']
                    if not kol_list:
                        logging.info(f'类目:{cat_name},关键词:《{kw}》,已经是最后一页,退出')
                        break

                    for kol_item in kol_list:
                        # Skip creators already collected (affiliate_id is the merge key).
                        rs_dr=tms.ExecQuery('select count(*) from ShopeeKOL where affiliate_id=?',(kol_item['affiliate_id'],))
                        if rs_dr[0][0]>0:
                            continue
                        kdata={
                            'affiliate_id':kol_item['affiliate_id'],
                            'shopee_user_id':kol_item['shopee_user_id'],
                            'username':kol_item['username'],
                            'display_name':kol_item['display_name'],
                            'avatar':kol_item['avatar'],
                            'country':kol_item['country'],
                            'city':kol_item['city'],
                            'registration_time':timestamp_to_sql_datetime(kol_item['registration_time']),
                            'audience_gender_type':gender_list[kol_item['audience_gender_type']],
                            'audience_age_range_type':audience_age_range_list[kol_item['audience_age_range_type']],
                            'total_follower':kol_item['total_follower'],
                            'total_click':kol_item['total_click'],
                            'order_count':kol_item['order'],
                            # Monetary API fields arrive scaled by 100000.
                            'GMV':round(kol_item['gmv']/100000),
                            'is_in_kol_whitelist':kol_item['is_in_kol_whitelist'],
                            'is_in_free_sample':kol_item['is_in_free_sample'],
                            # Fixed: previously copied is_in_free_sample by mistake.
                            'is_target_specific_kol':kol_item['is_target_specific_kol'],
                            'is_orange_tick_kol':kol_item['is_orange_tick_kol'],
                            'is_liked_kol':kol_item['is_liked_kol'],
                            'is_good_fulfillment':kol_item['is_good_fulfillment'],
                            'is_ppp_kol':kol_item['is_ppp_kol'],
                            'is_chat_view':kol_item['is_chat_view'],
                            'is_negotiable':kol_item['is_negotiable'],
                            'source':kol_item['source'],
                            'max_price':kol_item['max_price']/100000 if kol_item['max_price'] else kol_item['max_price'],
                            # Fixed: key was duplicated as 'max_price', so min_price was
                            # silently overwriting max_price and never stored itself.
                            'min_price':kol_item['min_price']/100000 if kol_item['min_price'] else kol_item['min_price'],
                            # Fixed: values previously read from order_range while the
                            # -1 sentinel guard checked gmv_range.
                            'gmv_range_min':round(kol_item['gmv_range'][0]/100000) if kol_item['gmv_range'][0]!=-1 else None,
                            'gmv_range_max':round(kol_item['gmv_range'][1]/100000) if kol_item['gmv_range'][1]!=-1 else None,
                            'order_range_min':kol_item['order_range'][0] if kol_item['order_range'][0]!=-1 else None,
                            'order_range_max':kol_item['order_range'][1] if kol_item['order_range'][1]!=-1 else None,
                            'popular_website_url':kol_item['popular_social_media']['website_url'],
                            # Fixed: this key appeared twice verbatim; kept once.
                            'popular_follower_count':kol_item['popular_social_media']['follower_count'],
                            'popular_social_user_name':kol_item['popular_social_media']['social_media_user_name'],
                            'cat_id':cat_id,
                            'cat_name':cat_name,
                            'PingTai':PingTai
                            }

                        zds=list(kdata.keys())
                        paras=list(kdata.values())

                        save_count+=tms.merge_into('ShopeeKOL',zds,paras,['affiliate_id'],True)
                    logging.info(f'类目:{cat_name},关键词:《{kw}》,第 {p} 页,采集达人 {len(kol_list)} 个')
                    p+=1
                except Exception as e:
                    traceback.print_exc()
                    print(e)
                    err_count+=1
            else:
                # NOTE(review): repeat_goto is imported from tool and appears to be
                # called elsewhere as repeat_goto(page, url) — confirm this call is
                # not missing the page argument.
                await repeat_goto(f'https://{host}')
                err_count+=1
                await asyncio.sleep(3)

            if err_count>=30:
                break


        if save_count>0:
            cur_ind=kws.index(kw)+1
            aff_ind=tms.ExecNoQuery('update ShopeeKOLCategory set cj_ind=? where category_id=?',(cur_ind,cat_id))
            logging.info(f'类目:{cat_name},关键词:《{kw}》,共采集保存:{save_count} 个达人,更改索引至:{cur_ind},状态:{aff_ind}')

        if err_count>=30:
            break

        await asyncio.sleep(5)

    return save_count