import traceback
import pyautogui
import os
import time
import pyperclip as clip
import json
import pandas as pd
from datetime import datetime
from sqlalchemy import create_engine
import pypinyin
from bs4 import BeautifulSoup
import random
from jsjj.util.configUtil import *
from jsjj.email.sendEmail import *
from jsjj.util.dbUtil import *
from jsjj.util.emailUtil import *
from jsjj.util.logUtil import *
from jsjj.util.timeUtil import *
from jsjj.util.mouseMoveUtil import *

runMode = '日模式'  # "daily mode" — the only run mode this script supports (see SPGK)
# Pinduoduo - merchant backend - data center - service data - shop navigator;
# also used as the destination table name and the e-mail subject tag.
modeName = '拼多多_商家后台_服务数据_店铺领航员'


# Click through the left-hand navigation down to a first-level category;
# this step is not part of any loop.
def SPGK(engine, conn):
    """Scrape the Pinduoduo merchant-backend "shop navigator" service data.

    Drives the browser via pyautogui: opens the relevant backend pages,
    opens DevTools (F12), filters network traffic for the
    ``queryMallNavigatorInfo`` request, copies its JSON response through
    the clipboard, flattens it into a single-row DataFrame and appends it
    to the MySQL table named by ``modeName``.

    NOTE(review): all ``moveTo`` coordinates are hard-coded for one
    specific screen resolution / DevTools layout — confirm on the target
    machine before reuse.
    NOTE(review): this module does ``from datetime import datetime`` yet
    calls ``datetime.datetime.now()`` below; presumably one of the
    ``from jsjj.util.* import *`` star imports rebinds ``datetime`` to the
    module — verify, otherwise these calls raise AttributeError.

    :param engine: SQLAlchemy engine (unused here except by convention).
    :param conn: live DB connection used for the ``to_sql`` append.
    """
    # Disable the fail-safe so the blocking overlay that DianBa pops up
    # every night cannot abort the run when the cursor hits a corner.
    pyautogui.FAILSAFE = False

    sleep(2)
    pyautogui.scroll(10000)
    # Left nav -- [Logistics tools]
    viaCommonUrl('https://mms.pinduoduo.com/orders/order/carriage/list')

    sleep(4)
    # Open browser DevTools.
    pyautogui.keyDown('f12')
    pyautogui.keyUp('f12')

    # Left nav -- [Service data] (URL may change over time?)
    viaCommonUrl('https://mms.pinduoduo.com/sycm/goods_quality/pilot')

    jsonFromWebShopName = getSjhtShopName()

    # ============== DevTools (F12) network filter box
    pyautogui.moveTo(1571, 167, 6)
    pyautogui.doubleClick()

    # Filter for the navigator request: copy to clipboard first …
    clip.copy('queryMallNavigatorInfo')  # copy first
    pyautogui.hotkey('ctrl', 'v')  # … then paste into the filter box

    # Select the (single) filtered request entry
    pyautogui.moveTo(1540, 325, 6)
    pyautogui.click()

    # Click inside the JSON response pane
    pyautogui.moveTo(1812, 497, 6)
    pyautogui.click()

    sleep(2)
    pyautogui.hotkey('ctrl', 'a')

    sleep(2)
    pyautogui.hotkey('ctrl', 'c')

    # Read the copied response from the clipboard.
    # NOTE(review): "queryGoodsPageRT" in the original comment looks like a
    # copy-paste leftover; the filtered request is queryMallNavigatorInfo.
    sleep(2)
    value = clip.paste()
    jsonFromWeb1 = json.loads(value)

    # Flatten result -> one-row column dict (each value wrapped in a list).
    # Keys mirror the API response fields one-to-one.
    jsonDf = {"readyTime": [jsonFromWeb1['result']['readyTime']],
              "scoreDsc": [jsonFromWeb1['result']['scoreDsc']],
              "dsrHide": [jsonFromWeb1['result']['dsrHide']],
              "showFlag": [jsonFromWeb1['result']['showFlag']],
              "stplName": [jsonFromWeb1['result']['stplName']],
              "scoreRk": [jsonFromWeb1['result']['scoreRk']],
              "scoreRegionRank": [jsonFromWeb1['result']['scoreRegionRank']],
              "gmvRk": [jsonFromWeb1['result']['gmvRk']],
              "gmvLevelRk": [jsonFromWeb1['result']['gmvLevelRk']],
              "gmvLevelRk4p5Standard": [jsonFromWeb1['result']['gmvLevelRk4p5Standard']],
              "gmvLevelRk5Standard": [jsonFromWeb1['result']['gmvLevelRk5Standard']],
              "avgDescRevScr3m": [jsonFromWeb1['result']['avgDescRevScr3m']],
              "avgDescRevScr3mRank": [jsonFromWeb1['result']['avgDescRevScr3mRank']],
              "descRevScr3mThreshold": [jsonFromWeb1['result']['descRevScr3mThreshold']],
              "avgDescRevScr3mThreshold": [jsonFromWeb1['result']['avgDescRevScr3mThreshold']],
              "avgDescRevScr3m4p5Standard": [jsonFromWeb1['result']['avgDescRevScr3m4p5Standard']],
              "avgDescRevScr3m5Standard": [jsonFromWeb1['result']['avgDescRevScr3m5Standard']],
              "avgLgstRevScr3m": [jsonFromWeb1['result']['avgLgstRevScr3m']],
              "avgLgstRevScr3mRank": [jsonFromWeb1['result']['avgLgstRevScr3mRank']],
              "lgstRevScr3mThreshold": [jsonFromWeb1['result']['lgstRevScr3mThreshold']],
              "avgLgstRevScr3mThreshold": [jsonFromWeb1['result']['avgLgstRevScr3mThreshold']],
              "avgLgstRevScr3m4p5Standard": [jsonFromWeb1['result']['avgLgstRevScr3m4p5Standard']],
              "avgLgstRevScr3m5Standard": [jsonFromWeb1['result']['avgLgstRevScr3m5Standard']],
              "lgstGotTimelyRto1m": [jsonFromWeb1['result']['lgstGotTimelyRto1m']],
              "lgstGotTimelyRto1mRank": [jsonFromWeb1['result']['lgstGotTimelyRto1mRank']],
              "lgstGotTimelyRto1mThreshold": [jsonFromWeb1['result']['lgstGotTimelyRto1mThreshold']],
              "lgstGotTimelyRto1m4p5Standard": [jsonFromWeb1['result']['lgstGotTimelyRto1m4p5Standard']],
              "lgstGotTimelyRto1m5Standard": [jsonFromWeb1['result']['lgstGotTimelyRto1m5Standard']],
              "nfkAvgSignTime1m": [jsonFromWeb1['result']['nfkAvgSignTime1m']],
              "nfkAvgSignTime1mRank": [jsonFromWeb1['result']['nfkAvgSignTime1mRank']],
              "nfkAvgSignTime1mThreshold": [jsonFromWeb1['result']['nfkAvgSignTime1mThreshold']],
              "nfkAvgSignTime1m4p5Standard": [jsonFromWeb1['result']['nfkAvgSignTime1m4p5Standard']],
              "nfkAvgSignTime1m5Standard": [jsonFromWeb1['result']['nfkAvgSignTime1m5Standard']],
              "avgSlfSucRfProcTime1mMr": [jsonFromWeb1['result']['avgSlfSucRfProcTime1mMr']],
              "avgSlfSucRfProcTime1mMrRank": [jsonFromWeb1['result']['avgSlfSucRfProcTime1mMrRank']],
              "avgSlfSucRfProcTime1mMrThreshold": [jsonFromWeb1['result']['avgSlfSucRfProcTime1mMrThreshold']],
              "avgSlfSucRfProcTime1mMr4p5Standard": [jsonFromWeb1['result']['avgSlfSucRfProcTime1mMr4p5Standard']],
              "avgSlfSucRfProcTime1mMr5Standard": [jsonFromWeb1['result']['avgSlfSucRfProcTime1mMr5Standard']],
              "avgSlfSucRfProcTime1mMgr": [jsonFromWeb1['result']['avgSlfSucRfProcTime1mMgr']],
              "avgSlfSucRfProcTime1mMgrRank": [jsonFromWeb1['result']['avgSlfSucRfProcTime1mMgrRank']],
              "avgSlfSucRfProcTime1mMgrThreshold": [jsonFromWeb1['result']['avgSlfSucRfProcTime1mMgrThreshold']],
              "avgSlfSucRfProcTime1mMgr4p5Standard": [jsonFromWeb1['result']['avgSlfSucRfProcTime1mMgr4p5Standard']],
              "avgSlfSucRfProcTime1mMgr5Standard": [jsonFromWeb1['result']['avgSlfSucRfProcTime1mMgr5Standard']],
              "rplyUsrRto5min": [jsonFromWeb1['result']['rplyUsrRto5min']],
              "rplyUsrRto5minRank": [jsonFromWeb1['result']['rplyUsrRto5minRank']],
              "rplyUsrRto5minThreshold": [jsonFromWeb1['result']['rplyUsrRto5minThreshold']],
              "rplyUsrRto3min": [jsonFromWeb1['result']['rplyUsrRto3min']],
              "rplyUsrRto3minRank": [jsonFromWeb1['result']['rplyUsrRto3minRank']],
              "rplyUsrRto3minThreshold": [jsonFromWeb1['result']['rplyUsrRto3minThreshold']],
              "rplyUsrRto3min4p5Standard": [jsonFromWeb1['result']['rplyUsrRto3min4p5Standard']],
              "rplyUsrRto3min5Standard": [jsonFromWeb1['result']['rplyUsrRto3min5Standard']],
              "dsptSucRfOrdrRto1m": [jsonFromWeb1['result']['dsptSucRfOrdrRto1m']],
              "dsptSucRfOrdrRto1mRank": [jsonFromWeb1['result']['dsptSucRfOrdrRto1mRank']],
              "dsptSucRfOrdrRto1mThreshold": [jsonFromWeb1['result']['dsptSucRfOrdrRto1mThreshold']],
              "dsptSucRfOrdrRto1m4p5Standard": [jsonFromWeb1['result']['dsptSucRfOrdrRto1m4p5Standard']],
              "dsptSucRfOrdrRto1m5Standard": [jsonFromWeb1['result']['dsptSucRfOrdrRto1m5Standard']],
              "nfkAvgCfmShpTime1m": [jsonFromWeb1['result']['nfkAvgCfmShpTime1m']],
              "nfkAvgCfmShpTime1mRank": [jsonFromWeb1['result']['nfkAvgCfmShpTime1mRank']],
              "nfkAvgCfmShpTime1mThreshold": [jsonFromWeb1['result']['nfkAvgCfmShpTime1mThreshold']],
              "nfkAvgCfmShpTime1m4p5Standard": [jsonFromWeb1['result']['nfkAvgCfmShpTime1m4p5Standard']],
              "nfkAvgCfmShpTime1m5Standard": [jsonFromWeb1['result']['nfkAvgCfmShpTime1m5Standard']],
              "mallStarToc": [jsonFromWeb1['result']['mallStarToc']],
              "mallStarTomms": [jsonFromWeb1['result']['mallStarTomms']],
              "isBfc": [jsonFromWeb1['result']['isBfc']],
              "isRps": [jsonFromWeb1['result']['isRps']],
              "isRequireBfc": [jsonFromWeb1['result']['isRequireBfc']],
              "isRequireRps": [jsonFromWeb1['result']['isRequireRps']],
              "bfcOrdrRt": [jsonFromWeb1['result']['bfcOrdrRt']],
              "bfcOrdrRt5Standard": [jsonFromWeb1['result']['bfcOrdrRt5Standard']],
              "isRps5Standard": [jsonFromWeb1['result']['isRps5Standard']]
              }
    # Round-trip through json.dumps + read_json to build the DataFrame
    # (pd.DataFrame(jsonDf) would be the direct form, but read_json may
    # coerce types differently — left as-is to preserve behavior).
    df = pd.read_json(json.dumps(jsonDf))
    df['statDate'] = datetime.datetime.now()
    df['shopName'] = jsonFromWebShopName['result']['username']

    # Bookkeeping columns (Chinese names match the destination table schema).
    df['运行模式'] = runMode
    df['统计月'] = ''
    df['统计周'] = ''
    df['统计日'] = ''
    df['统计日周月'] = ''
    df['插入时间'] = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    # Only daily mode is actually used; the month/week branches are dead
    # while runMode stays '日模式'.
    if runMode == '月模式':
        df['统计月'] = str(datetime.datetime.now().month - 1) + "月"
        df['统计日周月'] = df['统计月']
    if runMode == '周模式':
        df['统计周'] = getLastWeekOfYear(datetime.datetime.now().year, datetime.datetime.now().month,
                                      datetime.datetime.now().day)
        df['统计日周月'] = df['统计周']
    if runMode == '日模式':
        df['statDate'] = (datetime.datetime.now() + datetime.timedelta(days=-1)).strftime(
            "%Y-%m-%d")
        df['统计日'] = (datetime.datetime.now() + datetime.timedelta(days=-1)).strftime(
            "%Y-%m-%d %H:%M:%S")  # minus one day: the scrape reports yesterday's data
        df['统计日周月'] = df['统计日']

    # Ping/reconnect to guard against a timed-out MySQL connection
    # (bugfix 2020-08-18).
    conn.connection.connection.ping(reconnect=True)
    df.to_sql(name='拼多多_商家后台_服务数据_店铺领航员', con=conn, if_exists='append', index=False)

    # Close DevTools and scroll/reset the GUI for the next run.
    resetGuiAtLastStepCloseF12ToTop()

    return


def deleteDuplicationData(engine):
    """Delete duplicate rows from the navigator table.

    Different accounts can surface the same category, so two runs on the
    same day may insert identical rows; rows sharing (statDate, shopName)
    are collapsed, keeping only the one with the highest id.

    Bug fix: the original built the DELETE statement but never executed
    it, so duplicates were silently left in place.

    :param engine: SQLAlchemy engine pointing at the reporting database.
    """
    # modeName is a module constant (not user input), so interpolating it
    # into the identifier position is safe here.
    sql = ('delete from a '
           'using ' + modeName + ' as a, ' + modeName + ' as b '
           'where (a.id < b.id) '
           'and (a.statDate = b.statDate and a.shopName = b.shopName)')

    # text() + engine.begin() works on both SQLAlchemy 1.4 and 2.0 and
    # commits the delete.
    from sqlalchemy import text
    with engine.begin() as connection:
        connection.execute(text(sql))


def executeCatchWeb(engine, conn):
    """Run the scrape end-to-end and report the outcome by e-mail.

    Logs the task, captures the navigator page (SPGK), de-duplicates the
    table, then sends a success mail; on any exception an alert mail is
    sent instead and the traceback is printed.

    :param engine: SQLAlchemy engine for logging/mail helpers.
    :param conn: live DB connection passed through to the scrape.
    """
    try:
        LogTaskAndMachine('拼多多_商家后台_服务数据_店铺领航员', engine, conn, '', runMode)
        SPGK(engine, conn)
        deleteDuplicationData(engine)
    except Exception as e:
        traceback.print_exc()
        script_name = os.path.split(__file__)[-1]
        subject = '【异常中断】' + modeName
        detail = '异常:' + str(e) + '|报错文件:' + script_name
        sendAlert(subject, detail, engine, conn)
        print(subject, detail)
        return
    sendFinalSuccessEmail('●正常完成●' + modeName, '', engine, conn, modeName, runMode)


if __name__ == '__main__':
    # Connect to the database, then run the full scrape pipeline.
    engine, conn = getConn()
    executeCatchWeb(engine, conn)
