import traceback
import pyautogui
import os
import time
import pyperclip as clip
import json
import pandas as pd
from datetime import datetime
from sqlalchemy import create_engine
from jsjj.util.configUtil import *
from jsjj.util.dbUtil import *
from jsjj.util.emailUtil import *
from jsjj.util.logUtil import *
from jsjj.util.timeUtil import *
from jsjj.util.mouseMoveUtil import *

# Run mode: one of '日模式' (daily) / '周模式' (weekly) / '月模式' (monthly);
# controls which statistics columns SPGK fills before writing to the DB.
runMode = '周模式'
# Pinduoduo - merchant backend - traffic data - traffic dashboard
# (this dataset is NOT merged with any other data)
modeName = '拼多多_商家后台_流量数据_流量看板'
# Clicks down the left-hand menu to the first-level category; this step is
# not part of the scraping loop.
def SPGK(engine, conn):
    getcurrentPriority()
    # 关闭电霸每天晚上会弹出的一个屏蔽页面
    pyautogui.FAILSAFE = False

    # 菜单看不到，因此要移动一下才能看到
    sleep(6)
    pyautogui.scroll(10000)

    # 左边--【物流工具】
    viaCommonUrl('https://mms.pinduoduo.com/orders/order/carriage/list')

    # sleep(4)
    # pyautogui.scroll(-50)
    #
    # sleep(4)
    # pyautogui.keyDown('f12')
    # pyautogui.keyUp('f12')
    #
    # # 左边--【流量数据】
    # pyautogui.moveTo(168, 996, 6)
    # pyautogui.click()
    #
    # # 页面重新移上去
    #
    # pyautogui.moveTo(1439, 550, 6)
    # pyautogui.scroll(500)
    # sleep(2)
    sleep(2)
    pyautogui.keyDown('f12')
    pyautogui.keyUp('f12')
    sleep(2)
    # ************修改为url方式，菜单位置总是变化【流量数据】************************************************************
    viaCommonUrl('https://mms.pinduoduo.com/sycm/search_data/plate')

    jsonFromWebShopName = getSjhtShopName()

    # ==============F12的filter过滤框
    pyautogui.moveTo(1502, 167, 6)
    pyautogui.doubleClick()

    # 过滤（实时数据）
    clip.copy('queryMallFlowRtData')  # 先复制
    pyautogui.hotkey('ctrl', 'v')  # 再粘贴

    # 选中过滤文件
    pyautogui.moveTo(1540, 325, 6)
    pyautogui.click()

    # 点中json值区域
    pyautogui.moveTo(1812, 497, 6)
    pyautogui.click()

    sleep(2)
    pyautogui.hotkey('ctrl', 'a')

    sleep(2)
    pyautogui.hotkey('ctrl', 'c')

    # 读取剪切板内容queryGoodsPageRT
    sleep(2)
    value = clip.paste()
    jsonFromWeb1 = json.loads(value)

    # fix bug 移开json区域
    pyautogui.moveTo(1466, 186, 4)

    # ==============F12的filter过滤框
    pyautogui.moveTo(1571, 167, 6)
    pyautogui.doubleClick()

    # 过滤（只有前30天的数据），返回结果中有 statDate
    clip.copy('queryMallFlowOverViewList')  # 先复制
    pyautogui.hotkey('ctrl', 'v')  # 再粘贴

    # 选中过滤文件（第二个文件才是对的），由于取了list，因此我们 又是取的第一个文件
    pyautogui.moveTo(1539, 325, 6)
    pyautogui.click()

    # 点中json值区域
    pyautogui.moveTo(1812, 497, 6)
    pyautogui.click()

    sleep(2)
    pyautogui.hotkey('ctrl', 'a')

    sleep(2)
    pyautogui.hotkey('ctrl', 'c')

    # 读取剪切板内容
    sleep(2)
    value = clip.paste()
    jsonFromWeb2 = json.loads(value)

    # ????
    # jsonDf = {'guvRt': [jsonFromWeb1['result']['guvRt']],
    #           'gpvRt': [jsonFromWeb1['result']['gpvRt']],
    #           'mpvRt': [jsonFromWeb1['result']['mpvRt']],
    #           'muvRt': [jsonFromWeb1['result']['muvRt']],

              # 'cfmOrdrUsrCnt': [jsonFromWeb2['result']['cfmOrdrUsrCnt']],
              # 'cfmOrdrUsrCntPct': [jsonFromWeb2['result']['cfmOrdrUsrCntPct']],
              # 'cfmOrdrUsrCntIsPercent': [jsonFromWeb2['result']['cfmOrdrUsrCntIsPercent']],
              # 'cfmOrdrCnt': [jsonFromWeb2['result']['cfmOrdrCnt']],
              # 'cfmOrdrCntPct': [jsonFromWeb2['result']['cfmOrdrCntPct']],
              # 'cfmOrdrCntIsPercent': [jsonFromWeb2['result']['cfmOrdrCntIsPercent']],
              # 'cfmOrdrAmt': [jsonFromWeb2['result']['cfmOrdrAmt']],
              # 'cfmOrdrAmtPct': [jsonFromWeb2['result']['cfmOrdrAmtPct']],
              # 'cfmOrdrAmtIsPercent': [jsonFromWeb2['result']['cfmOrdrAmtIsPercent']],
              # 'cfmOrdrAup': [jsonFromWeb2['result']['cfmOrdrAup']],
              # 'cfmOrdrAupPct': [jsonFromWeb2['result']['cfmOrdrAupPct']],
              # 'cfmOrdrAupIsPercent': [jsonFromWeb2['result']['cfmOrdrAupIsPercent']],
              # 'uv': [jsonFromWeb2['result']['uv']],
              # 'uvPct': [jsonFromWeb2['result']['uvPct']],
              # 'uvIsPercent': [jsonFromWeb2['result']['uvIsPercent']],
              # 'pv': [jsonFromWeb2['result']['pv']],
              # 'pvPct': [jsonFromWeb2['result']['pvPct']],
              # 'pvIsPercent': [jsonFromWeb2['result']['pvIsPercent']],
              # 'guv': [jsonFromWeb2['result']['guv']],
              # 'guvPct': [jsonFromWeb2['result']['guvPct']],
              # 'guvIsPercent': [jsonFromWeb2['result']['guvIsPercent']],
              # 'gpv': [jsonFromWeb2['result']['gpv']],
              # 'gpvPct': [jsonFromWeb2['result']['gpvPct']],
              # 'gpvIsPercent': [jsonFromWeb2['result']['gpvIsPercent']],
              # 'cfmUvRto': [jsonFromWeb2['result']['cfmUvRto']],
              # 'cfmUvRtoPct': [jsonFromWeb2['result']['cfmUvRtoPct']],
              # 'cfmUvRtoIsPercent': [jsonFromWeb2['result']['cfmUvRtoIsPercent']],
              # 'uvCfmVal': [jsonFromWeb2['result']['uvCfmVal']],
              # 'uvCfmValPct': [jsonFromWeb2['result']['uvCfmValPct']],
              # 'uvCfmValIsPercent': [jsonFromWeb2['result']['uvCfmValIsPercent']],
              # 'payUvRto': [jsonFromWeb2['result']['payUvRto']],
              # 'payUvRtoPct': [jsonFromWeb2['result']['payUvRtoPct']],
              # 'payUvRtoIsPercent': [jsonFromWeb2['result']['payUvRtoIsPercent']],
              # 'payOrdrAup': [jsonFromWeb2['result']['payOrdrAup']],
              # 'payOrdrAupPct': [jsonFromWeb2['result']['payOrdrAupPct']],
              # 'payOrdrAupIsPercent': [jsonFromWeb2['result']['payOrdrAupIsPercent']]
              #}
    df = pd.read_json(json.dumps(jsonFromWeb2['result']))
    df['guvRt'] = jsonFromWeb1['result']['guvRt']
    df['gpvRt'] = jsonFromWeb1['result']['gpvRt']
    df['mpvRt'] = jsonFromWeb1['result']['mpvRt']
    df['muvRt'] = jsonFromWeb1['result']['muvRt']
    # df['statDate'] = datetime.datetime.now()   df应该取上面的值

    df['shopName'] = jsonFromWebShopName['result']['username']

    df['运行模式'] = runMode
    df['统计月'] = ''
    df['统计周'] = ''
    df['统计日'] = ''
    df['统计日周月'] = ''
    df['插入时间'] = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    if runMode == '月模式':
        df['统计月'] = str(datetime.datetime.now().month - 1) + "月"
        df['统计日周月'] = df['统计月']
    if runMode == '周模式':
        df['统计周'] = getLastWeekOfYear(datetime.datetime.now().year, datetime.datetime.now().month,
                                      datetime.datetime.now().day)
        df['统计日周月'] = df['统计周']
    if runMode == '日模式':
        df['统计日'] = (datetime.datetime.now() + datetime.timedelta(days=-1)).strftime(
            "%Y-%m-%d %H:%M:%S")  # 减一天，统计的是昨天的数据
        df['统计日周月'] = df['统计日']
    # 解决可能出现的超时问题bugfix 2020.08.18
    conn.connection.connection.ping(reconnect=True)
    df.to_sql(name='拼多多_商家后台_流量数据_流量看板', con=conn, if_exists='append', index=False)


    resetGuiAtLastStepCloseF12ToTop()

    return

def deleteDuplicationData(engine):
    """Remove duplicate dashboard rows from the target table.

    Different accounts can share the same category, so identical rows for
    the same day may be captured more than once; for every duplicate
    (statDate, shopName) pair only the row with the largest id survives.

    :param engine: SQLAlchemy engine used to execute the DELETE.
    """
    # The statement is assembled from the original fragments, whitespace kept
    # byte-for-byte, so the executed SQL is unchanged.
    fragments = (
        'delete from a                                                              ',
        '    using 拼多多_商家后台_流量数据_流量看板 as a, 拼多多_商家后台_流量数据_流量看板 as b   ',
        '    where (a.id < b.id)                                                    ',
        '    and (a.statDate = b.statDate and a.shopName=b.shopName )     ',
    )
    engine.execute(''.join(fragments))

def executeCatchWeb(engine, conn):
    """Run one full capture cycle: log the task, scrape, dedupe, notify.

    On any exception the traceback is printed, an alert email is sent and the
    function returns early; on success a completion email is sent instead.

    :param engine: SQLAlchemy engine passed through to the helpers.
    :param conn: SQLAlchemy connection passed through to the helpers.
    """
    script_name = os.path.split(__file__)[-1]
    try:
        LogTaskAndMachine('拼多多_商家后台_流量数据_流量看板', engine, conn, '', runMode)
        SPGK(engine, conn)
        deleteDuplicationData(engine)
    except Exception as e:
        traceback.print_exc()
        # Build the alert subject/body once; same strings as the original.
        subject = '【异常中断】' + modeName
        detail = '异常:' + str(e) + '|报错文件:' + script_name
        sendAlert(subject, detail, engine, conn)
        print(subject, detail)
        return
    sendFinalSuccessEmail('●正常完成●' + modeName, '', engine, conn, modeName, runMode)


if __name__ == '__main__':
    # Script entry point: obtain the shared engine/connection pair (getConn
    # comes from the jsjj.util wildcard imports) and run one capture cycle.
    engine, conn = getConn()
    executeCatchWeb(engine, conn)
