import traceback
import pyautogui
import time
import pyperclip as clip
import json
import os
import pandas as pd
import datetime
from sqlalchemy import create_engine
import pypinyin
from bs4 import BeautifulSoup
import random
from jsjj.util.configUtil import *
from jsjj.util.dbUtil import *
from jsjj.util.emailUtil import *
from jsjj.util.logUtil import *
from jsjj.util.timeUtil import *
from jsjj.util.mouseMoveUtil import *
import math
import uuid

# Run mode. The page only exposes "last 30 days" data — there is no input on
# the page for choosing a specific day, week, or month.
runMode = '日模式'  # daily mode
# Data center -> service data -> after-sales data + customer-service data
modeName = '拼多多_商家后台_服务数据_评价数据_近30天商品评价'  # this script writes 4 tables
globalUUID = str(uuid.uuid1())  # one UUID per run, used to tell runs apart

# Tables written by this script:
# 拼多多_商家后台_服务数据_评价数据_店铺DSR
# 拼多多_商家后台_服务数据_售后数据_top退款商品
# 拼多多_商家后台_服务数据_售后数据

# Left-side clicks navigate down to the first-level category; that part runs
# once and is outside the paging loop.

def _stamp_period_columns(df):
    """Stamp the shared run-mode / period / insert-time columns onto *df* in place.

    Depending on the module-level ``runMode``, exactly one of the period
    columns (month / week / day) is filled in and mirrored into '统计日周月'.
    """
    df['运行模式'] = runMode
    df['统计月'] = ''
    df['统计周'] = ''
    df['统计日'] = ''
    df['统计日周月'] = ''
    df['插入时间'] = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    if runMode == '月模式':
        df['统计月'] = str(datetime.datetime.now().month - 1) + "月"
        df['统计日周月'] = df['统计月']
    if runMode == '周模式':
        df['统计周'] = getLastWeekOfYear(datetime.datetime.now().year, datetime.datetime.now().month,
                                       datetime.datetime.now().day)
        df['统计日周月'] = df['统计周']
    if runMode == '日模式':
        # Minus one day: the statistics describe yesterday's data.
        df['统计日'] = (datetime.datetime.now() + datetime.timedelta(days=-1)).strftime(
            "%Y-%m-%d %H:%M:%S")
        df['统计日周月'] = df['统计日']


def _capture_filtered_json(filterText, filterPos=(1571, 167), filePos=(1540, 325)):
    """Filter the DevTools network panel and copy one response body as JSON.

    Types *filterText* into the DevTools filter box, clicks the first matching
    request, selects and copies the JSON response area, then parses the
    clipboard content.  *filterPos* / *filePos* are the screen coordinates of
    the filter box and of the request entry; they differ slightly between
    pages, hence the parameters.
    """
    # ============== the F12 filter text box
    pyautogui.moveTo(filterPos[0], filterPos[1], 6)
    pyautogui.doubleClick()

    clip.copy(filterText)  # copy first ...
    pyautogui.hotkey('ctrl', 'v')  # ... then paste

    # Select the filtered request entry
    pyautogui.moveTo(filePos[0], filePos[1], 6)
    pyautogui.click()

    # Click into the JSON response value area
    pyautogui.moveTo(1812, 497, 6)
    pyautogui.click()

    sleep(2)
    pyautogui.hotkey('ctrl', 'a')

    sleep(2)
    pyautogui.hotkey('ctrl', 'c')

    # Read the clipboard content
    sleep(2)
    return json.loads(clip.paste())


def SPGK(engine, conn):
    """Scrape the Pinduoduo merchant-backend service-data pages via GUI automation.

    Captures four DevTools responses (after-sales data, customer-service data,
    TOP refund goods, shop DSR) plus the paged 30-day goods evaluations, and
    appends everything to four MySQL tables through *conn*.
    """
    # Disable the corner fail-safe; a blocker page that "电霸" pops up every
    # night would otherwise abort the run.
    pyautogui.FAILSAFE = False

    sleep(6)
    pyautogui.scroll(10000)

    # Left side -- [logistics tools]
    viaCommonUrl('https://mms.pinduoduo.com/orders/order/carriage/list')

    # Left side -- [service data]
    viaCommonUrl('https://mms.pinduoduo.com/sycm/goods_quality/pilot')

    sleep(4)
    pyautogui.keyDown('f12')
    pyautogui.keyUp('f12')

    # Top -- [after-sales data]
    pyautogui.moveTo(491, 234, 6)
    pyautogui.click()

    jsonFromWebShopName = getSjhtShopName()

    # After-sales data
    jsonFromWeb1 = _capture_filtered_json('querySaleQualityDetailInfo')

    # Click the customer-service data tab
    pyautogui.moveTo(664, 231, 6)
    pyautogui.click()

    # A different request now: the customer-service tab data
    jsonFromWeb2 = _capture_filtered_json('querySpecifiedDayServiceQuality')

    # =================================== TOP refund goods (last 30 days): one page, no paging
    # Click the evaluation tab at the top
    pyautogui.moveTo(578, 233, 6)
    pyautogui.click()

    jsonFromWeb4 = _capture_filtered_json('querySaleQualityTopGoodsDetailList')

    # =================================== goods evaluation (last 30 days): a paged list
    # queryMallDsrVO-> ; the first matching entry must be selected (x=1534)
    jsonFromWeb5DSR = _capture_filtered_json('queryMallDsrVOList', filePos=(1534, 325))

    # ======= first page =======

    # **************************************** switch the paged list's page size ****************************************
    # clear network
    pyautogui.moveTo(1540, 146, 4)
    pyautogui.click()

    pyautogui.moveTo(1135, 659, 6)
    sleep(2)

    pyautogui.scroll(-5000)

    # Open the page-size selector
    pyautogui.moveTo(1021, 954, 4)
    pyautogui.click()

    # Pick "40 per page" (the site actually caps it at 20)
    pyautogui.moveTo(1008, 868, 4)
    pyautogui.click()

    # ************************************************************************************************************************

    jsonFromWeb6FirstPage = _capture_filtered_json('queryGoodsEvaluateVO')

    dfFirstPage = pd.read_json(json.dumps(jsonFromWeb6FirstPage['result']['goodsEvaluates']))
    dfFirstPage['statDate'] = datetime.datetime.now()
    dfFirstPage['shopName'] = jsonFromWebShopName['result']['username']
    dfFirstPage['pageNumber'] = 0

    jsonDf = {"statDate": [jsonFromWeb1['result']['statDate']],
              "dsptRfSucOrdrCnt1m": [jsonFromWeb1['result']['dsptRfSucOrdrCnt1m']],
              "dsptRfSucRto1m": [jsonFromWeb1['result']['dsptRfSucRto1m']],
              "pltInvlOrdrRto1m": [jsonFromWeb1['result']['pltInvlOrdrRto1m']],
              "rfSucRto1m": [jsonFromWeb1['result']['rfSucRto1m']],
              "avgSucRfProcTime1m": [jsonFromWeb1['result']['avgSucRfProcTime1m']],
              "qurfOrdCnt1m": [jsonFromWeb1['result']['qurfOrdCnt1m']],
              "qurfOrdRto1m": [jsonFromWeb1['result']['qurfOrdRto1m']],
              "sucRfOrdrAmt1d": [jsonFromWeb1['result']['sucRfOrdrAmt1d']],
              "sucRfOrdrCnt1d": [jsonFromWeb1['result']['sucRfOrdrCnt1d']],
              "pltInvlOrdrCnt1m": [jsonFromWeb1['result']['pltInvlOrdrCnt1m']],
              "ssslAvgSucRfProcTime1mPct": [jsonFromWeb1['result']['ssslAvgSucRfProcTime1mPct']],
              "bestSsslAvgSucRfProcTime1m": [jsonFromWeb1['result']['bestSsslAvgSucRfProcTime1m']],
              "passSsslAvgSucRfProcTime1m": [jsonFromWeb1['result']['passSsslAvgSucRfProcTime1m']],
              "checkInsurancePunish": [jsonFromWeb1['result']['checkInsurancePunish']],
              "freePunishOrder": [jsonFromWeb1['result']['freePunishOrder']],
              "avgSlfSucRfProcTime1mMgr": [jsonFromWeb1['result']['avgSlfSucRfProcTime1mMgr']],
              "avgSlfSucRfProcTime1mMr": [jsonFromWeb1['result']['avgSlfSucRfProcTime1mMr']],
              "checkRfPunish": [jsonFromWeb1['result']['checkRfPunish']],

              "rplyUsrRto5min1d": [jsonFromWeb2['result']['customerServiceTrendVO']['rplyUsrRto5min1d']],
              "in3minRplyUsrRto1d": [jsonFromWeb2['result']['customerServiceTrendVO']['in3minRplyUsrRto1d']],
              "avgRplyTime1d": [jsonFromWeb2['result']['customerServiceTrendVO']['avgRplyTime1d']],
              "cfmOrdrAmt3d": [jsonFromWeb2['result']['customerServiceTrendVO']['cfmOrdrAmt3d']],
              "inqorUsrCnt1dRatio": [jsonFromWeb2['result']['customerServiceTrendVO']['inqorUsrCnt1dRatio']]
              }
    df = pd.read_json(json.dumps(jsonDf))
    # statDate already comes from the response itself, so it is not overwritten here
    df['shopName'] = jsonFromWebShopName['result']['username']

    # TOP refund goods (last 30 days) from jsonFromWeb4
    dfSingleList = pd.read_json(json.dumps(jsonFromWeb4['result']))
    dfSingleList['statDate'] = datetime.datetime.now()
    dfSingleList['shopName'] = jsonFromWebShopName['result']['username']

    _stamp_period_columns(df)
    # Guard against possible connection timeouts (bugfix 2020.08.18)
    conn.connection.connection.ping(reconnect=True)
    df.to_sql(name='拼多多_商家后台_服务数据_售后数据', con=conn, if_exists='append', index=False)

    dfSingleList['uuid'] = globalUUID     # at minimum the uuid separates multiple runs
    _stamp_period_columns(dfSingleList)
    # Guard against possible connection timeouts (bugfix 2020.08.18)
    conn.connection.connection.ping(reconnect=True)
    dfSingleList.to_sql(name='拼多多_商家后台_服务数据_售后数据_top退款商品', con=conn, if_exists='append', index=False)

    # Newly added: the shop DSR is stored as a separate table of its own
    dfDSR = pd.read_json(json.dumps(jsonFromWeb5DSR['result']))
    dfDSR['shopName'] = jsonFromWebShopName['result']['username']
    _stamp_period_columns(dfDSR)
    # Guard against possible connection timeouts (bugfix 2020.08.18)
    conn.connection.connection.ping(reconnect=True)
    dfDSR.to_sql(name='拼多多_商家后台_服务数据_评价数据_店铺DSR', con=conn, if_exists='append', index=False)

    dfFirstPage['uuid'] = globalUUID
    _stamp_period_columns(dfFirstPage)
    # Guard against possible connection timeouts (bugfix 2020.08.18)
    conn.connection.connection.ping(reconnect=True)
    dfFirstPage.to_sql(name='拼多多_商家后台_服务数据_评价数据_近30天商品评价', con=conn, if_exists='append', index=False)

    # The list is now showing 20 rows per page; loop from page 2 onwards
    for i in range(1, math.ceil(jsonFromWeb6FirstPage['result']['mallOprGoodsCntStd'] / 20)):

        # Clear the network records
        pyautogui.moveTo(1542, 143, 6)
        pyautogui.click()

        # Scroll back down to the pager before clicking
        pyautogui.moveTo(1005, 864, 2)
        sleep(2)
        pyautogui.scroll(-50000)
        pyautogui.click()

        # Next page (NOTE: this position may shift!)
        pyautogui.moveTo(1370, 959, 6)
        pyautogui.click()

        # Wait, otherwise the new page's rows have not loaded yet
        sleep(6)

        # filter box sits at a slightly different x/y on this view
        jsonFromWeb7Loop = _capture_filtered_json('queryGoodsEvaluateVO', filterPos=(1522, 168))

        dfLoop2 = pd.read_json(json.dumps(jsonFromWeb7Loop['result']['goodsEvaluates']))
        dfLoop2['statDate'] = datetime.datetime.now()
        dfLoop2['shopName'] = jsonFromWebShopName['result']['username']
        dfLoop2['pageNumber'] = i

        dfLoop2['uuid'] = globalUUID
        _stamp_period_columns(dfLoop2)
        # Guard against possible connection timeouts (bugfix 2020.08.18)
        conn.connection.connection.ping(reconnect=True)
        dfLoop2.to_sql(name='拼多多_商家后台_服务数据_评价数据_近30天商品评价', con=conn, if_exists='append', index=False)

    resetGuiAtLastStepCloseF12ToTop()

    return


def deleteDuplicationData(engine):
    """Delete duplicate rows from the two dedup-sensitive tables.

    Special logic: different accounts may share the same category, so rows for
    the same day can be exact duplicates. For every pair of duplicates
    (same statDate, 运行模式 and shopName) the older row (smaller id) is
    removed, keeping the newest one.
    """
    tables = [
        '拼多多_商家后台_服务数据_评价数据_店铺DSR',
        '拼多多_商家后台_服务数据_售后数据',
    ]
    for table in tables:
        # MySQL multi-table DELETE: self-join the table and drop the older
        # member of every duplicate pair.
        sql = ('delete from a '
               'using ' + table + ' as a, ' + table + ' as b '
               'where (a.id < b.id) '
               'and (a.statDate = b.statDate and a.运行模式=b.运行模式 and a.shopName=b.shopName )')
        engine.execute(sql)


def executeCatchWeb(engine, conn):
    """Run the whole scrape with alerting.

    Logs the task, dedupes before and after the scrape (the run aborting
    half-way is a real risk, so duplicates are cleared up-front too), and
    sends an alert e-mail on any exception or a success e-mail otherwise.
    """
    fileName = os.path.split(__file__)[-1]
    try:
        LogTaskAndMachine('拼多多_商家后台_服务数据_评价数据_近30天商品评价', engine, conn, '', runMode)
        deleteDuplicationData(engine)
        SPGK(engine, conn)
        deleteDuplicationData(engine)
    except Exception as e:
        traceback.print_exc()
        detail = '异常:' + str(e) + '|报错文件:' + fileName
        sendAlert('【异常中断】' + modeName, detail, engine, conn)
        print('【异常中断】' + modeName, detail)
        return
    sendFinalSuccessEmail('●正常完成●' + modeName, '', engine, conn, modeName, runMode)


if __name__ == '__main__':
    # Entry point: open the SQLAlchemy engine / connection pair and run the
    # full scrape with alerting.
    engine, conn = getConn()
    executeCatchWeb(engine, conn)
