import traceback
import pyautogui
import time
import pyperclip as clip
import json
import pandas as pd
from datetime import datetime
from sqlalchemy import create_engine
import pypinyin
from bs4 import BeautifulSoup
import random
from jsjj.util.configUtil import *
from jsjj.email.sendEmail import *
from urllib import parse
from jsjj.util.dirUtil import *
from jsjj.util.emailUtil import *
from jsjj.util.timeUtil import *
from threading import Thread

# Target shop. Earlier runs scraped 原时旗舰店;
# the current target is 立太友泰专卖店.
shopName = '立太友泰专卖店'
# Source table of competitor product rows; also carries the resume flag column.
tableName = '拼多多_竞争对手商品信息_debug'
# Browser download directory where the Aming plugin saves its exported files.
downloadFilePath = 'C:/Users/Administrator/Downloads'


# Precondition: the product ("goods") info has already been fully captured.
# Downloads land in C:/Users/Administrator/Downloads
def SPGK(engine, conn):
    """Scrape competitor product pages through the Aming browser plugin via
    GUI automation, exporting one file per product, then bulk-load every
    exported file into the summary table.

    Per product row the flow is:
      1. Copy the browser's current URL, rewrite its ``goods_id`` query
         parameter to this row's product id.
      2. Paste and navigate, then click through the plugin's export dialog
         at fixed screen coordinates (Firefox on this machine, fixed window
         layout — the coordinates are layout-dependent).
      3. Mark the row done (resume flag) so an interrupted run can restart
         where it left off.

    :param engine: SQLAlchemy engine used for the SELECT/UPDATE statements.
    :param conn:   SQLAlchemy connection used for the final bulk insert.
    """
    from sqlalchemy import text  # local import keeps the module import block untouched

    # Disable the fail-safe: moving the mouse to a screen corner will NOT
    # abort the script (deliberate — the mouse is driven programmatically).
    pyautogui.FAILSAFE = False
    # Global delay added after every PyAutoGUI call (default is 0.1 s).
    pyautogui.PAUSE = 0.2

    # Watchdog thread: emails a warning if downloaded files stop appearing.
    t = Thread(target=checkEmail, args=(10,), daemon=True)
    t.start()

    # Delete files left over from the previous run (currently disabled).
    # delFile(downloadFilePath)

    # Parameterized query — data values go through bind params; the table
    # name itself is a trusted module-level constant.
    sql = "select * from " + tableName + " where 店铺名称=:shop and 阿明工具断点续传标志='0' "
    df = pd.read_sql(sql=text(sql), con=engine, params={"shop": shopName})
    sleep(6)

    for index, row in df.iterrows():
        # Visit each product detail page in the browser that has the Aming
        # plugin installed. The extra moveTo before clicking works around a
        # focus glitch (original "bug fix" note).
        pyautogui.moveTo(98, 108, 1)
        pyautogui.moveTo(185, 53, 1)
        pyautogui.click()

        # Focus the URL input box.
        pyautogui.moveTo(398, 54, 2)
        pyautogui.click()

        sleep(2)
        pyautogui.hotkey('ctrl', 'c')

        sleep(2)
        url = clip.paste()

        bits = list(parse.urlparse(url))
        qs = parse.parse_qs(bits[4])

        # Swap the goods_id in the copied URL for this row's product id.
        # NOTE(review): assumes the current URL always carries a goods_id
        # query parameter — a KeyError here means the browser was not on a
        # product page when the loop started.
        qs['goods_id'][0] = row['goods_id']
        bits[4] = parse.urlencode(qs, True)
        newurl = parse.urlunparse(bits)
        clip.copy(newurl)

        # Focus workaround again before pasting (same glitch as above).
        pyautogui.moveTo(185, 53, 1)
        pyautogui.moveTo(95, 111, 1)
        pyautogui.click()

        # URL input box.
        pyautogui.moveTo(185, 53, 1)
        pyautogui.moveTo(398, 54, 2)
        pyautogui.click()

        sleep(2)
        pyautogui.hotkey('ctrl', 'v')

        sleep(2)
        pyautogui.press('enter')  # single key: press(), not hotkey()

        # After the new page loads, export an excel file through the Aming
        # plugin; all files are merged into one table afterwards.
        pyautogui.moveTo(1411, 431, 6)
        pyautogui.click()

        # Click "export file".
        pyautogui.moveTo(1393, 322, 4)
        pyautogui.click()

        # Select the "save file" radio button.
        pyautogui.moveTo(816, 577, 2)
        pyautogui.click()

        # Confirm the save dialog.
        pyautogui.moveTo(1039, 634, 2)
        pyautogui.click()

        # Dismiss any lingering dialog.
        pyautogui.moveTo(1039, 634, 4)
        pyautogui.click()

        sleep(2)
        pyautogui.press('esc')  # single key: press(), not hotkey()

        # Mark this row done so an interrupted run resumes where it left off
        # (bind param avoids quoting problems inside goods_id).
        engine.execute(
            text(" update " + tableName + " set 阿明工具断点续传标志='1' where goods_id=:gid "),
            {"gid": row['goods_id']})
        # Pace the loop.
        sleep(6)

    # All files captured: load each one into the summary table.
    filelist = getRawFileList(downloadFilePath)
    for file in filelist[0]:
        if "desktop.ini" in file or "~" in file:
            continue
        df = pd.read_csv(file)
        df = df.dropna(axis=0, how='all')
        # Ping first to recover from possible connection timeouts (2020-08-18 fix).
        conn.connection.connection.ping(reconnect=True)
        df.to_sql(name='拼多多_阿明工具汇总_debug', con=conn, if_exists='append', index=False)

    return


def executeCatchWeb(engine, conn):
    """Run the SPGK scrape and, on any failure, email an alert that
    includes the full traceback, then return (the task stops; a human
    restarts it).

    :param engine: SQLAlchemy engine passed through to SPGK.
    :param conn:   SQLAlchemy connection passed through to SPGK.
    """
    try:
        SPGK(engine, conn)
    except Exception:
        # str(e) alone loses the failure site; the module imports traceback
        # at the top specifically so the alert can carry the full stack.
        sendEmail('【任务中断】自动收集阿明工具任务异常中断',
                  '自动收集阿明工具任务异常中断，请登录物理机/虚拟机进行检查，并重新启动任务！' + traceback.format_exc())
        return


def checkEmail(n):
    """Watchdog loop, meant to run forever in a daemon thread.

    Every 90 seconds, inspect the newest file in the download directory;
    if it is more than two minutes old the export flow has stalled, so
    email a warning asking for manual intervention.

    :param n: unused; kept so the existing ``Thread(args=(10,))`` call
        signature stays valid.
    """
    while True:
        sleep(90)
        newest_file = getLastFiles(downloadFilePath)
        last_modified = time.ctime(os.path.getmtime(newest_file))
        # compareDateTime is assumed to return (now - given timestamp) in
        # seconds, per the original note — TODO confirm against its source.
        if compareDateTime(last_modified) <= 120:
            continue
        sendEmail('【任务中断】自动收集阿明工具任务异常中断', '自动收集阿明工具任务异常中断【下载阿明文件并未按时生成】，请登录物理机/虚拟机进行检查，并重新启动任务！')


if __name__ == '__main__':
    # Manual debug entry point: open the database connection and run the
    # scrape task directly.
    # NOTE(review): credentials are hard-coded in source — consider moving
    # them to configuration.
    db_engine = create_engine('mysql+pymysql://jsbi:jsbi-1701@47.114.55.19:9011/biv1?charset=utf8')
    db_conn = db_engine.connect()
    executeCatchWeb(db_engine, db_conn)
