# -- coding: utf-8 --
# @time : 2023/5/31
# @author : 周梦泽
# @file : relatedAnalysis.py
# @software: pycharm
# 获取相关分析-生意参谋-市场-相关分析
import json
import time
import pandas as pd
import pyautogui
from DrissionPage import WebPage
from autoTask.operaitonAid.utils.df2obj import DataRow
from autoTask.taobao.sycm.utils.close_Ad import close_ad
from autoTask.taobao.sycm.utils.selectDays import select_days
from autoTask.taobao.sycm.xgfx import CacheAction
from autoTask.taobao.taobao_login import Login
from common.logger.log import log_
from common.utils.CacheUtil import init_cache
from common.utils.ExcelUtil import pd_paste_2_csv
from common.utils.ThreadUtil import before_enable_stop_abel_thread
from common.utils.web_page_util import MockTap, Input, Tap, random_delay
from common.utils.web_page_util import create_page

# Disable pyautogui's fail-safe (moving the mouse to a screen corner would
# otherwise abort automation) — needed for unattended runs.
pyautogui.FAILSAFE = False
sycm_url = 'https://sycm.taobao.com/'  # Sycm (生意参谋) home page URL


def df2datarowobj(df) -> str:
    """
    Convert a related-analysis DataFrame into a JSON string of DataRow objects.

    The Chinese column headers produced by the Sycm export are renamed to the
    English attribute names expected by ``DataRow``, each row is wrapped in a
    ``DataRow``, and the whole list is serialized via each object's
    ``to_dict()``.

    :param df: pandas.DataFrame whose columns use the Chinese headers below
    :return: JSON array string (``ensure_ascii=False`` keeps Chinese readable)
    """
    # NOTE: the original annotation was ``-> json`` — ``json`` is a module,
    # not a type; the function actually returns a JSON-encoded ``str``.
    df = df.rename(columns={"关键词": "name", "日期": "cycle", "搜索人数": "searchPeopleNum", "搜索次数": "searchCount",
                            "点击率": "clickRate", "点击人数": "clickPeopleNum", "点击次数": "clickCount",
                            "交易金额": "transactionAmount", "支付转化率": "paymentConversionRate",
                            "在线商品数": "onlineGoodsNum", "商城点击占比": "mallClickRate",
                            "直通车参考价": "ztcReferencePrice",
                            "支付人数": "paymentPeopleNum", "客单价": "unitPrice",
                            "搜索人数/在线商品数": "searchUsersPerGoods",
                            "交易金额/在线商品数": "transactionAmountPerGoods", "访客平均价值": "visitorAverageValue",
                            "商品平均价值": "goodsAverageValue", "商品访客竞争度": "visitorCompetitionIndex",
                            "优质价值词": "highQualityValueWords", "竞争度": "competitionDegree",
                            "预估转化件数": "estimatedConversionQuantity", "平均交易指数": "averageTransactionIndex"})
    # One DataRow per row; kwargs come straight from the renamed columns.
    objects = [DataRow(**row.to_dict()) for _, row in df.iterrows()]
    return json.dumps(objects, default=lambda obj: obj.to_dict(), ensure_ascii=False)


class RelatedAnalysis:
    """Scrape the Sycm Market -> Search Rank -> Related Analysis report.

    For each hot search term it drives the browser through the "小旺神"
    one-click convert / one-click copy flow, reads the resulting table from
    the clipboard, and returns the merged metrics as DataRow JSON.
    """

    def __init__(self, params: dict, hot_term_list: list):
        """
        Store task parameters and create the browser page.

        :param params: task-parameter dict containing:
                       'account'   - login account (str)
                       'password'  - login password (str)
                       'dateCycle' - data window in days (int)
        :param hot_term_list: hot search keywords to analyse
        """
        # Extract parameters

        self.account = params['account']
        self.password = params['password']
        # NOTE(review): key is 'dateCycle' (camelCase), not 'date_cycle' —
        # keep callers consistent with this spelling.
        self.date_cycle = params['dateCycle']
        self.hot_term_list = hot_term_list
        self.params = params
        self.page = create_page()

    @staticmethod
    def deep_analysis(page: WebPage, date_cycle: int, hot_term_list: list):
        """
        Sycm -> Market -> Search Rank -> Related Analysis -> select day range,
        then for each keyword: one-click convert ("小旺神") and one-click copy,
        pulling the metrics table from the clipboard.

        :param page: WebPage already logged in to Sycm
        :param date_cycle: data window in days (may be adjusted by select_days)
        :param hot_term_list: keywords from the search ranking list
        :return: (analysis_objects, merged_df) — DataRow JSON string and the
                 merged, de-duplicated pandas.DataFrame
        :raises Exception: if no data could be collected for any keyword
        """
        log_.info('正在打开生意参谋')
        # Navigate to the Sycm home page unless we are already on it
        if sycm_url not in page.url:
            page.get(sycm_url)
            time.sleep(2)
        market = page("x://ul[@class='menu-list clearfix']")("tx:市场")
        MockTap(ele=market, msg='点击市场失败', before_delay=(3, 6))()
        search_rank = page("x://div[contains(@class, 'op-ebase-leftMenu')]")("tx:搜索排行")
        MockTap(search_rank, msg='点击搜索排行失败', before_delay=(3, 6))()
        # Keep refreshing while the page still shows a loading placeholder
        while page("资源加载中...", timeout=1) or page("正在努力加载...", timeout=1):
            time.sleep(3)
            page.refresh()
            time.sleep(2)
        # "数据拥堵" (data congestion) banner: refresh once and log a warning
        if page("x://p[@class='oui-dt-message-content']/span", timeout=2):
            if "数据拥堵" in page("x://p[@class='oui-dt-message-content']/span", timeout=2).text:
                page.refresh()
                log_.warning("数据拥堵")
        search_jump = page.eles("x://span[contains(@class,'sycm-mc-link-td-wrapper')]/a[1]")[0]
        random_delay((3, 6))
        # Follow the link directly instead of clicking (opens a new tab)
        page.get(search_jump.link)
        page.set.main_tab(page.to_tab(page.latest_tab))
        # Promote the newest tab to be the main tab
        MockTap('相关分析', msg='点击相关分析失败', before_delay=(3, 6))(page)
        page, date_cycle = select_days(page, date_cycle)
        data_df_list = []
        for i in range(len(hot_term_list)):
            # Per keyword: type into the search box -> search -> one-click
            # convert -> one-click copy -> update cache -> collect DataFrame.
            # If a cached result exists, use it directly and skip the browser.
            df_result = CacheAction.get_search_key_df(hot_term_list[i], str(date_cycle) + '天')
            if df_result is not None:
                log_.info('缓存已存在')
                data_df_list.append(df_result)
                continue
            log_.info('正在点击 搜索分析，\n开始搜索第{}'.format(i + 1) + '个热门搜索词:{}'.format(hot_term_list[i]))
            # '\n' submits the search like pressing Enter
            Input("x://span[@class='oui-canary-input ant-input-affix-wrapper']/input", msg='输入热门分析词失败',
                  vals=hot_term_list[i] + '\n', before_delay=(3, 6), after_delay=(3, 6))(page)
            log_.info('数据加载中')
            Tap(ele="x://button/span/span", msg='点击小旺神转化失败', before_delay=(1, 2))(page)
            # "No convertible index" — nothing to extract for this keyword
            if page('没有可转化的指数！', timeout=2):
                log_.warning('没有可转化的指数！,跳过该词')
                continue
            # "Data update failed, please refresh" — retry via refresh
            if page('数据更新失败，请刷新页面重试！', timeout=2):
                log_.info('数据更新失败，请刷新页面重试！')
                page.refresh()
            log_.info('点击一键复制')
            Tap(' 一键复制', msg='一键复制失败', before_delay=(1, 2), after_delay=(1, 3))(page)
            log_.info('正在转化数据')

            # Parse the clipboard contents (copied table) into a DataFrame
            data = pd_paste_2_csv()
            # Update the per-keyword cache
            CacheAction.put_cache_by_df(hot_term_list[i], str(date_cycle) + '天', data, )
            data_df_list.append(data)

            log_.info('转化完成,关闭小旺神')
            time.sleep(1)
            # ESC closes the "小旺神" overlay before the next keyword
            pyautogui.press('esc')
        if not data_df_list:
            raise Exception('没有数据')
        merged_df = pd.concat(data_df_list, ignore_index=True)
        # Merge all per-keyword tables
        merged_df.drop_duplicates(subset=['关键词'], inplace=True)
        # De-duplicate by keyword
        merged_df.replace('-', float('nan'), inplace=True)
        # Replace '-' placeholders with NaN, then NaN with 0
        merged_df.replace(float('nan'), 0, inplace=True)
        merged_df = merged_df.apply(pd.to_numeric, errors='ignore')
        merged_df = merged_df.applymap(
            lambda x: round(float(x.strip('%')) / 100, 2) if isinstance(x, str) and x.endswith('%') else x)
        # Strip '%' and convert percentage strings to decimals
        analysis_objects = df2datarowobj(merged_df)
        # Serialize the table as DataRow JSON
        return analysis_objects, merged_df

    @before_enable_stop_abel_thread(target_func=close_ad, is_loop=True)
    def __call__(self):
        """
        Entry point.

        Serve from the whole-task cache when it holds a non-empty DataFrame;
        otherwise log in (if not already on Sycm), run deep_analysis, and
        refresh the cache with the merged result.
        :return: DataRow JSON string
        """
        merged_df = init_cache(self.params).get_search_key_df(key_type='xgfx')
        if merged_df is not None and not merged_df.empty:
            # Cache hit with a non-empty DataFrame — use cached data
            log_.info('缓存存在,使用缓存数据')
            merged_df.drop_duplicates(subset=['关键词'], inplace=True)
            analysis_objects = df2datarowobj(merged_df)
            return analysis_objects
        page = self.page
        if 'sycm.taobao.com' not in page.url:
            # NOTE(review): Login() reassigns self.page, but the local `page`
            # below still refers to the pre-login page object — confirm whether
            # deep_analysis should receive self.page here instead.
            self.page, _ = Login(self.account, self.password).execute()
        analysis_objects, merged_df = self.deep_analysis(page, self.date_cycle, self.hot_term_list)
        init_cache(self.params).put_cache_by_df(key_type="xgfx", df=merged_df)
        # Refresh the whole-task cache
        return analysis_objects
