import ast

import requests

import pandas as pd
import base64
import json
import os
import shutil
import time
import random
from collections import OrderedDict
from urllib.parse import quote_plus
import redis
import requests
from sqlalchemy import create_engine
from DrissionPage import ChromiumPage
from DrissionPage import ChromiumOptions
from DrissionPage.common import Settings
from DrissionPage.common import Keys
from DrissionPage.common import Actions
import pymysql
from DrissionPage.errors import *

import datetime

from sqlalchemy.sql.coercions import expect


"""
https://customer.xiaohongshu.com/api/cas/customer/web/qr-code?service=https:%2F%2Fcreator.xiaohongshu.com&qr_code_id=68c517395516719772176435"""

class DY_XPIDER:
    def __init__(self, user_id, douyin_code):
        """Launch a dedicated Chrome profile for this spider user and connect to Redis.

        user_id: spider account identifier; also names the per-user Chrome
            profile directory under ./user_info_dir/.
        douyin_code: the account's Douyin unique id, passed through to the
            crawl methods.
        """
        self.CO = ChromiumOptions()  ##create chrome options instance
        self.user_id = str(user_id)  ##spider user id
        self.douyin_code = str(douyin_code)  ##douyin unique id

        self.PORT = self.port_input()  ##chrome debug port (stored one for known users)
        # BUG FIX: the original passed "browser_path='/Applications/..." — the
        # keyword name and a stray quote were embedded inside the path string,
        # so Chrome could never be found at that path.
        self.CO.set_paths(browser_path='/Applications/Google Chrome.app/Contents/MacOS/Google Chrome')  ##chrome binary path
        self.CO.set_local_port(self.PORT)  ##debug port chosen above
        self.CO.set_user_data_path(os.getcwd() + '/user_info_dir/{}'.format(self.user_id))  ##per-user chrome profile dir
        # self.CO.arguments.append('--headless')
        self.ch = ChromiumPage(self.CO)

        self.ch.set.window.max()  ##maximize the chrome window
        self.ac = Actions(self.ch)
        # NOTE(review): credentials are hard-coded; consider moving to env vars.
        self.redis_conn = redis.StrictRedis(host='r-bp1y4wxs1fa8vxd1xspd.redis.rds.aliyuncs.com', port=6379,
                                            password='Jison0820', db=0)

    """根据用户ID查出端口号"""

    def port_fe(self):
        """Look up the stored Chrome port(s) for this user and join them into one string."""
        sql = "select spider_user_id,spider_port,spider_login_code from dy_creator_center where spider_user_id='{}'".format(
            self.user_id)
        rows = self.pymysql_fetchall(sql)
        # Column 1 is spider_port; concatenate (normally a single row).
        return ''.join(row[1] for row in rows)

    """自动输入端口号"""

    def port_input(self):
        """Return the Chrome debug port for this spider user.

        Known users get their stored port (a string, as read from the DB);
        new users get a random port in [5000, 9000] that no other user has.

        Fixes vs. the original: the returned port is the same one that was
        checked against the DB (previously the local random `port1` was
        returned while `port_jiance` had tested a *different* random number),
        and the unreachable branch that read the not-yet-assigned
        ``self.PORT`` (AttributeError during __init__) is removed.
        """
        if self.user_state() == "用户存在":
            # Existing user: reuse the port recorded in dy_creator_center.
            return self.port_fe()
        sql = "select spider_user_id,spider_port,spider_login_code from dy_creator_center"
        used_ports = {str(row[1]) for row in self.pymysql_fetchall(sql)}
        # New user: draw random candidates until one is free.
        while True:
            candidate = random.randint(5000, 9000)
            if str(candidate) not in used_ports:
                return candidate

    """自动随机生成端口 并进行检测"""

    def port_jiance(self):
        """Draw a random port and report its status against the DB.

        Returns "用户存在" when this user is already registered, otherwise
        "端口存在"/"端口不存在" depending on whether the drawn port is taken.

        Fix: spider_port values come back from MySQL as strings while the
        random candidate is an int, so the original membership test
        ``port1 not in port_list`` could never match — every port was
        reported free. Both sides are now compared as strings.
        """
        port1 = random.randint(5000, 9000)

        sql = "select spider_user_id,spider_port,spider_login_code from dy_creator_center"
        spider_config = self.pymysql_fetchall(sql)
        port_list = [str(row[1]) for row in spider_config]
        if self.user_state() == "用户不存在":
            if str(port1) not in port_list:
                return "端口不存在"
            return "端口存在"
        return '用户存在'

    """查询数据库"""

    def pymysql_fetchall(self, sql):
        """Execute *sql* against the data-warehouse MySQL DB and return all rows.

        Returns a tuple of row tuples (default pymysql cursor).

        Fix: the connection is now released in a ``finally`` block and the
        cursor via its context manager, so neither leaks when the query
        raises (the original only closed them on the success path).
        """
        con = pymysql.connect(host='rm-bp1be6s3581xp13rfco.mysql.rds.aliyuncs.com', port=3306, user='root',
                              passwd='Jison0820', db='data-warehouse', charset='utf8')
        try:
            with con.cursor() as cursor:
                cursor.execute(sql)
                sql_result = cursor.fetchall()
            # Commit so DELETE/UPDATE statements issued through this helper stick.
            con.commit()
        finally:
            con.close()

        return sql_result

    """从数据库判断用户是否存在"""

    def user_state(self):
        """Report whether this spider user is already registered in dy_creator_center."""
        sql = "select spider_user_id,spider_port,spider_login_code from dy_creator_center"
        known_users = [row[0] for row in self.pymysql_fetchall(sql)]
        return "用户存在" if self.user_id in known_users else "用户不存在"

    """判断用户是否在数据库中 不在为新用户 在的话为老用户"""

    def user_info(self):
        """Return a one-row DataFrame describing this spider user (id + port),
        ready for insertion into dy_creator_center.

        Fix: the original built a SQLAlchemy engine here and never used it;
        that dead connection setup has been removed.
        """
        return pd.DataFrame([{"spider_user_id": str(self.user_id), "spider_port": str(self.PORT)}])


    # def mysql_config(self):
    #     user = 'live_data'
    #     password = quote_plus('CJMg2tsHg7sj6bo0UTKq0a4T!fPDpJ')
    #     # passwd ='merchantsasd123!@%&'
    #     host = 'rm-bp17r99475xuauk63yo.mysql.rds.aliyuncs.com'
    #     # port1 ='3306'
    #     dbname2 = 'live_data'
    #     engine2 = create_engine(f"mysql+pymysql://{user}:{password}@{host}:3306/{dbname2}?charset=utf8mb4")
    #     return engine2


    def mysql_config(self):
        """Create a SQLAlchemy engine bound to the data-warehouse MySQL database."""
        db_user = 'root'
        db_password = quote_plus('Jison0820')
        db_host = 'rm-bp1be6s3581xp13rfco.mysql.rds.aliyuncs.com'
        db_name = 'data-warehouse'
        url = "mysql+pymysql://{}:{}@{}:3306/{}?charset=utf8mb4".format(db_user, db_password, db_host, db_name)
        return create_engine(url)


    """获取cookies"""
    def get_cookies(self):
        cookies_str = ''
        for i in self.ch.cookies(all_domains=False, all_info=True):
            my_ordered_dict = OrderedDict(i)

            # 取出前两个键值对
            first_two_items = list(my_ordered_dict.items())[:2]
            key_value_str = ''
            count_key_value = 0
            for i in first_two_items:
                count_key_value = count_key_value + 1
                if count_key_value == 1:
                    key_value_str = key_value_str + i[1] + '='
                else:
                    key_value_str = key_value_str + i[1]
            cookies_str = cookies_str + key_value_str + ";"
        return cookies_str


    """数据中心 by_day数据处理"""
    def update_and_flatten_data(self, data):
        """Flatten the dashboard 'trends' mapping into ``{metric: value}``.

        data: mapping of metric name -> list whose first element is a dict of
            per-day fields. The bookkeeping fields (xigua_value, douyin_value,
            date_time) are stripped; of the remaining fields the LAST one wins
            as the metric's value (normally just 'value' remains).

        NOTE: mutates the inner dicts of *data* in place.

        Fix: the original wrapped three ``del`` statements in one bare
        try/except, so a missing ``xigua_value`` aborted the deletion of
        ``douyin_value`` and ``date_time`` too. Each field is now removed
        independently with ``pop``.
        """
        updated_data = {}
        for key, value_list in data.items():
            # Each metric's list is assumed to hold a single element.
            item = value_list[0]
            for obsolete in ('xigua_value', 'douyin_value', 'date_time'):
                item.pop(obsolete, None)
            for k, va in item.items():
                updated_data[key] = va
        return updated_data


    ##获取前一天时间
    def get_before_date(self):

        from datetime import datetime, timedelta

        # 获取当前时间
        current_time = datetime.now()

        # 计算前一天的时间
        previous_day_time = current_time - timedelta(days=1)

        # 打印结果
        print("当前时间:", current_time)
        print("前一天时间:", previous_day_time)

        return previous_day_time

    """获取数据中心前一天数据接口"""
    def data_center_video_count_by_day(self, cookies,douyin_unique_id,thirty_data):
        """Fetch the 1-day dashboard metrics and append a row to
        data_warehouse_douyin_video_by_day_data.

        cookies: Cookie header string for creator.douyin.com.
        douyin_unique_id: account's Douyin unique id, stored as douyin_code.
        thirty_data: 30-day frame (from data_center_video_count_thirty) used
            to backfill history when the table has fewer than 31 rows for
            this account.
        """
        current_time = datetime.datetime.now()
        # current_time = current_time.date()

        # Bookkeeping fields merged into the stored row.
        user_json = {"douyin_code":douyin_unique_id,"update_time":str(current_time),"date":self.get_before_date()}

        # current_time = str(current_time)[:-7]
        url = 'https://creator.douyin.com/janus/douyin/creator/data/overview/dashboard'

        headers = {
            "Cookie": cookies
        }

        data = {

            "recent_days": 1

        }
        # NOTE(review): GET with a JSON body is unusual — confirm the endpoint
        # actually reads recent_days this way. Also `re` shadows the usual
        # regex-module name (harmless here, but worth renaming).
        re = requests.get(url, json=data, headers=headers)

        total_center_json = re.json()
        total_value_json = {}
        trends_value_json = {}
        print(total_center_json)
        # Split each metric into its scalar value and its per-day trend list.
        # Assumes the response has a top-level "metrics" list — TODO confirm.
        for i in total_center_json["metrics"]:
            if i=={}:
                continue

            total_value = {i["english_metric_name"]: i["metric_value"]}
            total_value_json.update(total_value)
            trends_value = {i['english_metric_name']:i["trends"]}
            trends_value_json.update(trends_value)

        # print(trends_value_json)
        # Collapse the trend lists to one value per metric, then add the
        # bookkeeping fields (douyin_code / update_time / date).
        by_day_json = self.update_and_flatten_data(trends_value_json)
        by_day_json.update(user_json)

        """趋势图数据计算"""

        # delete_sql_trends = "DELETE FROM data_warehouse_douyin_video_by_day_data WHERE douyin_code='{}'".format(douyin_unique_id)
        # self.pymysql_fetchall(delete_sql_trends)
        print("删除数据成功,等待插入最新数据")


        trends_frame = pd.DataFrame([by_day_json])
        # trends_frame = trends_frame.astype("str")
        # Map the API's metric names onto the warehouse column names.
        trends_frame.rename(columns = {"play_cnt":"play_count","total_fans_cnt":"total_fans","cover_click_ratio":"cover_click_rate","cover_ratio":"cover_setting_rate","cancel_fans_cnt":"take_off_fans",
                                                      "net_fans_cnt":"net_growth_fans","comment_cnt":"works_comment","share_count":"works_share","homepage_view_cnt":"homepage_access","digg_cnt":"works_like"},inplace = True)
        # Backfill: with fewer than 31 stored rows for this account, insert the
        # 30-day history first (dropping its most recent row, which today's
        # by-day row replaces).
        count_frame = pd.read_sql("select count(*) from data_warehouse_douyin_video_by_day_data where douyin_code='{}'".format(douyin_unique_id),self.mysql_config())
        num_count = int(count_frame.values.tolist()[0][0])
        if num_count < 31:
            thirty_data = thirty_data.drop(thirty_data.index[-1]).reset_index(drop=True)
            pd.io.sql.to_sql(thirty_data, 'data_warehouse_douyin_video_by_day_data', self.mysql_config(),
                             schema='data-warehouse',
                             if_exists='append',
                             index=False)
        pd.io.sql.to_sql(trends_frame, 'data_warehouse_douyin_video_by_day_data', self.mysql_config(), schema='data-warehouse',
                         if_exists='append',
                         index=False)

        print("最新数据插入成功")

    def get_seven_thirty(self, data):
        """Reduce each category's entry list to a bare list of its 'value' fields.

        data: mapping of category -> list of dicts, each dict carrying a
        'value' key. Returns a mapping of category -> list of values.
        """
        return {
            category: [entry['value'] for entry in entries]
            for category, entries in data.items()
        }

    """数据中心7天数据处理，加上日期"""
    def add_seven_date(self, data_list):
        """Stamp each record with a 'date' string, counting back one day per
        record from the end of the list (last record = yesterday).

        Mutates the dicts in *data_list* in place and returns a new list
        holding the same records in their original order.
        """
        from datetime import datetime, timedelta

        anchor = datetime.now()
        # Walk from the newest record backwards; offset 0 -> yesterday.
        for offset, record in enumerate(reversed(data_list)):
            record['date'] = (anchor - timedelta(days=offset + 1)).strftime('%Y-%m-%d')

        return list(data_list)

    def seven_huizong_data(self, df):
        """Aggregate per-day rows into one summary row per douyin_code.

        Activity metrics are summed across days; total_fans is taken from
        each account's most recent date. Adds an update_time column.
        """
        df = df.drop(["cover_click_rate", "cover_setting_rate"], axis=1)
        df['date'] = pd.to_datetime(df['date'], format="%Y-%m-%d")

        metric_columns = ["play_count", "works_like", "works_share", "works_comment",
                          "net_growth_fans", "take_off_fans", "homepage_access"]
        grouped_sum = df.groupby('douyin_code')[metric_columns].sum().reset_index()

        # For fans we want a snapshot, not a sum: pick each account's newest row.
        newest_idx = df.groupby('douyin_code')['date'].idxmax()
        latest_fans = df.loc[newest_idx][['douyin_code', 'total_fans']].reset_index(drop=True)

        final_result = pd.merge(grouped_sum, latest_fans, on='douyin_code')

        print(final_result)
        final_result['update_time'] = datetime.datetime.now()
        return final_result

    """获取数据中心7天数据接口"""
    def data_center_video_count_seven(self, cookies, douyin_unique_id):
        """Fetch the 7-day dashboard trends, date-stamp them, aggregate to one
        summary row per account and append it to
        data_warehouse_douyin_video_by_seven_data.

        cookies: Cookie header string for creator.douyin.com.
        douyin_unique_id: account's Douyin unique id, stored as douyin_code.
        """
        current_time = datetime.datetime.now()


        user_json = {"douyin_code": douyin_unique_id, "update_time": str(current_time), "date": self.get_before_date()}

        # current_time = str(current_time)[:-7]
        url = 'https://creator.douyin.com/janus/douyin/creator/data/overview/dashboard'

        headers = {
            "Cookie": cookies
        }

        data = {

            "recent_days": 7

        }
        # NOTE(review): GET with a JSON body — confirm against the live API.
        re = requests.get(url, json=data, headers=headers)

        total_center_json = re.json()
        print(total_center_json,"**********")
        total_value_json = {}
        trends_value_json = {}
        # Split each metric into its scalar value and its per-day trend list.
        for i in total_center_json["metrics"]:
            total_value = {i["english_metric_name"]: i["metric_value"]}
            total_value_json.update(total_value)
            trends_value = {i['english_metric_name']: i["trends"]}
            trends_value_json.update(trends_value)

        # Reduce each trend entry to its bare 'value'.
        seven_json = self.get_seven_thirty(trends_value_json)
        # seven_json.update(user_json)

        # List of all metric keys.
        keys = seven_json.keys()

        # Assumes every metric list has the same length; use play_cnt's length
        # as the number of days.
        num_entries = len(seven_json['play_cnt'])

        # Pivot column-wise lists into one dict per day.
        entries = []
        for i in range(num_entries):
            entry = {}
            for key in keys:
                entry[key] = seven_json[key][i]
            entries.append(entry)

            # Attach the bookkeeping fields to each daily record.
        json_list = []
        for entry in entries:
            entry.update(user_json)
            json_list.append(entry)

        # Stamp each record with its calendar date (last record = yesterday).
        seven_json_result = self.add_seven_date(json_list)
        # print(seven_json_result)
        seven_frame = pd.DataFrame(seven_json_result)
        # Map the API's metric names onto the warehouse column names.
        seven_frame.rename(
            columns={"play_cnt": "play_count", "total_fans_cnt": "total_fans", "cover_click_ratio": "cover_click_rate",
                     "cover_ratio": "cover_setting_rate", "cancel_fans_cnt": "take_off_fans",
                     "net_fans_cnt": "net_growth_fans", "comment_cnt": "works_comment", "share_count": "works_share",
                     "homepage_view_cnt": "homepage_access", "digg_cnt": "works_like"}, inplace=True)
        print(seven_frame)

        # Collapse the 7 daily rows into one aggregated summary row.
        final_result = self.seven_huizong_data(seven_frame)
        # delete_sql_trends = "DELETE FROM data_warehouse_douyin_video_by_seven_data WHERE douyin_code='{}'".format(douyin_unique_id)
        # self.pymysql_fetchall(delete_sql_trends)
        # pd.io.sql.to_sql(seven_frame, 'data_warehouse_douyin_video_by_seven_data', self.mysql_config(),
        #                  schema='data-warehouse',
        #                  if_exists='append',
        #                  index=False)
        pd.io.sql.to_sql(final_result, 'data_warehouse_douyin_video_by_seven_data', self.mysql_config(),
                         schema='data-warehouse',
                         if_exists='append',
                         index=False)
        # print("最新数据插入成功")

    """获取数据中心30天数据接口"""

    def data_center_video_count_thirty(self, cookies, douyin_unique_id):
        """Fetch the 30-day dashboard trends, aggregate a summary row into
        data_warehouse_douyin_video_by_thirty_data, and return the renamed
        daily frame (consumed by data_center_video_count_by_day as backfill).

        cookies: Cookie header string for creator.douyin.com.
        douyin_unique_id: account's Douyin unique id, stored as douyin_code.
        """
        current_time = datetime.datetime.now()

        user_json = {"douyin_code": douyin_unique_id, "update_time": str(current_time),
                     "date": self.get_before_date()}

        # current_time = str(current_time)[:-7]
        url = 'https://creator.douyin.com/janus/douyin/creator/data/overview/dashboard'

        headers = {
            "Cookie": cookies
        }

        data = {

            "recent_days": 30

        }
        # NOTE(review): GET with a JSON body — confirm against the live API.
        re = requests.get(url, json=data, headers=headers)

        total_center_json = re.json()
        total_value_json = {}
        trends_value_json = {}
        # Split each metric into its scalar value and its per-day trend list.
        for i in total_center_json["metrics"]:
            total_value = {i["english_metric_name"]: i["metric_value"]}
            total_value_json.update(total_value)
            trends_value = {i['english_metric_name']: i["trends"]}
            trends_value_json.update(trends_value)

        # Reduce each trend entry to its bare 'value'.
        seven_json = self.get_seven_thirty(trends_value_json)
        # seven_json.update(user_json)

        # List of all metric keys.
        keys = seven_json.keys()

        # Assumes every metric list has the same length; use play_cnt's length
        # as the number of days.
        num_entries = len(seven_json['play_cnt'])

        # Pivot column-wise lists into one dict per day.
        entries = []
        for i in range(num_entries):
            entry = {}
            for key in keys:
                entry[key] = seven_json[key][i]
            entries.append(entry)

            # Attach the bookkeeping fields to each daily record.
        json_list = []
        for entry in entries:
            entry.update(user_json)
            json_list.append(entry)

        # Stamp each record with its calendar date (last record = yesterday).
        seven_json_result = self.add_seven_date(json_list)
        # print(seven_json_result)
        seven_frame = pd.DataFrame(seven_json_result)
        # Map the API's metric names onto the warehouse column names.
        seven_frame.rename(
            columns={"play_cnt": "play_count", "total_fans_cnt": "total_fans", "cover_click_ratio": "cover_click_rate",
                     "cover_ratio": "cover_setting_rate", "cancel_fans_cnt": "take_off_fans",
                     "net_fans_cnt": "net_growth_fans", "comment_cnt": "works_comment", "share_count": "works_share",
                     "homepage_view_cnt": "homepage_access", "digg_cnt": "works_like"}, inplace=True)
        # print(seven_frame)

        # Collapse the 30 daily rows into one aggregated summary row.
        final_result = self.seven_huizong_data(seven_frame)
        # delete_sql_trends = "DELETE FROM data_warehouse_douyin_video_by_thirty_data WHERE douyin_code='{}'".format(douyin_unique_id)
        # self.pymysql_fetchall(delete_sql_trends)
        # seven_frame["cover_setting_rate"] = seven_frame["cover_setting_rate"].astype("float")
        # pd.io.sql.to_sql(seven_frame, 'data_warehouse_douyin_video_by_thirty_data', self.mysql_config(),
        #                  schema='data-warehouse',
        #                  if_exists='append',
        #                  index=False)
        pd.io.sql.to_sql(final_result, 'data_warehouse_douyin_video_by_thirty_data', self.mysql_config(),
                                          schema='data-warehouse',
                                          if_exists='append',
                                          index=False)
        print("最新数据插入成功")

        # Return the per-day frame so the by-day step can backfill history.
        return seven_frame

    def dy_creator_center_spider(self):
        """Main crawl for one account: open the creator-center home page,
        register the user row if new, verify the session, then pull the
        30-day, daily and 7-day dashboards into MySQL.

        Returns a status string: "授权登录过期" (login expired),
        "暂未开通数据中心权限" (data center not enabled) or
        "程序计算完成" (run finished).
        """
        print('用户:',self.user_id, '此次的端口是:',self.PORT)
        time.sleep(2)
        self.ch.get('https://creator.douyin.com/creator-micro/home')
        time.sleep(2)
        user_it_exist = self.user_state()
        data_frame = self.user_info()
        # First-time users get a dy_creator_center row with login flag '0'.
        if user_it_exist == "用户不存在":
            data_frame["spider_login_code"] = '0'
            pd.io.sql.to_sql(data_frame, 'dy_creator_center', self.mysql_config(), schema='data-warehouse',
                             if_exists='append', index=False)

        # print("走到这里了")
        login_url_is = self.ch.url
        time.sleep(2)

        # Being bounced back to the bare domain means the session has expired.
        if login_url_is == "https://creator.douyin.com/":
            print("授权登录过期")
            self.ch.close()
            return "授权登录过期"

        else:

            # Mark the account as logged-in in Redis for 5 minutes.
            self.redis_conn.set('dy_center_qrcode:{}'.format(self.user_id), '"no_login"')
            self.redis_conn.expire('dy_center_qrcode:{}'.format(self.user_id), 300)


        time.sleep(5)
        """获取抖音创作中心的用户详情"""
        # detail_result = self.dy_user_info_detail()
        # fans_num = detail_result[0]
        # douyin_unique_id = detail_result[1]
        douyin_unique_id = self.douyin_code
        """获取抖音cookies"""
        cookies = self.get_cookies()
        try:
            # An "立即开通" (activate now) button means the account has no
            # data-center access yet: click it once and bail out.
            data_center_status = self.ch.ele('xpath://*[@id="creator-home-left-content-id"]/div[2]/div[2]/div/div/button/span/span[2]')
            print(data_center_status.text,"*************")
            if data_center_status.text == '立即开通':
                self.ch.ele(
                    'xpath://*[@id="creator-home-left-content-id"]/div[2]/div[2]/div/div/button/span/span[2]').click()
                time.sleep(1)
                self.ch.close()
                return "暂未开通数据中心权限"
            # else:
            #     print("暂未开通数据中心权限")
            #     return "暂未开通数据中心权限"
        except:
            # NOTE(review): bare except — the button is absent for enabled
            # accounts, but this also hides real page/locator errors.
            pass
        try:
            center_data_thirty = self.data_center_video_count_thirty(cookies,douyin_unique_id)
            center_data_by_day = self.data_center_video_count_by_day(cookies,douyin_unique_id,center_data_thirty)
            center_data_seven = self.data_center_video_count_seven(cookies,douyin_unique_id)
        except:
            # NOTE(review): bare except swallows all dashboard/DB failures;
            # at minimum the exception should be logged before continuing.
            pass
        self.ch.close()
        return "程序计算完成"

def mysql_config():
    """Build a SQLAlchemy engine for the data-warehouse MySQL database
    (module-level twin of DY_XPIDER.mysql_config)."""
    credentials = {
        "user": 'root',
        "password": quote_plus('Jison0820'),
        "host": 'rm-bp1be6s3581xp13rfco.mysql.rds.aliyuncs.com',
        "db": 'data-warehouse',
    }
    url = "mysql+pymysql://{user}:{password}@{host}:3306/{db}?charset=utf8mb4".format(**credentials)
    return create_engine(url)

def dy_by_day_job():
    """Run the creator-center spider once for every account in dy_user_info."""
    accounts = pd.read_sql("select * from dy_user_info", mysql_config())

    for account in accounts.to_dict("records"):
        spider = DY_XPIDER(user_id=account["dy_spider_user_id"], douyin_code=account["douyin_unique_id"])
        status = spider.dy_creator_center_spider()
        print(status)

    print(datetime.datetime.now())
    return "抖音创作者中心每日计算完成"


if __name__ == "__main__":
    # Guard the entry point so importing this module does not trigger a full crawl.
    print(dy_by_day_job())


