import ast
import datetime
import time
from urllib.parse import quote_plus

import pymysql
import requests
import json

from sqlalchemy import create_engine


from mysql_ini import *

def mysql_config():
    """Build a SQLAlchemy engine for the configured MySQL warehouse.

    Connection settings (user, password, host, database) come from
    ``mysql_config_ini()`` in ``mysql_ini``; the port is fixed at 3306.

    Returns:
        sqlalchemy.engine.Engine: engine using the pymysql driver with
        the utf8mb4 charset.
    """
    # Read the config once instead of calling mysql_config_ini() per field.
    cfg = mysql_config_ini()
    # URL-escape the password so special characters (e.g. '@', '%', '!')
    # cannot break the connection URL.
    password = quote_plus(cfg["password"])
    return create_engine(
        f"mysql+pymysql://{cfg['user']}:{password}@{cfg['host']}:3306/"
        f"{cfg['database']}?charset=utf8mb4"
    )


# Douyin creator-platform metric names mapped to the numeric `index_type`
# codes the data/item/option API expects (codes taken from observed requests).
hour_index_type = {
    "play_count": "1",
    "play_avg_time": "8",
    "play_finish_ratio": "7",
    "digg_count": "3",
    "comment_count": "4",
    "share_count": "5",
    "new_fans_count": "9"
}


# Sent as the `index_data_window` query parameter.
# "2" presumably selects the by-day window — TODO confirm against the API.
type_time = "2"


def send_request(item_id, index_type_key, index_type_value, type_time, cookies):
    """Fetch one metric's trend data for a single Douyin video.

    Args:
        item_id: the video's aweme_id.
        index_type_key: metric name (a key of ``hour_index_type``).
        index_type_value: numeric metric code (the matching value).
        type_time: value for the ``index_data_window`` query parameter.
        cookies: raw Cookie header string of an authenticated session.

    Returns:
        dict: ``{index_type_key: <trend_data>, "aweme_id": item_id}``.

    Raises:
        KeyError: if the response JSON has no ``trend_data`` field
            (e.g. expired cookies or signature).
    """
    # Query parameters for the data/item/option endpoint.
    params = {
        'index_type': index_type_value,
        'index_type_key': index_type_key,
        'index_data_window': type_time,
        'item_id': item_id,
    }
    headers = {
        "Cookie": cookies
    }
    # Throttle so repeated metric fetches don't hammer the endpoint.
    time.sleep(0.5)
    # NOTE(review): msToken / X-Bogus / _signature baked into this URL are
    # session-bound and will expire; they likely need periodic refreshing.
    url = 'https://creator.douyin.com/web/api/creator/data/item/option/?aid=2906&app_name=aweme_creator_platform&device_platform=web&referer=https:%2F%2Fcreator.douyin.com%2Fcreator-micro%2Fcontent%2Fmanage&user_agent=Mozilla%2F5.0+(Macintosh%3B+Intel+Mac+OS+X+10_15_7)+AppleWebKit%2F537.36+(KHTML,+like+Gecko)+Chrome%2F129.0.0.0+Safari%2F537.36&cookie_enabled=true&screen_width=1512&screen_height=982&browser_language=zh-CN&browser_platform=MacIntel&browser_name=Mozilla&browser_version=5.0+(Macintosh%3B+Intel+Mac+OS+X+10_15_7)+AppleWebKit%2F537.36+(KHTML,+like+Gecko)+Chrome%2F129.0.0.0+Safari%2F537.36&browser_online=true&timezone_name=Asia%2FShanghai&publish_time=1727942387&mcn_type=0&msToken=fMVoKDIG88LQkkhtl2Z2GbcBjXgP6Y3l5rsd6FJQZfqV6CuXPyT_HshGh9aaEBH5CN_WmtBbg00sjNJqbuuWNU0D0II5VqurOkN4YUoIYthyFC7-6yj5np7flZab&X-Bogus=DFSzswVY5EtANj1qtB83QQIm4L7z&_signature=_02B4Z6wo00001wIj5TgAAIDC4ugu6GY3Aj8CI-GAAKeZp63bpOSXZvhVLh6E1A3zWFbmpUBBtHF98pdXyBXNzr.2cEWkWcFcxGteN-zyQizVm8YmqdBcaOMo.VErChNYlBVgLMn47WJObam51e'
    response = requests.get(url, headers=headers, params=params)
    trend_data = response.json()["trend_data"]
    return {index_type_key: trend_data, "aweme_id": item_id}
import pandas as pd
def job1(douyin_account_id, cookies):
    """Collect every metric's trend data for all videos of one account.

    Reads the account's videos from
    ``data_warehouse_douyin_aweme_info_data``, fetches each metric in
    ``hour_index_type`` per video via :func:`send_request`, and joins the
    per-metric frames column-wise.

    Args:
        douyin_account_id: the account's ``douyin_code``.
        cookies: raw Cookie header string for the creator platform.

    Returns:
        pandas.DataFrame: one row per video; metric columns hold the raw
        trend lists stringified (``astype("str")``), plus ``aweme_id`` and
        ``douyin_account_id``.
    """
    # Parameterized query — never interpolate caller input into SQL.
    info_data = pd.read_sql(
        "select * from data_warehouse_douyin_aweme_info_data"
        " where douyin_code = :code",
        mysql_config(),
        params={"code": douyin_account_id},
    )

    start_time = datetime.datetime.now()

    # One bucket per metric. Each send_request() result dict has exactly the
    # keys {index_type_key, "aweme_id"}, so it belongs to a single bucket.
    buckets = {key: [] for key in hour_index_type}
    for row in info_data.to_dict("records"):
        for index_type_key, index_type_value in hour_index_type.items():
            result = send_request(
                row["aweme_id"], index_type_key, index_type_value,
                type_time, cookies,
            )
            buckets[index_type_key].append(result)

    # hour_index_type's insertion order is the final column order. Only the
    # first frame keeps aweme_id; the rest would duplicate the column.
    frames = []
    for position, key in enumerate(hour_index_type):
        frame = pd.DataFrame(buckets[key])
        if position > 0:
            frame = frame.drop(["aweme_id"], axis=1)
        frames.append(frame)

    data_result = pd.concat(frames, axis=1)
    data_result = data_result.astype("str")
    print(data_result)
    print(data_result.columns)

    end_time = datetime.datetime.now()
    final_time = end_time - start_time
    print(final_time.seconds)

    data_result["douyin_account_id"] = douyin_account_id
    return data_result

def pymysql_fetchall(sql):
    """Execute *sql* against the configured database and return all rows.

    Commits after execution so mutating statements (e.g. the DELETE used
    by callers) take effect, not only SELECTs.

    Args:
        sql: complete SQL statement. Callers must pass trusted,
            pre-escaped SQL — this helper does no parameter binding.

    Returns:
        tuple: all fetched rows as returned by ``cursor.fetchall()``.
    """
    cfg = mysql_config_ini()
    con = pymysql.connect(host=cfg["host"], port=cfg["port"], user=cfg["user"],
                          passwd=cfg["password"], db=cfg["database"], charset='utf8')
    # try/finally guarantees cursor and connection are released even when
    # execute() raises (the original leaked both on error).
    try:
        cursor = con.cursor()
        try:
            cursor.execute(sql)
            sql_result = cursor.fetchall()
            con.commit()
        finally:
            cursor.close()
    finally:
        con.close()

    return sql_result

def by_day_code(douyin_account_id, cookies):
    """Flatten per-video trend lists into per-point rows and store them.

    Pulls the per-video metric frames from :func:`job1`, explodes every
    metric's list of trend points into one row per point, joins the
    metric columns side by side and appends the result to
    ``data_warehouse_douyin_video_contribute_by_day_data``.

    Args:
        douyin_account_id: the account's ``douyin_code``.
        cookies: raw Cookie header string for the creator platform.

    Returns:
        pandas.DataFrame: the frame that was written to the database.
    """
    data = job1(douyin_account_id, cookies)
    douyin_account_id = data["douyin_account_id"].values.tolist()[0]
    data = data.drop(["douyin_account_id"], axis=1)
    # job1 stringified every cell; parse the list-of-dicts back.
    df = data.applymap(lambda x: ast.literal_eval(x) if isinstance(x, str) else x)

    metrics = ("play_count", "play_avg_time", "play_finish_ratio",
               "digg_count", "comment_count", "share_count", "new_fans_count")
    rows = {name: [] for name in metrics}

    for record in df.to_dict("records"):
        # play_count carries the shared identifying fields for the row.
        for point in record["play_count"]:
            rows["play_count"].append({
                'play_count': point["douyin_value"],
                "date_time": point["key"],
                "aweme_id": record["aweme_id"],
                "douyin_code": douyin_account_id,
            })

        # NOTE(review): play_finish_ratio reads "value" while every other
        # metric reads "douyin_value" — kept as-is to match the original;
        # confirm this matches the API payload rather than being a typo.
        for point in record["play_finish_ratio"]:
            rows["play_finish_ratio"].append(
                {'play_finish_ratio': point["value"]})

        for name in ("play_avg_time", "digg_count", "comment_count",
                     "share_count", "new_fans_count"):
            for point in record[name]:
                rows[name].append({name: point["douyin_value"]})

    # Column order matters for the target table: play_count first (it carries
    # the shared keys), then comment BEFORE digg — intentionally different
    # from the collection order above.
    concat_order = ("play_count", "play_avg_time", "play_finish_ratio",
                    "comment_count", "digg_count", "share_count",
                    "new_fans_count")
    by_day_frame_total = pd.concat(
        [pd.DataFrame(rows[name]) for name in concat_order], axis=1)
    by_day_frame_total.rename(
        columns={"play_avg_time": "ave_play_count",
                 "play_finish_ratio": "completion_rate",
                 "digg_count": "like_count",
                 "new_fans_count": "Increase_followers_count"},
        inplace=True)

    # Columns the target table requires but Douyin does not provide;
    # stored as float zeros.
    for col in ('cover_click_rate', 'favorite_count', 'danmaku_count'):
        by_day_frame_total[col] = 0.0

    pd.io.sql.to_sql(by_day_frame_total,
                     'data_warehouse_douyin_video_contribute_by_day_data',
                     mysql_config(),
                     schema='data-warehouse',
                     if_exists='append',
                     index=False)

    return by_day_frame_total

# job2('dyz4jzxumvaf')