import logging
import requests
import json
import psycopg2
import math
import pymysql
import time
import random
from requests.exceptions import RequestException, Timeout

# Configure root logging: INFO level, timestamped "time - level - message" format.
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')


# Shared VSIGNALS dbapi endpoint settings.
# NOTE(review): the API token and DB credentials below are hard-coded; they
# should be moved to environment variables or a secret store.
_VSIGNALS_URL = 'https://api.vsignals.cn/dbapi'
_VSIGNALS_TOKEN = '7808213bf81f398744882dfc96220dcf'
# Flush inserts to Hologres in groups of this many rows.
_BATCH_SIZE = 500
# Without an explicit timeout, requests waits forever and the Timeout
# exception handler below would be unreachable.
_REQUEST_TIMEOUT = 60


def _call_vsignals(api_name, factor_ids):
    """POST one dbapi request for the given comma-separated factor ids and
    return the parsed JSON body. Raises RequestException / HTTPError /
    JSONDecodeError on failure."""
    response = requests.post(_VSIGNALS_URL, json={
        "api_name": api_name,
        "params": {
            "factor_id": factor_ids,
            "hasChildren": False
        },
        "token": _VSIGNALS_TOKEN
    }, timeout=_REQUEST_TIMEOUT)
    response.raise_for_status()
    return response.json()


def _fetch_metas(grouped_list):
    """Fetch indicator metadata for each code group.

    Returns:
        (metas, refresh_codes): list of metadata dicts keyed by the target
        table's column names, and the list of factor ids that were
        successfully processed (used for the subsequent value fetch).
    """
    metas = []
    refresh_codes = []
    for codes in grouped_list:
        try:
            json_data = _call_vsignals("thjj_meta_indicator", codes)
            logging.info("成功解析JSON响应")
            logging.debug(f"JSON响应内容: {json_data}")

            if json_data['code'] != 0:
                logging.warning(f"响应码不为0: {json_data.get('code')}")
                continue

            data = json_data.get('data', {})
            logging.debug(f"data 类型: {type(data)}")
            logging.debug(f"data 内容: {data}")

            if not (isinstance(data, dict) and data.get('items')):
                logging.warning("data 中没有 items 或 items 为空")
                continue

            fields = data.get('fields', [])
            for item in data['items']:
                # Rows whose width does not match the field list are skipped.
                if len(item) != len(fields):
                    continue
                item_dict = dict(zip(fields, item))
                factor_id = str(item_dict.get('factor_id'))
                metas.append({
                    "edb_metrics_code": "VSIGNALS-" + factor_id,
                    "third_code": factor_id,
                    "third_type": "VSIGNALS",
                    "metrics_name": item_dict.get('factor_name'),
                    "metrics_frequence": item_dict.get('freq'),
                    "metrics_unit": item_dict.get('unit'),
                    "metrics_startdate": None,  # API provides no startdate
                    "metrics_enddate": None,  # API provides no enddate
                    "metrics_update": None,  # API provides no update time
                    "metrics_data_source": item_dict.get('source'),
                    "edb_catagory": None,  # API provides no category
                    "metrics_remark": item_dict.get('meaning'),
                    "metrics_nation": None  # API provides no nation
                })
                refresh_codes.append(factor_id)
                logging.info(f"成功处理指标: {item_dict.get('factor_id')}")
        except json.JSONDecodeError:
            logging.error("JSON解析错误")
        except KeyError as key_err:
            logging.error(f"键错误: {key_err}")
        except RequestException as req_err:
            logging.error(f"请求错误: {req_err}")
        except Exception as err:
            logging.error(f"其他错误: {err}")
    return metas, refresh_codes


def _fetch_records(refresh_codes):
    """Fetch indicator value rows for all collected factor ids.

    Each API item is positional: item[0] = data_time, item[1] = factor_id,
    item[3] = value.  A timestamp + random suffix makes the `keys` column
    unique per run.  Returns a list of record dicts (possibly empty).
    """
    records = []
    try:
        json_data = _call_vsignals("thjj_value_indicator", ','.join(refresh_codes))
        if json_data['code'] == 0:
            for item in json_data.get("data", {}).get("items", []):
                timestamp = int(time.time() * 1000)  # current time in ms
                random_number = random.randint(1000, 9999)  # 4-digit suffix
                records.append({
                    "keys": 'VSIGNALS-' + str(item[1]) + str(item[0]) + '-' + str(timestamp) + '-' + str(
                        random_number),
                    "edb_metrics_code": 'VSIGNALS-' + str(item[1]),
                    "metrics_value": item[3],
                    "data_time": item[0],
                    "third_code": str(item[1]),
                    "third_type": "VSIGNALS"
                })
    except Timeout:
        print("VSIGNALS API请求超时")
    except RequestException as e:
        print(f"VSIGNALS API请求异常: {e}")
    else:
        print("VSIGNALS API请求成功")
    return records


def _insert_in_batches(rows, flush):
    """Feed `rows` to `flush` in groups of _BATCH_SIZE; return rows written."""
    inserted = 0
    for start in range(0, len(rows), _BATCH_SIZE):
        batch = rows[start:start + _BATCH_SIZE]
        flush(batch)
        inserted += len(batch)
    return inserted


def _flush_metas(cur, conn, batch):
    """Upsert one batch of metadata tuples into the metrics_main table."""
    placeholders = ", ".join(
        ["(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, NULL)"] * len(batch))
    sql = (
        "INSERT INTO public.index_center_third_edb_metrics_main(edb_metrics_code, edb_catagory, third_code, third_type, metrics_name, "
        "metrics_unit, metrics_frequence, metrics_remark, metrics_data_source, metrics_startdate, metrics_enddate, metrics_update, metrics_nation, available_date) "
        f"VALUES {placeholders} ON CONFLICT (edb_metrics_code) DO UPDATE SET "
        "metrics_startdate = EXCLUDED.metrics_startdate, metrics_update = EXCLUDED.metrics_update, "
        "metrics_enddate = EXCLUDED.metrics_enddate, metrics_unit = EXCLUDED.metrics_unit, "
        "metrics_frequence = EXCLUDED.metrics_frequence, metrics_remark = EXCLUDED.metrics_remark, "
        "metrics_data_source = EXCLUDED.metrics_data_source, modify_time = now()")
    cur.execute(sql, [value for row in batch for value in row])
    conn.commit()


def _flush_records(cur, conn, batch):
    """Upsert one batch of value tuples into the metrics_data table."""
    placeholders = ", ".join(["(%s, %s, %s, %s, %s, %s)"] * len(batch))
    sql = (
        "INSERT INTO public.index_center_third_edb_metrics_data(keys, edb_metrics_code, metrics_value, data_time, third_code, third_type) "
        f"VALUES {placeholders} ON CONFLICT (keys) DO UPDATE SET "
        "metrics_value = EXCLUDED.metrics_value, modify_time = now()")
    cur.execute(sql, [value for row in batch for value in row])
    conn.commit()


def VsignalsEebDataToDb():
    """Sync VSIGNALS EDB indicators into Hologres and the MySQL index center.

    Pipeline:
      1. Read the comma-separated indicator code list from the job parameter
         framework (``getParam``), split it into groups of 50 codes.
      2. Fetch indicator metadata and value rows from the VSIGNALS dbapi.
      3. Bulk-upsert metadata and values into Hologres in batches of 500.
      4. Push index update times into ``data_browser.edb_index_center`` (MySQL).

    Returns:
        None. On a Hologres error the transaction is rolled back and the
        function returns early without updating MySQL.
    """
    # edbCodes = "187900000077"
    # NOTE(review): the original code called getParam(edbCodes) with edbCodes
    # unbound (a guaranteed NameError). getParam is defined elsewhere in the
    # job framework and presumably takes the parameter *name* — confirm.
    edbCodes = getParam("edbCodes")
    edbCodesList = edbCodes.split(',')
    print("VSIGNALS每日刷新指标列表:" + edbCodes)

    # Group codes 50 per request to keep each API payload bounded.
    grouped_list = [','.join(edbCodesList[i:i + 50])
                    for i in range(0, len(edbCodesList), 50)]
    print("VSIGNALS指标分组:" + str(grouped_list))

    metas, refreshMetrics = _fetch_metas(grouped_list)
    print("VSIGNALS API元数据请求成功")

    records = _fetch_records(refreshMetrics) if refreshMetrics else []

    print("VSIGNALS 元数据查询完成,查询条数:" + str(len(metas)))
    print("VSIGNALS 指标值数据数据查询完成,查询条数:" + str(len(records)))

    # NOTE(review): credentials should come from env vars / a config store.
    conn = psycopg2.connect(
        host="hgprecn-cn-v641lnkxm003-cn-shanghai.hologres.aliyuncs.com",
        port=80,
        dbname="odpstest",
        user="LTAI5tQo9VtJ414iSEZrE8Vn",
        password="BPBKqDiQ7JKPl6o3QDIFg6kh71nakS",
        application_name="third_edb_data"
    )

    cur = None  # so the finally block is safe even if cursor() itself fails
    try:
        cur = conn.cursor()
        print("开始获取连接")

        # --- metadata upsert -------------------------------------------------
        meta_rows = [(
            data['edb_metrics_code'],
            data['edb_catagory'],
            data['third_code'],
            data['third_type'],
            data['metrics_name'],
            data['metrics_unit'],
            data['metrics_frequence'],
            data['metrics_remark'],
            data['metrics_data_source'],
            data['metrics_startdate'],
            data['metrics_enddate'],
            data['metrics_update'],
            data['metrics_nation']
        ) for data in metas]
        insertlen = _insert_in_batches(meta_rows, lambda b: _flush_metas(cur, conn, b))
        print("元数据数据写入完成,写入条数:" + str(insertlen))

        # --- value upsert ----------------------------------------------------
        value_rows = []
        for data in records:
            try:
                value = float(data['metrics_value'])
            except (TypeError, ValueError):
                # Skip non-numeric values instead of aborting the whole load.
                continue
            if math.isnan(value):
                continue  # NaN cannot be stored meaningfully
            value_rows.append((
                data['keys'],
                data['edb_metrics_code'],
                data['metrics_value'],
                data['data_time'],
                data['third_code'],
                data['third_type']
            ))
        _insert_in_batches(value_rows, lambda b: _flush_records(cur, conn, b))

    except Exception as e:
        print("发生错误：", e)
        # Roll back the in-flight batch on any error.
        conn.rollback()
        return
    finally:
        if cur is not None:
            cur.close()
        conn.close()
    print("指标值数据数据写入完成")

    # --- MySQL index-center update ------------------------------------------
    # NOTE(review): metrics_enddate is always None in metas above, so this
    # writes the literal string 'None' into index_update_time — confirm intent.
    update_queries = []
    for data in metas:
        # Values originate from an external API; escape single quotes since
        # update_db only accepts plain SQL strings (no bind parameters).
        index_code = str(data['edb_metrics_code']).replace("'", "''")
        end_date = str(data['metrics_enddate']).replace("'", "''")
        update_query = f"""
            UPDATE data_browser.edb_index_center
            SET index_update_time = '{end_date}'
            WHERE index_code = '{index_code}'
            """
        update_queries.append(update_query)
    update_db('rds50g3807a68zwc9soo969.mysql.rds.aliyuncs.com', 'data_browser', 3306, 'D6a_T3_Brs', update_queries)
    print("Update completed successfully.")


def update_db(host, user, port, password, update_queries):
    """Execute a list of SQL statements against MySQL in a single transaction.

    Args:
        host: MySQL server host name.
        user: MySQL user name.
        port: MySQL server port.
        password: MySQL password.
        update_queries: iterable of complete SQL strings, executed in order.

    Raises:
        Exception: whatever the driver raised; the transaction is rolled
        back first and the original traceback is preserved.
    """
    conn = pymysql.connect(host=host, user=user, port=port, password=password, charset='utf8')
    try:
        cur = conn.cursor()
        try:
            for query in update_queries:
                cur.execute(query)
            conn.commit()
        finally:
            cur.close()  # original never closed the cursor
    except Exception:
        conn.rollback()
        raise  # bare raise keeps the original traceback ('raise e' does not)
    finally:
        conn.close()


# Script entry point: run the full VSIGNALS → Hologres/MySQL sync once.
if __name__ == '__main__':
    VsignalsEebDataToDb()

