import hmac
import logging
from _sha256 import sha256
from datetime import datetime, time
from random import random
from urllib.parse import urlencode
import requests
import json
import psycopg2
import math
import pymysql
from requests.exceptions import RequestException, Timeout


# CICC research API endpoint and OAuth client credentials.
# NOTE(review): APP_SECRET (and the DB passwords further down) are hardcoded
# in source -- move to environment variables or a secret store.
CICC_BASE_API_URL = 'https://www.research.cicc.com/api'
APP_ID = 'app/api@thfund.com.cn'
APP_SECRET = 'aidwtw4uove3nxx1khsepwjmtejfb5vc'

# Configure logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')


def WindEebDataToDb():
    """Sync third-party EDB indicator metadata and values into Hologres.

    Dispatches on ``edbType``:

    * ``'WD'``       -- Wind: pull metadata + values via an internal Wind API
                        gateway, in groups of 50 codes.
    * ``'CICC'``     -- CICC research API: per-indicator definition + paged
                        value series.
    * ``'VSIGNALS'`` -- Vsignals API: metadata + values in one call each.

    Every branch batch-upserts metadata into
    ``public.index_center_third_edb_metrics_main`` and values into
    ``public.index_center_third_edb_metrics_data`` (Hologres via psycopg2),
    then runs per-indicator UPDATEs against the MySQL table
    ``data_browser.edb_index_center``.

    NOTE(review): ``getParam`` is not defined in this module, and
    ``getParam(edbCodes)`` reads the local name before assignment, which is
    an UnboundLocalError in plain Python. Presumably this runs under a
    scheduler (e.g. DataWorks) that injects parameters -- confirm.
    NOTE(review): Hologres/MySQL credentials are hardcoded below.
    """
    edbCodes=getParam(edbCodes)
    beginDate=getParam(beginDate)
    endDate=getParam(endDate)
    edbType=getParam(edbType)
    #edbType ='CICC'
    if edbType == 'WD':
        # edbCodes='M0048666,M0048665,M0048660,M0048657,M0048659,M0048661,M0048663,M0048662,M0048664,M5567876,M0096212,M0000553,M0096211'
        # beginDate='2000-01-01'
        # endDate='2024-11-18'
        edbCodesList = edbCodes.split(',')
        # Accumulates comma-joined groups of up to 50 codes each (the Wind
        # gateway is queried one group at a time).
        grouped_list = []
        print("wind每日刷新指标列表:"+edbCodes)
        # Walk the code list in chunks of 50.
        for i in range(0, len(edbCodesList), 50):
            # Slice up to 50 codes; a short final slice is fine.
            pair = edbCodesList[i:i+50]
            # Join this group with commas and append to the result list.
            grouped_list.append(','.join(pair))
        # Log the grouped code strings.
        print("wind指标分组:"+str(grouped_list))
        records=[]
        metas=[]
        for codes in grouped_list:
            refreshMetrics=[]
            try:
                url = 'http://10.74.194.151:8832/windapi?apiName=edb&edbCodes='+codes+'&originalResult=true&param=field_info=True'
                # Send the POST request (field_info=True returns metadata columns).
                response = requests.post(url, data={})
                json_data = json.loads(response.text)
                if json_data['code']==0:
                    for j in range(len(json_data["data"][0])):
                        metrics_update=json_data["data"][6][j]
                        # Only refresh indicators whose last-update time is
                        # after beginDate (string comparison of dates).
                        if metrics_update>beginDate:
                            metas.append({
                                "edb_metrics_code": "WD-"+json_data["data"][0][j],
                                "third_code": json_data["data"][0][j],
                                "third_type":"WD",
                                "metrics_name": json_data["data"][1][j],
                                "metrics_frequence": json_data["data"][2][j],
                                "metrics_unit": json_data["data"][3][j],
                                "metrics_startdate": json_data["data"][4][j],
                                "metrics_enddate": json_data["data"][5][j],
                                "metrics_update": json_data["data"][6][j],
                                "metrics_data_source": json_data["data"][7][j],
                                "edb_catagory": json_data["data"][8][j],
                                "metrics_remark": json_data["data"][9][j],
                                "metrics_nation": json_data["data"][10][j]
                            })
                            refreshMetrics.append(json_data["data"][0][j])
                        else:
                            print("不用更新的wind数据指标:"+json_data["data"][0][j]+",最新更新时间:"+metrics_update)
            except Timeout:
                # Request timed out.
                print("WDAPI元数据请求请求超时")
            except RequestException as e:
                # Any other requests-level error.
                print(f"WDAPI元数据请求请求异常: {e}")
            else:
                # No exception: the metadata round-trip succeeded.
                print("WDAPI元数据请求成功")
            if len(refreshMetrics)>0:
                try:
                    dataCodes = ','.join(refreshMetrics)
                    url = 'http://10.74.194.151:8832/windapi?apiName=edb&edbCodes='+dataCodes+'&beginDate='+beginDate+'&endDate='+endDate+'&originalResult=true'
                    # Send the POST request for the value series.
                    response = requests.post(url, data={})
                    data = json.loads(response.text)

                    if data['code']==0:
                        datas=data['data']
                        times=data['times']
                        codes=data['codes']
                        for index, value in enumerate(codes):
                            dataList=datas[index]
                            for dindex, dvalue in enumerate(dataList):
                                # NOTE(review): key prefix 'WIND-' differs from the
                                # 'WD-' used for edb_metrics_code -- confirm intentional.
                                records.append({
                                    "keys": 'WIND-' +value + times[dindex],  # primary key
                                    "edb_metrics_code": 'WD-' + value,  # primary key of the metadata row
                                    "metrics_value": dvalue,  # value may be any data type
                                    "data_time": times[dindex],  # ensure correct key name and string value
                                    "third_code": value,
                                    "third_type": "WD"
                                })
                except Timeout:
                    # Request timed out.
                    print("WDAPI请求超时")
                except RequestException as e:
                    # Any other requests-level error.
                    print(f"WDAPI请求异常: {e}")
                else:
                    # No exception: the value request succeeded.
                    print("WDAPI请求成功")
        values = [d['third_code'] for d in metas]
        result = ','.join(values)
        print(beginDate+"之后有更新的wind数据指标:"+result)
        print("wind元数据查询完成,查询条数:"+str(len(metas)))
        print("wind指标值数据数据查询完成,查询条数:"+str(len(records)))
        conn = psycopg2.connect(host="hgprecn-cn-v641lnkxm003-cn-shanghai.hologres.aliyuncs.com",
                                port=80,
                                dbname="odpstest",
                                user="LTAI5tQo9VtJ414iSEZrE8Vn",
                                password="BPBKqDiQ7JKPl6o3QDIFg6kh71nakS",
                                application_name="third_edb_data")
        # Batched multi-row INSERTs (comment in the original mentioned
        # executemany, but cur.execute with an expanded VALUES list is used).
        try:
            # Load the data.
            cur = conn.cursor()
            print("开始获取连接")
            # Buffer for one batch of rows.
            # First pass: upsert metadata rows.
            insertlen=0;
            batch = []
            # Row counter used for batching.
            counter = 0;
            for data in metas:
                # Build a tuple per record and append to the batch.
                batch.append((
                    data['edb_metrics_code'],
                    data['edb_catagory'],
                    data['third_code'],
                    data['third_type'],
                    data['metrics_name'],
                    data['metrics_unit'],
                    data['metrics_frequence'],
                    data['metrics_remark'],
                    data['metrics_data_source'],
                    data['metrics_startdate'],
                    data['metrics_enddate'],
                    data['metrics_update'],
                    data['metrics_nation']
                ))
                # Flush every ~500 collected records.
                # NOTE(review): counter is incremented AFTER this check, so the
                # flush actually fires with 501 rows in batch while insertlen
                # adds only 500 -- off-by-one in the reported count.
                if counter >= 500:
                    # Build the multi-row INSERT ... ON CONFLICT upsert.
                    values = ", ".join(["(%s, %s, %s, %s, %s,%s, %s,%s, %s, %s, %s, %s,%s,NUll)" for _ in range(len(batch))])
                    sql = (f"INSERT INTO public.index_center_third_edb_metrics_main(edb_metrics_code, edb_catagory, third_code, third_type,metrics_name,"
                           f"metrics_unit,metrics_frequence,metrics_remark,metrics_data_source,metrics_startdate,metrics_enddate,metrics_update,metrics_nation,available_date) "
                           f"VALUES {values} ON CONFLICT (edb_metrics_code) DO UPDATE SET metrics_startdate = EXCLUDED.metrics_startdate,metrics_update = EXCLUDED.metrics_update,metrics_enddate = EXCLUDED.metrics_enddate,"
                           f"metrics_unit = EXCLUDED.metrics_unit,metrics_frequence = EXCLUDED.metrics_frequence,"
                           f"metrics_remark = EXCLUDED.metrics_remark,metrics_data_source = EXCLUDED.metrics_data_source, modify_time = now()")

                    # Execute the batch insert.
                    cur.execute(sql, [item for sublist in batch for item in sublist])
                    # Commit the transaction.
                    conn.commit()
                    insertlen+= 500
                    # Reset counter and batch.
                    counter = 0
                    batch = []
                    # Bump the counter.
                counter += 1

            # Flush any remaining (<500) records.
            if batch:
                values = ", ".join(["(%s, %s, %s, %s, %s,%s, %s,%s, %s, %s, %s, %s,%s,NUll)" for _ in range(len(batch))])
                sql = (f"INSERT INTO public.index_center_third_edb_metrics_main(edb_metrics_code, edb_catagory, third_code, third_type,metrics_name,"
                       f"metrics_unit,metrics_frequence,metrics_remark,metrics_data_source,metrics_startdate,metrics_enddate,metrics_update,metrics_nation,available_date) "
                       f"VALUES {values} ON CONFLICT (edb_metrics_code) DO UPDATE SET metrics_startdate = EXCLUDED.metrics_startdate,metrics_update = EXCLUDED.metrics_update,metrics_enddate = EXCLUDED.metrics_enddate,"
                       f"metrics_unit = EXCLUDED.metrics_unit,metrics_frequence = EXCLUDED.metrics_frequence,"
                       f"metrics_remark = EXCLUDED.metrics_remark,metrics_data_source = EXCLUDED.metrics_data_source, modify_time = now()")
                # Execute the batch insert.
                cur.execute(sql, [item for sublist in batch for item in sublist])
                # Commit the transaction.
                conn.commit()
                insertlen+= len(batch)
            print("元数据数据写入完成,写入条数:"+str(insertlen))
            # Second pass: upsert value rows.
            insertlen=0;
            batch = []
            # Row counter used for batching.
            counter = 0
            # Prepare rows for the batched insert.
            for data in records:
                # Build a tuple per record; NaN values are skipped.
                value = float(data['metrics_value'])
                if not math.isnan(value):
                    batch.append((
                        data['keys'],
                        data['edb_metrics_code'],
                        data['metrics_value'],
                        data['data_time'],
                        data['third_code'],
                        data['third_type']
                    ))
                # Flush every ~500 collected records (same off-by-one as above).
                if counter >= 500:
                    # Build the multi-row INSERT ... ON CONFLICT upsert.
                    values = ", ".join(["(%s, %s, %s, %s, %s,%s)" for _ in range(len(batch))])
                    sql = f"INSERT INTO public.index_center_third_edb_metrics_data(keys, edb_metrics_code, metrics_value, data_time,third_code,third_type) VALUES {values} ON CONFLICT (keys) DO UPDATE SET metrics_value = EXCLUDED.metrics_value, modify_time = now()"

                    # Execute the batch insert.
                    cur.execute(sql, [item for sublist in batch for item in sublist])
                    # Commit the transaction.
                    conn.commit()
                    insertlen+= 500
                    # Reset counter and batch.
                    counter = 0
                    batch = []
                    # Bump the counter.
                counter += 1

            # Flush any remaining (<500) records.
            if batch:
                values = ", ".join(["(%s, %s, %s, %s, %s,%s)" for _ in range(len(batch))])
                sql = f"INSERT INTO public.index_center_third_edb_metrics_data(keys, edb_metrics_code, metrics_value, data_time,third_code,third_type) VALUES {values} ON CONFLICT (keys) DO UPDATE SET metrics_value = EXCLUDED.metrics_value, modify_time = now()"
                # Execute the batch insert.
                cur.execute(sql, [item for sublist in batch for item in sublist])
                # Commit the transaction.
                conn.commit()
                insertlen+= len(batch)

        except Exception as e:
            print("发生错误：", e)
            # Roll back on any error.
            conn.rollback()
        finally:
            # Close cursor and connection.
            # NOTE(review): if conn.cursor() itself raised, `cur` is unbound here.
            cur.close()
            conn.close()
        print("指标值数据数据写入完成")
        update_queries = []
        for data in metas:
            index_code = data['edb_metrics_code']
            end_date = data['metrics_enddate']
            # NOTE(review): values are interpolated into the SQL string --
            # quoting/injection risk; prefer parameterized queries.
            update_query = f"""
            UPDATE data_browser.edb_index_center
            SET index_update_time = '{end_date}'
            WHERE index_code = '{index_code}'
            """
            update_queries.append(update_query)
        # Run the MySQL catalog updates.
        update_db('rds50g3807a68zwc9soo969.mysql.rds.aliyuncs.com', 'data_browser', 3306, 'D6a_T3_Brs', update_queries)
        print("Update completed successfully.")

    elif edbType == 'CICC':
        # edbCiccCodes='IND000000000074404,CICCIND000000000116219,IND000000000074402,IND000000000074405,CICCIND000000000050350,CICCIND000000000050351,CICCIND000000000050353,CICCIND000000000001431,CICCIND000000000001429,CICCIND000000000001250,CICCIND000000000001432,CICCIND000000000100390,CICCIND000000000100382,CICCIND000000000100379,CICCIND000000000050346,CICCIND000000000114212,CICCIND000000000114218'
        records = []
        metas = []
        edbCodesList = edbCodes.split(',')
        # Map CICC frequency codes to the Chinese labels stored in Hologres.
        frequency_map = {
            "DAY": "日",
            "WEEK": "周",
            "MONTH": "月",
            "QUARTER": "季",
            "HALF_YEAR": "半年",
            "YEAR": "年"
        }
        try:
            for ciccIndex in edbCodesList:
                print("开始采集" + ciccIndex)
                # Indicator definition (metadata).
                json_data = request_cicc_ind_def(ciccIndex)
                meta = json_data["data"]
                metas.append({
                    "edb_metrics_code": "CICC-" + meta["indDerId"],
                    "third_code": meta["indDerId"],
                    "third_type": "CICC",
                    "metrics_name": meta["indDerName"],
                    "metrics_frequence": frequency_map[meta["indFrequency"]],
                    "metrics_unit": meta["indUnit"],
                    "metrics_startdate": meta["indBeginDate"],
                    "metrics_enddate": meta["indEndDate"],
                    "metrics_update": meta["dataUpdateDate"],
                    "metrics_data_source": meta["dataSourceName"],
                    "edb_catagory": meta["category"],
                    "metrics_remark": meta["indDescription"],
                    "metrics_nation": ''
                })

                pageNum = 1
                pageSize = 10000
                hasNextPage = True
                while hasNextPage == True:
                    # Fetch the (paged) indicator values.
                    jsonData = request_cicc_indData_v2(ciccIndex, pageNum, pageSize)
                    # Incremental indicator data.
                    code = jsonData['code']
                    if code == 0:
                        indicatorIds = jsonData['data']
                        print("开始采集" + str(len(indicatorIds)))
                        for index, value in enumerate(indicatorIds):
                            indDerId = value["indDerId"]
                            indDerName = value["indDerName"]
                            indDerDate = value["indDerDate"]
                            indDerValue = value["indDerValue"]
                            isPred = value["isPred"]
                            publishTime = value["publishTime"]
                            removeFlag = value["removeFlag"]
                            records.append({
                                "keys": 'CICC-' + indDerId + indDerDate,  # primary key
                                "edb_metrics_code": 'CICC-' + indDerId,
                                "metrics_value": indDerValue,  # value may be any data type
                                "data_time": indDerDate,  # ensure correct key name and string value
                                "third_code": indDerId,
                                "third_type": "CICC"
                            })
                        # A short page means this was the last one.
                        if len(indicatorIds) < pageSize:
                            hasNextPage = False

                        else:
                            pageNum += 1

        except Exception as e:
            print(f'sync indicators error: {e}')
            raise e
        finally:
            print("数据获取完成")
        print("cicc指标值数据数据查询完成,查询条数:" + str(len(records)))
        # Batched multi-row INSERTs into Hologres.
        conn = psycopg2.connect(host="hgprecn-cn-v641lnkxm003-cn-shanghai.hologres.aliyuncs.com",
                                port=80,
                                dbname="odpstest",
                                user="LTAI5tQo9VtJ414iSEZrE8Vn",
                                password="BPBKqDiQ7JKPl6o3QDIFg6kh71nakS",
                                application_name="third_edb_data")
        # Same load pattern as the 'WD' branch.
        try:
            # Load the data.
            cur = conn.cursor()
            print("开始获取连接")
            # Buffer for one batch of rows.
            # First pass: upsert metadata rows.
            insertlen = 0;
            batch = []
            # Row counter used for batching.
            counter = 0;
            for data in metas:
                # Build a tuple per record and append to the batch.
                batch.append((
                    data['edb_metrics_code'],
                    data['edb_catagory'],
                    data['third_code'],
                    data['third_type'],
                    data['metrics_name'],
                    data['metrics_unit'],
                    data['metrics_frequence'],
                    data['metrics_remark'],
                    data['metrics_data_source'],
                    data['metrics_startdate'],
                    data['metrics_enddate'],
                    data['metrics_update'],
                    data['metrics_nation']
                ))
                # Flush every ~500 collected records (same off-by-one as the
                # WD branch: flush fires with 501 rows, insertlen adds 500).
                if counter >= 500:
                    # Build the multi-row INSERT ... ON CONFLICT upsert.
                    values = ", ".join(
                        ["(%s, %s, %s, %s, %s,%s, %s,%s, %s, %s, %s, %s,%s,NUll)" for _ in range(len(batch))])
                    sql = (
                        f"INSERT INTO public.index_center_third_edb_metrics_main(edb_metrics_code, edb_catagory, third_code, third_type,metrics_name,"
                        f"metrics_unit,metrics_frequence,metrics_remark,metrics_data_source,metrics_startdate,metrics_enddate,metrics_update,metrics_nation,available_date) "
                        f"VALUES {values} ON CONFLICT (edb_metrics_code) DO UPDATE SET metrics_startdate = EXCLUDED.metrics_startdate,metrics_update = EXCLUDED.metrics_update,metrics_enddate = EXCLUDED.metrics_enddate,"
                        f"metrics_unit = EXCLUDED.metrics_unit,metrics_frequence = EXCLUDED.metrics_frequence,"
                        f"metrics_remark = EXCLUDED.metrics_remark,metrics_data_source = EXCLUDED.metrics_data_source, modify_time = now()")

                    # Execute the batch insert.
                    cur.execute(sql, [item for sublist in batch for item in sublist])
                    # Commit the transaction.
                    conn.commit()
                    insertlen += 500
                    # Reset counter and batch.
                    counter = 0
                    batch = []
                    # Bump the counter.
                counter += 1

            # Flush any remaining (<500) records.
            if batch:
                values = ", ".join(
                    ["(%s, %s, %s, %s, %s,%s, %s,%s, %s, %s, %s, %s,%s,NUll)" for _ in range(len(batch))])
                sql = (
                    f"INSERT INTO public.index_center_third_edb_metrics_main(edb_metrics_code, edb_catagory, third_code, third_type,metrics_name,"
                    f"metrics_unit,metrics_frequence,metrics_remark,metrics_data_source,metrics_startdate,metrics_enddate,metrics_update,metrics_nation,available_date) "
                    f"VALUES {values} ON CONFLICT (edb_metrics_code) DO UPDATE SET metrics_startdate = EXCLUDED.metrics_startdate,metrics_update = EXCLUDED.metrics_update,metrics_enddate = EXCLUDED.metrics_enddate,"
                    f"metrics_unit = EXCLUDED.metrics_unit,metrics_frequence = EXCLUDED.metrics_frequence,"
                    f"metrics_remark = EXCLUDED.metrics_remark,metrics_data_source = EXCLUDED.metrics_data_source, modify_time = now()")
                # Execute the batch insert.
                cur.execute(sql, [item for sublist in batch for item in sublist])
                # Commit the transaction.
                conn.commit()
                insertlen += len(batch)
            print("元数据数据写入完成,写入条数:" + str(insertlen))
            # Second pass: upsert value rows.
            insertlen = 0;
            batch = []
            # Row counter used for batching.
            counter = 0
            # Prepare rows for the batched insert.
            for data in records:
                # Build a tuple per record; NaN values are skipped.
                value = float(data['metrics_value'])
                if not math.isnan(value):
                    batch.append((
                        data['keys'],
                        data['edb_metrics_code'],
                        data['metrics_value'],
                        data['data_time'],
                        data['third_code'],
                        data['third_type']
                    ))
                # Flush every ~500 collected records (same off-by-one as above).
                if counter >= 500:
                    # Build the multi-row INSERT ... ON CONFLICT upsert.
                    values = ", ".join(["(%s, %s, %s, %s, %s,%s)" for _ in range(len(batch))])
                    sql = f"INSERT INTO public.index_center_third_edb_metrics_data(keys, edb_metrics_code, metrics_value, data_time,third_code,third_type) VALUES {values} ON CONFLICT (keys) DO UPDATE SET metrics_value = EXCLUDED.metrics_value, modify_time = now()"

                    # Execute the batch insert.
                    cur.execute(sql, [item for sublist in batch for item in sublist])
                    # Commit the transaction.
                    conn.commit()
                    insertlen += 500
                    # Reset counter and batch.
                    counter = 0
                    batch = []
                    # Bump the counter.
                counter += 1

            # Flush any remaining (<500) records.
            if batch:
                values = ", ".join(["(%s, %s, %s, %s, %s,%s)" for _ in range(len(batch))])
                sql = f"INSERT INTO public.index_center_third_edb_metrics_data(keys, edb_metrics_code, metrics_value, data_time,third_code,third_type) VALUES {values} ON CONFLICT (keys) DO UPDATE SET metrics_value = EXCLUDED.metrics_value, modify_time = now()"
                # Execute the batch insert.
                cur.execute(sql, [item for sublist in batch for item in sublist])
                # Commit the transaction.
                conn.commit()
                insertlen += len(batch)

        except Exception as e:
            print("发生错误：", e)
            # Roll back on any error.
            conn.rollback()
        finally:
            # Close cursor and connection (same `cur` unbound caveat as above).
            cur.close()
            conn.close()
        print("指标值数据数据写入完成")
        update_queries = []
        for data in metas:
            index_code = data['edb_metrics_code']
            end_date = data['metrics_enddate']
            # NOTE(review): f-string SQL -- quoting/injection risk.
            update_query = f"""
                UPDATE data_browser.edb_index_center
                SET index_update_time = '{end_date}'
                WHERE index_code = '{index_code}'
                """
            update_queries.append(update_query)
        # Run the MySQL catalog updates.
        update_db_cicc('rds50g3807a68zwc9soo969.mysql.rds.aliyuncs.com', 'data_browser', 3306, 'D6a_T3_Brs',
                       update_queries)
        print("Update completed successfully.")

    elif edbType == 'VSIGNALS':
        # edbCodes = "187900000077"
        edbCodesList = edbCodes.split(',')
        grouped_list = []
        print("VSIGNALS每日刷新指标列表:" + edbCodes)

        # Group the codes in chunks of 50, as in the 'WD' branch.
        for i in range(0, len(edbCodesList), 50):
            pair = edbCodesList[i:i + 50]
            grouped_list.append(','.join(pair))
        print("VSIGNALS指标分组:" + str(grouped_list))

        records = []
        metas = []
        refreshMetrics = []  # kept outside the loop: accumulates across all groups

        for codes in grouped_list:
            try:
                url = 'https://api.vsignals.cn/dbapi'
                response = requests.post(url, json={
                    "api_name": "thjj_meta_indicator",
                    "params": {
                        "factor_id": codes,
                        "hasChildren": False
                    },
                    "token": "7808213bf81f398744882dfc96220dcf"
                })
                response.raise_for_status()  # raise for HTTP error status codes
                json_data = response.json()  # parse JSON straight from the response
                logging.info("成功解析JSON响应")
                logging.debug(f"JSON响应内容: {json_data}")

                if json_data['code'] == 0:
                    data = json_data.get('data', {})
                    logging.debug(f"data 类型: {type(data)}")
                    logging.debug(f"data 内容: {data}")

                    if isinstance(data, dict) and 'items' in data and len(data['items']) > 0:
                        fields = data.get('fields', [])
                        items = data.get('items', [])

                        # Zip column names with row values into a dict per item.
                        for item in items:
                            if len(item) == len(fields):
                                item_dict = {}
                                for field, value in zip(fields, item):
                                    item_dict[field] = value

                                metas.append({
                                    "edb_metrics_code": "VSIGNALS-" + str(item_dict.get('factor_id')),
                                    "third_code": str(item_dict.get('factor_id')),
                                    "third_type": "VSIGNALS",
                                    "metrics_name": item_dict.get('factor_name'),
                                    "metrics_frequence": item_dict.get('freq'),
                                    "metrics_unit": item_dict.get('unit'),
                                    "metrics_startdate": None,  # assumed absent: no startdate in this API
                                    "metrics_enddate": None,  # assumed absent: no enddate
                                    "metrics_update": None,  # assumed absent: no update time
                                    "metrics_data_source": item_dict.get('source'),
                                    "edb_catagory": None,  # assumed absent: no category
                                    "metrics_remark": item_dict.get('meaning'),
                                    "metrics_nation": None  # assumed absent: no nation
                                })
                                refreshMetrics.append(str(item_dict.get('factor_id')))
                                logging.info(f"成功处理指标: {item_dict.get('factor_id')}")
                    else:
                        logging.warning("data 中没有 items 或 items 为空")
                else:
                    logging.warning(f"响应码不为0: {json_data.get('code')}")
            except json.JSONDecodeError:
                logging.error("JSON解析错误")
            except KeyError as key_err:
                logging.error(f"键错误: {key_err}")
            except RequestException as req_err:
                logging.error(f"请求错误: {req_err}")
            except Exception as err:
                logging.error(f"其他错误: {err}")

        print("VSIGNALS API元数据请求成功")

        if len(refreshMetrics) > 0:
            try:
                dataCodes = ','.join(refreshMetrics)
                url = 'https://api.vsignals.cn/dbapi'
                response = requests.post(url, json={
                    "api_name": "thjj_value_indicator",
                    "params": {
                        "factor_id": dataCodes,
                        "hasChildren": False
                    },
                    "token": "7808213bf81f398744882dfc96220dcf"
                })
                response.raise_for_status()  # raise for HTTP error status codes
                json_data = response.json()  # parse JSON straight from the response

                if json_data['code'] == 0:
                    data = json_data.get("data", {})
                    items = data.get("items", [])

                    for item in items:
                        # NOTE(review): given the module imports, `time` is
                        # datetime.time and `random` is the random() function,
                        # so time.time() / random.randint() raise
                        # AttributeError here -- this branch needs
                        # `import time` and `import random`.
                        timestamp = int(time.time() * 1000)  # current timestamp (milliseconds)
                        random_number = random.randint(1000, 9999)  # 4-digit random number
                        # NOTE(review): the timestamp+random suffix makes keys
                        # unique per run, so the ON CONFLICT (keys) upsert can
                        # never match and rows duplicate across runs -- confirm.
                        records.append({
                            "keys": 'VSIGNALS-' + str(item[1]) + str(item[0]) + '-' + str(timestamp) + '-' + str(
                                random_number),
                            "edb_metrics_code": 'VSIGNALS-' + str(item[1]),
                            "metrics_value": item[3],
                            "data_time": item[0],
                            "third_code": str(item[1]),
                            "third_type": "VSIGNALS"
                        })
            except Timeout:
                print("VSIGNALS API请求超时")
            except RequestException as e:
                print(f"VSIGNALS API请求异常: {e}")
            else:
                print("VSIGNALS API请求成功")

        values = [d['third_code'] for d in metas]
        result = ','.join(values)
        print("VSIGNALS 元数据查询完成,查询条数:" + str(len(metas)))
        print("VSIGNALS 指标值数据数据查询完成,查询条数:" + str(len(records)))

        # TODO: use environment variables or a config file for these credentials.
        conn = psycopg2.connect(
            host="hgprecn-cn-v641lnkxm003-cn-shanghai.hologres.aliyuncs.com",
            port=80,
            dbname="odpstest",
            user="LTAI5tQo9VtJ414iSEZrE8Vn",
            password="BPBKqDiQ7JKPl6o3QDIFg6kh71nakS",
            application_name="third_edb_data"
        )

        # Same batched load pattern as the other branches.
        try:
            # Load the data.
            cur = conn.cursor()
            print("开始获取连接")
            # Buffer for one batch of rows.
            # First pass: upsert metadata rows.
            insertlen = 0
            batch = []
            # Row counter used for batching.
            counter = 0
            for data in metas:
                # Build a tuple per record and append to the batch.
                batch.append((
                    data['edb_metrics_code'],
                    data['edb_catagory'],
                    data['third_code'],
                    data['third_type'],
                    data['metrics_name'],
                    data['metrics_unit'],
                    data['metrics_frequence'],
                    data['metrics_remark'],
                    data['metrics_data_source'],
                    data['metrics_startdate'],
                    data['metrics_enddate'],
                    data['metrics_update'],
                    data['metrics_nation']
                ))
                # Flush every ~500 collected records (same off-by-one as the
                # WD branch).
                if counter >= 500:
                    # Build the multi-row INSERT ... ON CONFLICT upsert.
                    values = ", ".join(
                        ["(%s, %s, %s, %s, %s,%s, %s,%s, %s, %s, %s, %s,%s,NUll)" for _ in range(len(batch))])
                    sql = (
                        f"INSERT INTO public.index_center_third_edb_metrics_main(edb_metrics_code, edb_catagory, third_code, third_type,metrics_name,"
                        f"metrics_unit,metrics_frequence,metrics_remark,metrics_data_source,metrics_startdate,metrics_enddate,metrics_update,metrics_nation,available_date) "
                        f"VALUES {values} ON CONFLICT (edb_metrics_code) DO UPDATE SET metrics_startdate = EXCLUDED.metrics_startdate,metrics_update = EXCLUDED.metrics_update,metrics_enddate = EXCLUDED.metrics_enddate,"
                        f"metrics_unit = EXCLUDED.metrics_unit,metrics_frequence = EXCLUDED.metrics_frequence,"
                        f"metrics_remark = EXCLUDED.metrics_remark,metrics_data_source = EXCLUDED.metrics_data_source, modify_time = now()")

                    # Execute the batch insert.
                    cur.execute(sql, [item for sublist in batch for item in sublist])
                    # Commit the transaction.
                    conn.commit()
                    insertlen += 500
                    # Reset counter and batch.
                    counter = 0
                    batch = []
                    # Bump the counter.
                counter += 1

            # Flush any remaining (<500) records.
            if batch:
                values = ", ".join(
                    ["(%s, %s, %s, %s, %s,%s, %s,%s, %s, %s, %s, %s,%s,NUll)" for _ in range(len(batch))])
                sql = (
                    f"INSERT INTO public.index_center_third_edb_metrics_main(edb_metrics_code, edb_catagory, third_code, third_type,metrics_name,"
                    f"metrics_unit,metrics_frequence,metrics_remark,metrics_data_source,metrics_startdate,metrics_enddate,metrics_update,metrics_nation,available_date) "
                    f"VALUES {values} ON CONFLICT (edb_metrics_code) DO UPDATE SET metrics_startdate = EXCLUDED.metrics_startdate,metrics_update = EXCLUDED.metrics_update,metrics_enddate = EXCLUDED.metrics_enddate,"
                    f"metrics_unit = EXCLUDED.metrics_unit,metrics_frequence = EXCLUDED.metrics_frequence,"
                    f"metrics_remark = EXCLUDED.metrics_remark,metrics_data_source = EXCLUDED.metrics_data_source, modify_time = now()")
                # Execute the batch insert.
                cur.execute(sql, [item for sublist in batch for item in sublist])
                # Commit the transaction.
                conn.commit()
                insertlen += len(batch)
            print("元数据数据写入完成,写入条数:" + str(insertlen))
            # Second pass: upsert value rows.
            insertlen = 0;
            batch = []
            # Row counter used for batching.
            counter = 0
            # Prepare rows for the batched insert.
            for data in records:
                # Build a tuple per record; NaN values are skipped.
                value = float(data['metrics_value'])
                if not math.isnan(value):
                    batch.append((
                        data['keys'],
                        data['edb_metrics_code'],
                        data['metrics_value'],
                        data['data_time'],
                        data['third_code'],
                        data['third_type']
                    ))
                # Flush every ~500 collected records (same off-by-one as above).
                if counter >= 500:
                    # Build the multi-row INSERT ... ON CONFLICT upsert.
                    values = ", ".join(["(%s, %s, %s, %s, %s,%s)" for _ in range(len(batch))])
                    sql = f"INSERT INTO public.index_center_third_edb_metrics_data(keys, edb_metrics_code, metrics_value, data_time,third_code,third_type) VALUES {values} ON CONFLICT (keys) DO UPDATE SET metrics_value = EXCLUDED.metrics_value, modify_time = now()"

                    # Execute the batch insert.
                    cur.execute(sql, [item for sublist in batch for item in sublist])
                    # Commit the transaction.
                    conn.commit()
                    insertlen += 500
                    # Reset counter and batch.
                    counter = 0
                    batch = []
                    # Bump the counter.
                counter += 1

            # Flush any remaining (<500) records.
            if batch:
                values = ", ".join(["(%s, %s, %s, %s, %s,%s)" for _ in range(len(batch))])
                sql = f"INSERT INTO public.index_center_third_edb_metrics_data(keys, edb_metrics_code, metrics_value, data_time,third_code,third_type) VALUES {values} ON CONFLICT (keys) DO UPDATE SET metrics_value = EXCLUDED.metrics_value, modify_time = now()"
                # Execute the batch insert.
                cur.execute(sql, [item for sublist in batch for item in sublist])
                # Commit the transaction.
                conn.commit()
                insertlen += len(batch)

        except Exception as e:
            print("发生错误：", e)
            # Roll back on any error, then abort this branch.
            conn.rollback()
            return
        finally:
            # Close cursor and connection (same `cur` unbound caveat as above).
            cur.close()
            conn.close()
        print("指标值数据数据写入完成")
        update_queries = []
        for data in metas:
            index_code = data['edb_metrics_code']
            end_date = data['metrics_enddate']
            # NOTE(review): f-string SQL -- quoting/injection risk; also
            # metrics_enddate is always None for VSIGNALS metas above.
            update_query = f"""
                    UPDATE data_browser.edb_index_center
                    SET index_update_time = '{end_date}'
                    WHERE index_code = '{index_code}'
                    """
            update_queries.append(update_query)
        # Run the MySQL catalog updates.
        update_db_Vsignals('rds50g3807a68zwc9soo969.mysql.rds.aliyuncs.com', 'data_browser', 3306, 'D6a_T3_Brs',
                           update_queries)
        print("Update completed successfully.")


# Wind: apply catalog update statements to MySQL
def update_db(host, user, port, password, update_queries):
    """Execute a list of UPDATE statements against MySQL in one transaction.

    Args:
        host: MySQL server hostname.
        user: MySQL user name.
        port: MySQL TCP port.
        password: MySQL password.
        update_queries: iterable of SQL statements, executed in order.

    Raises:
        Exception: re-raised after rolling back if any statement fails.
    """
    conn = pymysql.connect(host=host, user=user, port=port, password=password, charset='utf8')
    try:
        c = conn.cursor()
        try:
            for query in update_queries:
                c.execute(query)
            conn.commit()
        finally:
            # Explicitly release the cursor even when a statement fails
            # (previously left to connection teardown / GC).
            c.close()
    except Exception as e:
        conn.rollback()
        raise e
    finally:
        conn.close()

# CICC
def update_db_cicc(host, user, port, password, update_queries):
    """Run a batch of SQL statements against a MySQL host in one transaction.

    Behaves like ``update_db`` (same duplicated contract): executes every
    query on one connection, commits once, rolls back and re-raises on
    failure, and always closes the connection.
    """
    conn = pymysql.connect(host=host, user=user, port=port, password=password, charset='utf8')
    try:
        # Context-managed cursor: guarantees the cursor is closed on failure
        # (the original never closed it).
        with conn.cursor() as cursor:
            for query in update_queries:
                cursor.execute(query)
        conn.commit()
    except Exception as e:
        conn.rollback()
        raise e
    finally:
        conn.close()


# Western Securities (Vsignals)
def update_db_Vsignals(host, user, port, password, update_queries):
    """Run a batch of SQL statements against a MySQL host in one transaction.

    Same contract as ``update_db``: sequential execution on a single
    connection, one commit, rollback + re-raise on error, connection always
    closed.
    """
    conn = pymysql.connect(host=host, user=user, port=port, password=password, charset='utf8')
    try:
        # Context-managed cursor fixes the cursor leak in the original.
        with conn.cursor() as cursor:
            for query in update_queries:
                cursor.execute(query)
        conn.commit()
    except Exception as e:
        conn.rollback()
        raise e
    finally:
        conn.close()

# Obtain a CICC access token
def request_cicc_access_token():
    """Fetch an OAuth2 client-credentials access token from the CICC API.

    Returns:
        The access-token string from ``result['data']['accessToken']``.

    Raises:
        ValueError: the response body is not valid JSON (body is printed first).
        RuntimeError: the API answered with a non-zero ``code``.
    """
    url = f'{CICC_BASE_API_URL}/oauth2.0/accessToken'
    data = {
        "grant_type": "client_credentials",
        "client_id": APP_ID,
        "client_secret": APP_SECRET,
    }
    response = requests.post(url,
                             headers={'Content-Type': 'application/x-www-form-urlencoded'},
                             data=data,
                             stream=False,
                             timeout=10,
                             )
    try:
        result = response.json()
    except ValueError as e:
        print(f'\n\n>>>>>>>>>> ERROR <<<<<<<<<<\n\nInvalid response: {response.text}')
        raise e
    # Explicit check instead of `assert` (asserts are stripped under `python -O`);
    # the message now includes the response body, not the useless Response repr.
    if result.get('code') != 0:
        raise RuntimeError(f'Failed to request access token:{response.text}')
    return result['data']['accessToken']


# Sign and call a CICC API endpoint
def request_cicc_data(method, path, params, body=None, content_type='application/json'):
    """Call a signed CICC API endpoint and return its decoded JSON payload.

    The request is signed with HMAC-SHA256 over ``method\\npath\\nsorted
    query string\\nsha256(body)`` using ``APP_SECRET``. Note that *params*
    is mutated in place (a ``timestamp`` entry is added) — callers should
    pass a throwaway dict.

    Raises:
        ValueError: unsupported *method*, or non-JSON response body.
        RuntimeError: the API answered with a non-zero ``code``.
    """
    # Token is fetched on every call; cache externally if needed (valid ~2 h).
    access_token = request_cicc_access_token()

    params.update({'timestamp': str(int(datetime.now().timestamp() * 1000))})
    sorted_params = urlencode(dict(sorted(params.items())), safe='/@')
    body_str = '' if body is None else json.dumps(body)
    body_hash = sha256(body_str.encode('utf-8')).hexdigest()
    str_to_sign = "\n".join([method, path, sorted_params, body_hash])
    signature = hmac.new(APP_SECRET.encode('utf-8'), str_to_sign.encode('utf-8'),
                         digestmod=sha256).hexdigest()
    url = f'{CICC_BASE_API_URL}{path}'
    headers = {
        'Authorization': f'Bearer {access_token}',
        'X-CICC-Signature': signature,
        # Bug fix: honor the content_type parameter — it was accepted but
        # ignored (header was hard-coded to 'application/json').
        'Content-Type': content_type,
    }
    if method == 'GET':
        # NOTE(review): the original also sent the body on GET; preserved.
        response = requests.get(url=url, params=params, headers=headers, data=body_str)
    elif method == 'POST':
        response = requests.post(url=url, params=params, headers=headers, data=body_str)
    else:
        # Explicit exception instead of `assert False` (stripped under -O).
        raise ValueError(f'Unsupported method {method}')
    try:
        data = response.json()
    except ValueError as e:
        print(f'\n\n>>>>>>>>>> ERROR <<<<<<<<<<\n\nInvalid response: {response.text}')
        raise e
    if data['code'] != 0:
        raise RuntimeError(f'Failed to request from {path}: {response.text}')
    return data


# Fetch the CICC indicator definition for one indicator id
def request_cicc_ind_def(id):
    """Return the indicator-definition payload for *id* via the signed API."""
    return request_cicc_data('GET', f'/data/indInfo/v1/{id}', {})


# Pull a page of CICC indicator values
def request_cicc_indData_v2(index, currentPage, pageSize):
    """Return page *currentPage* (of size *pageSize*) of data for indicator *index*."""
    path = '/'.join(['/data/indData/v2', index, str(currentPage), str(pageSize)])
    return request_cicc_data('GET', path, {})


if __name__ == '__main__':
    # Script entry point: pull EDB indicator data (Wind/CICC/Vsignals,
    # selected by the runtime parameters) and sync it into the databases.
    WindEebDataToDb()