import requests
import json
import psycopg2
from requests.exceptions import RequestException, Timeout

def writeApiDate():
    """Fetch EDB metric data from a third-party source and upsert it into Hologres.

    Reads four job parameters via the platform-injected ``getParam`` helper
    (assumed to be provided by the scheduling environment — TODO confirm):
      - ``edbType``:  'WD' (Wind) or 'TL' (Tonglian/DataYes)
      - ``edbCodes``: comma-separated indicator codes
      - ``beginDate`` / ``endDate``: query window

    Fetches the matching records, then bulk-upserts them into
    ``public.index_center_wind_edb_metrics_data`` in batches of 500.
    """
    edb_type = getParam('edbType')
    edb_codes = getParam('edbCodes')
    begin_date = getParam('beginDate')
    end_date = getParam('endDate')

    records = []
    if edb_type == 'WD':
        records = _fetch_wind_records(edb_codes, begin_date, end_date)
        print("wind数据查询完成,查询条数:" + str(len(records)))
    elif edb_type == 'TL':
        records = _fetch_tl_records(edb_codes, begin_date, end_date)
        print("TL数据查询完成,查询条数:" + str(len(records)))

    inserted = _bulk_upsert(records)
    print("数据写入完成,写入条数:" + str(inserted))


def _fetch_wind_records(edb_codes, begin_date, end_date):
    """Query the Wind EDB HTTP API and return a list of record dicts."""
    records = []
    try:
        url = ('http://10.74.194.151:8832/windapi?apiName=edb'
               '&edbCodes=' + edb_codes +
               '&beginDate=' + begin_date +
               '&endDate=' + end_date +
               '&originalResult=true')
        # FIX: original call had no timeout, so the Timeout handler below
        # could never fire; use the same 60s budget as the TL branch.
        response = requests.post(url, data={}, timeout=60)
        data = json.loads(response.text)

        if data['code'] == 0:
            times = data['times']
            # data['data'] is one value-series per code, aligned with
            # data['codes'] by index (per the original indexing scheme).
            for index, code in enumerate(data['codes']):
                for dindex, point_value in enumerate(data['data'][index]):
                    records.append({
                        "keys": 'WD-' + code + times[dindex],       # primary key
                        "edb_metrics_code": 'WD-' + code,
                        "wind_code": code,
                        "metrics_value": point_value,
                        "data_time": times[dindex],
                        # FIX: original stored the whole codes *list* here;
                        # use the per-record scalar code, consistent with TL.
                        "third_code": code,
                        "third_type": "WD",
                    })
    except Timeout:
        print("WDAPI请求超时")
    except RequestException as e:
        print(f"WDAPI请求异常: {e}")
    else:
        print("WDAPI请求成功")
    return records


def _fetch_tl_records(edb_codes, begin_date, end_date):
    """Query the Tonglian/DataYes EDB HTTP API and return a list of record dicts."""
    records = []
    try:
        url = 'http://10.111.22.165:8081/dataYesApi/edb'
        param = {
            "appName": "databrowser",
            "params": {
                # FIX: beginDate/endDate were swapped in the original payload.
                "beginDate": begin_date,
                "endDate": end_date,
                "ids": edb_codes
            },
            "token": "databrowser_datayes"
        }
        print(url)
        response = requests.post(url,
                                 data=json.dumps(param),
                                 headers={'Content-Type': 'application/json'},
                                 timeout=60)
        print(response)
        data = json.loads(response.text)
        if data['success'] == True:
            for item in data['data']:
                # indicID may be numeric — coerce so string concat can't fail.
                code = str(item['indic']['indicID'])
                print(code)
                for point in item['data']:
                    records.append({
                        "keys": 'TL-' + code + point['periodDate'],  # primary key
                        "edb_metrics_code": 'TL-' + code,
                        "metrics_value": point['dataValue'],
                        "data_time": point['periodDate'],
                        "third_code": code,
                        "third_type": "TL"
                    })
    except Timeout:
        print("TLAPI请求超时")
    except RequestException as e:
        print(f"TLAPI请求异常: {e}")
    else:
        print("TLAPI请求成功")
    return records


def _bulk_upsert(records, batch_size=500):
    """Upsert record dicts into Hologres in batches; return rows written.

    Commits after each batch; on any error, rolls back the open transaction
    and returns the count written so far.
    """
    # NOTE(security): credentials are hard-coded in source — move them to
    # environment variables or a secret store.
    conn = psycopg2.connect(host="hgprecn-cn-v641lnkxm003-cn-shanghai.hologres.aliyuncs.com",
                            port=80,
                            dbname="odpstest",
                            user="LTAI5tQo9VtJ414iSEZrE8Vn",
                            password="BPBKqDiQ7JKPl6o3QDIFg6kh71nakS",
                            application_name="third_edb_data")
    inserted = 0
    cur = None
    try:
        cur = conn.cursor()
        print("开始获取连接")
        batch = []
        for rec in records:
            batch.append((
                rec['keys'],
                rec['edb_metrics_code'],
                rec['metrics_value'],
                rec['data_time'],
                rec['third_code'],
                rec['third_type'],
            ))
            # Flush a full batch. FIX: the original counter flushed 501 rows
            # but counted only 500; sizing on len(batch) keeps both honest.
            if len(batch) >= batch_size:
                _flush_batch(cur, batch)
                conn.commit()
                inserted += len(batch)
                batch = []
        # FIX: the original built the SQL for the final partial batch but
        # never executed it, silently dropping the trailing records.
        if batch:
            _flush_batch(cur, batch)
            conn.commit()
            inserted += len(batch)
    except Exception as e:
        print("发生错误：", e)
        conn.rollback()
    finally:
        # cur may be None if cursor creation itself failed.
        if cur is not None:
            cur.close()
        conn.close()
    return inserted


def _flush_batch(cur, batch):
    """Execute one multi-row INSERT ... ON CONFLICT upsert for *batch*."""
    # FIX: 6 placeholders per row — the original template had 7 for 6 columns,
    # which makes psycopg2 raise on every execute.
    values = ", ".join(["(%s, %s, %s, %s, %s, %s)"] * len(batch))
    sql = ("INSERT INTO public.index_center_wind_edb_metrics_data"
           "(keys, edb_metrics_code, metrics_value, data_time,third_code,third_type)"
           " VALUES " + values +
           " ON CONFLICT (keys) DO UPDATE SET"
           " metrics_value = EXCLUDED.metrics_value, modify_time = now()")
    # Flatten the row tuples into one flat parameter list for the %s slots.
    cur.execute(sql, [field for row in batch for field in row])
# Script entry point: run the full fetch-and-load job when executed directly.
if __name__ == '__main__':
    writeApiDate()










