import json
import time

from sqlalchemy.dialects.mysql import insert
from sqlalchemy.exc import OperationalError
from sqlmodel import select

from config.db import get_session
from entity.model.crawler_record_model import CrawlerCityRecord
from util.mylog import my_logger


def bulk_save_or_update(model, data_list: list, unique_keys: list, batch_size=1000):
    """Batch-upsert ``data_list`` into ``model``'s table via MySQL
    ``INSERT ... ON DUPLICATE KEY UPDATE``.

    Args:
        model: SQLModel class whose table receives the rows.
        data_list: model instances to insert or update; no-op when empty.
        unique_keys: columns to exclude from the UPDATE clause (the unique
            constraint columns); ``id`` and ``create_time`` are always
            excluded as well. The caller's list is not modified.
        batch_size: number of rows per INSERT statement; committed per batch.

    On ``OperationalError`` (e.g. lost connection) the whole operation is
    retried up to 3 times; re-running already-committed batches is safe
    because the statement is an upsert. Any other exception is logged and
    swallowed (best-effort semantics, as in the original).
    """
    if not data_list:
        return
    # Build a local set instead of appending to the caller's list — the
    # original mutated ``unique_keys`` on every call, growing it each time.
    skip_update_cols = set(unique_keys) | {"create_time", "id"}
    # All column names of the model.
    columns = list(model.model_fields.keys())
    attempt = 0
    while True:
        try:
            with get_session() as session:
                # Execute in batches to bound statement and transaction size.
                for start in range(0, len(data_list), batch_size):
                    batch = data_list[start:start + batch_size]
                    rows = [{col: getattr(obj, col) for col in columns} for obj in batch]
                    stmt = insert(model).values(rows)
                    # Generate the ON DUPLICATE KEY UPDATE clause, excluding
                    # primary/unique key columns.
                    update_dict = {
                        col: getattr(stmt.inserted, col)
                        for col in columns
                        if col not in skip_update_cols
                    }
                    stmt = stmt.on_duplicate_key_update(**update_dict)
                    session.execute(stmt)
                    session.commit()  # commit once per batch
            return
        except OperationalError as e:
            # BUG FIX: the original reused ``i`` both as the retry counter and
            # as the batch loop index, so the retry limit compared against a
            # batch offset and effectively never allowed retries. Use a
            # dedicated counter.
            attempt += 1
            if attempt > 3:
                return
            my_logger.warning(f"操作错误，尝试重连 ({attempt}/3)... 错误: {e}")
            time.sleep(10)
        except Exception as e:
            my_logger.error(f"Error bulk_save_or_update task: {e}")
            return
# One-off backfill: each CrawlerCityRecord stores a JSON array in ``data``;
# expand every entry into its own record keyed by ``http_url``, then upsert.
with get_session() as session:
    records = session.exec(select(CrawlerCityRecord)).all()
    upsert_list = []
    for record in records:
        # assumes each JSON entry is a dict with an 'httpUrl' key —
        # TODO confirm against the crawler's output schema.
        for entry in json.loads(record.data):
            clone = CrawlerCityRecord(**record.model_dump(exclude={"id"}))
            clone.http_url = entry['httpUrl']
            upsert_list.append(clone)
bulk_save_or_update(CrawlerCityRecord, upsert_list, ['http_url'])