import json
from collections import defaultdict

import mysql.connector
from mysql.connector import pooling
import uuid
import random
from datetime import datetime, timedelta
from decimal import Decimal, ROUND_HALF_UP


from tqdm import tqdm  # 进度条工具
from common import snowFaker as sf
from python_faker.faker17 import calculate_order_amounts
from python_faker.test import generate_order_time

# ------------------------------
# Configuration
# ------------------------------
BATCH_SIZE = 1000  # rows per bulk insert (tune to available memory)
USER_COUNT = 500000  # number of fake orders to generate
DB_CONFIG = {
    "host": "localhost",
    "user": "root",
    "password": "123456",
    "database": "esl",
    "pool_name": "mypool",
    "pool_size": 10  # connection pool size
}

# ------------------------------
# Create the database connection pool
# NOTE: runs at import time — importing this module opens DB connections.
# ------------------------------
connection_pool = pooling.MySQLConnectionPool(**DB_CONFIG)


def batch_create_orders():
    """Generate USER_COUNT fake orders and bulk-insert them in batches.

    Loads the user -> address mapping into memory once, then builds order
    header rows (plus their order-dish detail rows) and flushes them to
    the database every BATCH_SIZE orders via insert_batch_data().
    """
    # Preload all (user_id, address_id) pairs so order generation needs
    # no per-order DB round-trips.
    with connection_pool.get_connection() as conn:
        cursor = conn.cursor()
        cursor.execute("SELECT user_id, address_id FROM address")
        user_address_map = {row[0]: row[1] for row in cursor.fetchall()}

    # Hoist the key list out of the hot loop: the original rebuilt
    # list(dict.keys()) on every one of USER_COUNT iterations (O(n) each).
    user_ids = list(user_address_map)

    order_batch, order_dish_batch = [], []
    for _ in tqdm(range(USER_COUNT), desc="生成数据"):
        user_id = random.choice(user_ids)

        order_data = generate_single_order(user_id, user_address_map)
        # generate_single_order returns order_detail=None when the user
        # has no address; skip it instead of appending None to the batch
        # (the original appended None, which would break executemany).
        if order_data["order_detail"] is None:
            continue
        order_batch.append(order_data["order_detail"])
        order_dish_batch.extend(order_data["order_dishes"])

        # Flush a full batch to keep memory usage bounded.
        if len(order_batch) >= BATCH_SIZE:
            insert_batch_data(order_batch, order_dish_batch)
            order_batch.clear()
            order_dish_batch.clear()

    # Flush whatever remains after the loop.
    if order_batch:
        insert_batch_data(order_batch, order_dish_batch)


def generate_single_order(user_id, user_address_map):
    """Build one fake order for *user_id* using only in-memory caches.

    Returns a dict with:
      - "order_detail": a 16-tuple matching the orderdetail INSERT columns,
        or None when the user has no address in *user_address_map*.
      - "order_dishes": list of 6-tuples for the order_dish INSERT
        (empty when order_detail is None).
    """
    # Identifiers and timestamps.
    order_id = uuid.uuid4().hex
    placed_at = generate_order_time()
    # Delivery finishes up to five hours after the order was placed.
    delivered_at = placed_at + timedelta(hours=random.uniform(0, 5))

    # Address comes from the preloaded map; bail out when missing.
    address_id = user_address_map.get(user_id, None)
    if not address_id:
        return {"order_detail": None, "order_dishes": []}

    # Pick a random canteen (cache only holds canteens with >= 3 dishes,
    # so sampling 3 dishes is always safe) and price the selection.
    canteen_id, menu = random.choice(CANTEEN_DISH_CACHE)
    chosen_dishes = random.sample(menu, 3)
    amounts = calculate_order_amounts(chosen_dishes)

    # Order header row, column order matching the orderdetail INSERT.
    order_detail = (
        sf.get_id_param(1, 1), order_id, user_id, placed_at, delivered_at,
        address_id, amounts['commodity_money'], amounts['delivery_money'],
        amounts['packaging_money'], amounts['pay_money'], amounts['discounts'],
        "备注", random.choice(['8', '9', '10', '11', '12']),
        random.choice(['1', '2', '3', '4']), None, canteen_id)

    # One detail row per dish; flavor/spec fall back to None when the
    # dish has no entry in the corresponding cache.
    order_dishes = [
        (
            sf.get_id_param(2, 1), order_id, dish['dish_id'], 1,
            random.choice(FLAVOR_CACHE.get(dish['dish_id'], [None])),
            random.choice(SPEC_CACHE.get(dish['dish_id'], [None])),
        )
        for dish in chosen_dishes
    ]

    return {"order_detail": order_detail, "order_dishes": order_dishes}


def insert_batch_data(order_batch, order_dish_batch):
    """Bulk-insert one batch of order header rows and their dish rows.

    Both inserts run on one connection: commit on success, best-effort
    rollback (with a printed message) if either insert fails.

    Fixes vs. the original: the rollback now happens *inside* the `with`
    block while the pooled connection is still checked out. The original
    called conn.rollback() after the `with` exited (connection already
    returned to the pool), and `conn` was even unbound (NameError) when
    get_connection() itself raised.
    """
    with connection_pool.get_connection() as conn:
        cursor = conn.cursor()
        try:
            # Order header rows.
            cursor.executemany("""
                INSERT INTO orderdetail (
                    id, order_id, user_id, order_time, finish_time,
                    address_id, commodity_money, delivery_money,
                    packaging_money, pay_money, discounts, order_remark,
                    tag_id, pay_id, deliveryman_id, canteen_id
                ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
            """, order_batch)

            # Order -> dish detail rows.
            cursor.executemany("""
                INSERT INTO order_dish (
                    id, order_id, dish_id, quantity,
                    flavor_id, specification_id
                ) VALUES (%s, %s, %s, %s, %s, %s)
            """, order_dish_batch)
            conn.commit()
        except Exception as e:
            # Best-effort error reporting kept from the original; the
            # batch is dropped (caller clears its buffers either way).
            print(f"批量插入失败: {e}")
            conn.rollback()
        finally:
            cursor.close()


# ------------------------------
# 初始化预加载数据（启动时执行一次）
# ------------------------------
def init_cache():
    """Preload canteen/dish, flavor and specification data into module caches.

    Populates three module-level globals:
      - CANTEEN_DISH_CACHE: list of (canteen_id, [{'dish_id': .., 'price': ..}, ...])
        for every canteen offering at least three dishes.
      - FLAVOR_CACHE: dish_id -> list of flavor_ids.
      - SPEC_CACHE: dish_id -> list of specification_ids.
    """
    global CANTEEN_DISH_CACHE, FLAVOR_CACHE, SPEC_CACHE

    with connection_pool.get_connection() as conn:
        # Dictionary cursor so rows can be read by column name.
        cursor = conn.cursor(dictionary=True)

        # 1. Canteen -> dishes, aggregated server-side into a JSON array.
        #    HAVING COUNT(*) >= 3 guarantees random.sample(dishes, 3)
        #    in generate_single_order never runs out of dishes.
        cursor.execute("""
            SELECT canteen_id, 
                   JSON_ARRAYAGG(
                       JSON_OBJECT('dish_id', dish_id, 'price', price)
                   ) AS dishes
            FROM dish 
            GROUP BY canteen_id 
            HAVING COUNT(*) >=3
        """)
        CANTEEN_DISH_CACHE = [
            (record['canteen_id'], json.loads(record['dishes']))
            for record in cursor
        ]

        # 2. Dish -> flavor ids.
        cursor.execute("SELECT dish_id, flavor_id FROM dish_flavor")
        FLAVOR_CACHE = defaultdict(list)
        for record in cursor:
            FLAVOR_CACHE[record['dish_id']].append(record['flavor_id'])

        # 3. Dish -> specification ids.
        cursor.execute("SELECT dish_id, specification_id FROM dish_specification")
        SPEC_CACHE = defaultdict(list)
        for record in cursor:
            SPEC_CACHE[record['dish_id']].append(record['specification_id'])


if __name__ == "__main__":
    init_cache()  # preload canteen/dish/flavor/spec caches once at startup
    batch_create_orders()