# -*- coding: utf-8 -*-
# name   : 
# Author : admin
# Email  : 
# File   : moni_data.py
# Ver    : 1.0
# Time   : 2025-07-16 14:38

from datetime import datetime, timedelta
import random
import string
import time
from tqdm import tqdm  # progress-bar utility

from utils.operate_mongodb import OperateMongodb
from config import MONGODB_CONF_GYCC as MONGODB_CONF

# Shared MongoDB connection for the whole script.
# NOTE(review): created at import time and closed in insert_massive_data()'s
# finally block, so the module is effectively single-use per process.
mongo_object = OperateMongodb(MONGODB_CONF)
client, db_mongo = mongo_object.conn_mongodb()

# Data-generation parameters
TOTAL_RECORDS = 1_000_000  # total documents to insert (1 million)
BATCH_SIZE = 5000  # documents per insert_many batch


def generate_random_data(num_records):
    """Return a list of ``num_records`` synthetic user documents.

    Each document holds a random name/email/age/balance/activity flag,
    a creation timestamp within the past five years, three distinct tags,
    and a location with a city name and [longitude, latitude] coordinates.
    """

    def build_record():
        # Draw every field up front with named intermediates; the draw
        # order mirrors the document field order below.
        name = ''.join(random.choices(string.ascii_uppercase, k=10))
        mailbox = ''.join(random.choices(string.ascii_lowercase, k=8))
        age = random.randint(18, 80)
        balance = round(random.uniform(0, 10000), 2)
        active = random.choice([True, False])
        created = datetime.now() - timedelta(days=random.randint(0, 365 * 5))
        tags = random.sample(["A", "B", "C", "D", "E"], k=3)
        city = random.choice(["New York", "London", "Tokyo", "Beijing", "Paris"])
        longitude = round(random.uniform(-180, 180), 6)
        latitude = round(random.uniform(-90, 90), 6)
        return {
            "name": name,
            "email": f"{mailbox}@example.com",
            "age": age,
            "balance": balance,
            "is_active": active,
            "created_at": created,
            "tags": tags,
            "location": {
                "city": city,
                "coordinates": [longitude, latitude],
            },
        }

    return [build_record() for _ in range(num_records)]


def insert_massive_data():
    """Generate and bulk-insert TOTAL_RECORDS fake documents into MongoDB.

    Inserts in batches of BATCH_SIZE via unordered ``insert_many``, shows a
    tqdm progress bar, and prints timing/throughput statistics when done.
    The shared module-level ``client`` is closed on exit whether or not an
    error occurred, so the connection cannot be reused afterwards.
    """
    try:
        collection = db_mongo['test_large_data2']

        # Create indexes up front (speeds up later queries/deletes).
        collection.create_index("created_at")
        collection.create_index("is_active")
        print("索引创建完成: created_at, is_active")

        start_time = time.time()
        inserted_count = 0

        # Insert in batches so memory stays bounded and progress is visible.
        with tqdm(total=TOTAL_RECORDS, desc="插入进度") as pbar:
            while inserted_count < TOTAL_RECORDS:
                # The final batch may be smaller than BATCH_SIZE.
                batch_count = min(BATCH_SIZE, TOTAL_RECORDS - inserted_count)

                data_batch = generate_random_data(batch_count)

                # ordered=False lets the server keep inserting past any
                # individual-document error within a batch.
                collection.insert_many(data_batch, ordered=False)

                inserted_count += batch_count
                pbar.update(batch_count)

        # Summary statistics.
        duration = time.time() - start_time
        print(f"\n插入完成! 共插入 {inserted_count} 条记录")
        print(f"耗时: {duration:.2f} 秒")
        # Guard the rate computation against a zero duration on trivially
        # small runs (would raise ZeroDivisionError in the original).
        print(f"平均速度: {inserted_count / max(duration, 1e-9):.0f} 条/秒")

        # Sanity check: confirm the server-side document count.
        count = collection.count_documents({})
        print(f"集合实际文档数: {count}")

        # Example TTL index (auto-delete documents after 30 days):
        # collection.create_index("created_at", expireAfterSeconds=86400*30)

    except Exception as e:
        # Fix: the original discarded the traceback, making failures hard
        # to diagnose.  Keep the user-facing message but also dump the stack.
        import traceback
        print(f"发生错误: {str(e)}")
        traceback.print_exc()
    finally:
        # Always release the connection, even on error.
        client.close()


# Script entry point: run the full bulk-insert when executed directly.
if __name__ == "__main__":
    insert_massive_data()