# -*- coding: utf-8 -*-
# name   : 
# Author : admin
# Email  : 
# File   : moni_datas.py
# Ver    : 1.0
# Time   : 2025-07-16 14:48

import random
import string
import time

from datetime import datetime, timedelta

from utils.operate_mongodb import OperateMongodb
from config import MONGODB_CONF as MONGODB_CONF

# Open a shared MongoDB connection at import time; insert_massive_data()
# closes this client in its finally block.
mongo_object = OperateMongodb(MONGODB_CONF)
client, db_mongo = mongo_object.conn_mongodb()

# Target database / collection for the generated test data.
DB_NAME = "lyj"
COLLECTION_NAME = "test_large_data4"

# Data-generation parameters
TOTAL_RECORDS = 1_000_000  # total documents to insert (1 million)
BATCH_SIZE = 5000  # documents per insert_many() batch


def generate_random_data(num_records):
    """Build a list of ``num_records`` randomly generated user-like documents.

    Each document mixes field types (strings, numbers, booleans, arrays and
    a nested sub-document) so the test collection resembles real-world data.
    """
    def _make_record():
        # One synthetic document; field order mirrors the insertion schema.
        return {
            "name": ''.join(random.choices(string.ascii_uppercase, k=10)),
            "email": f"{''.join(random.choices(string.ascii_lowercase, k=8))}@example.com",
            "age": random.randint(18, 80),
            "balance": round(random.uniform(0, 10000), 2),
            "is_active": random.choice([True, False]),
            "created_at": (datetime.now() - timedelta(days=random.randint(0, 365 * 5))).strftime("%Y-%m-%d"),
            "tags": random.sample(["A", "B", "C", "D", "E"], k=3),
            "location": {
                "city": random.choice(["New York", "London", "Tokyo", "Beijing", "Paris"]),
                "coordinates": [round(random.uniform(-180, 180), 6),
                                round(random.uniform(-90, 90), 6)],
            },
        }

    return [_make_record() for _ in range(num_records)]


def insert_massive_data():
    """Bulk-insert TOTAL_RECORDS random documents into MongoDB.

    Recreates COLLECTION_NAME from scratch, streams the data in batches of
    BATCH_SIZE via insert_many(), builds the secondary indexes once the load
    is complete, and prints timing statistics.  Always closes the
    module-level client when done.
    """
    try:
        db = client[DB_NAME]
        collection = db[COLLECTION_NAME]

        # Drop any stale collection so repeated runs start clean.
        if COLLECTION_NAME in db.list_collection_names():
            db.drop_collection(COLLECTION_NAME)
            print(f"已清除旧集合: {COLLECTION_NAME}")

        # Generate and insert data in batches.
        start_time = time.time()
        inserted_count = 0
        batch_count = 0

        while inserted_count < TOTAL_RECORDS:
            # Last batch may be smaller than BATCH_SIZE.
            current_batch_size = min(BATCH_SIZE, TOTAL_RECORDS - inserted_count)

            data_batch = generate_random_data(current_batch_size)

            # ordered=False lets the server keep inserting the rest of a
            # batch even if individual documents fail.
            collection.insert_many(data_batch, ordered=False)

            inserted_count += current_batch_size
            batch_count += 1

            # Progress report every 10 batches.
            if batch_count % 10 == 0:
                print(f"已插入: {inserted_count}/{TOTAL_RECORDS} 条记录 ({inserted_count / TOTAL_RECORDS:.1%})")

        # Measure insert time only (index build timed separately below).
        duration = time.time() - start_time

        # Build indexes AFTER the bulk load: maintaining indexes during
        # every insert_many() slows each batch, while a single post-load
        # build is cheaper overall.
        collection.create_index("created_at")
        collection.create_index("is_active")
        print("索引创建完成: created_at, is_active")

        # Summary statistics.
        print(f"\n插入完成! 共插入 {inserted_count} 条记录")
        print(f"耗时: {duration:.2f} 秒")
        print(f"平均速度: {inserted_count / duration:.0f} 条/秒")

        # Sanity check: server-side count should match what we inserted.
        count = collection.count_documents({})
        print(f"集合实际文档数: {count}")

    except Exception as e:
        # Best-effort script: report the failure and fall through to cleanup.
        print(f"发生错误: {str(e)}")
    finally:
        client.close()


# Script entry point: populate the test collection when run directly.
if __name__ == "__main__":
    insert_massive_data()