#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
一键并行生成 1800 W 行订单数据并导入 MySQL
方案 A：裸 CSV + LOAD DATA LOCAL INFILE
测试环境：8C16T / 32 GB / NVMe / MySQL 8.0
总耗时 ≈ 2 分 30 秒
依赖：pip install pandas tqdm pymysql sqlalchemy
"""
import os
import csv
import random
import string
import datetime as dt
import multiprocessing as mp
from tqdm import tqdm
from sqlalchemy import create_engine, text

# ---------- 1. Parameters ----------
TOTAL_ROWS    = 18_000_000                     # target total rows ("1800 W" = 18 million)
NUM_WORKERS   = mp.cpu_count()                 # number of parallel generator processes
# NOTE(review): floor division silently drops TOTAL_ROWS % NUM_WORKERS rows
# when the total is not evenly divisible by the worker count — confirm intended.
ROWS_PER_FILE = TOTAL_ROWS // NUM_WORKERS
CSV_DIR       = "csv_parts"                    # output directory for the CSV shards
# local_infile=1 is required client-side for LOAD DATA LOCAL INFILE below.
# NOTE(review): credentials are hard-coded here — consider environment variables.
DB_URI        = "mysql+pymysql://sgy:admin@192.168.1.101:3307/demo?charset=utf8mb4&local_infile=1"

# ---------- 2. 建库建表（一次性） ----------
def init_db() -> None:
    """Create the ``demo`` database and ``t_order`` table if absent (idempotent).

    Bug fix: the original connected with ``demo`` already selected in the URI,
    so on a fresh server the connection itself failed with "Unknown database"
    before ``CREATE DATABASE IF NOT EXISTS`` could ever run. We therefore
    bootstrap with a server-level URL (database stripped) instead.
    """
    from sqlalchemy.engine import make_url  # local import: only needed here

    # Same host/credentials/query args, but no default schema selected.
    server_url = make_url(DB_URI).set(database=None)
    engine = create_engine(server_url, isolation_level="AUTOCOMMIT")
    try:
        with engine.connect() as conn:
            conn.execute(text("CREATE DATABASE IF NOT EXISTS demo CHARACTER SET utf8mb4"))
            conn.execute(text("""
                CREATE TABLE IF NOT EXISTS demo.t_order (
                    id          BIGINT AUTO_INCREMENT PRIMARY KEY,
                    order_no    VARCHAR(32)  NOT NULL,
                    user_id     BIGINT       NOT NULL,
                    amount      DECIMAL(10,2) NOT NULL,
                    status      TINYINT      NOT NULL,
                    create_time DATETIME     NOT NULL,
                    KEY idx_user (user_id),
                    KEY idx_ct   (create_time)
                ) ENGINE=InnoDB
            """))
    finally:
        engine.dispose()  # release the bootstrap connection pool
    print("✅ 库/表就绪")

# ---------- 3. 单进程生成 CSV ----------
def gen_part(pid: int) -> str:
    """Generate one CSV shard of ROWS_PER_FILE synthetic orders; return its path.

    Seeded with ``pid`` so each shard's content is deterministic and shards
    differ from one another.

    Bug fix: ``csv.writer`` defaults to ``lineterminator="\\r\\n"``, while the
    loader uses ``LINES TERMINATED BY '\\n'`` — every ``create_time`` value
    would arrive with a trailing ``\\r``. We pin the terminator to ``\\n``.
    Robustness fix: write to a temp file and atomically rename, so a shard
    killed mid-write is not mistaken for a finished one by the resume check.
    """
    fname = os.path.join(CSV_DIR, f"part_{pid:02d}.csv")
    if os.path.exists(fname):          # resume: only fully-written shards exist
        return fname
    random.seed(pid)                   # deterministic per-shard data
    now = dt.datetime.now()            # hoisted: one clock read per shard
    tmp = fname + ".tmp"
    with open(tmp, "w", newline="", encoding="utf-8") as f:
        writer = csv.writer(f, lineterminator="\n")  # match LINES TERMINATED BY '\n'
        writer.writerow(["order_no", "user_id", "amount", "status", "create_time"])
        for _ in range(ROWS_PER_FILE):
            writer.writerow([
                ''.join(random.choices(string.ascii_uppercase + string.digits, k=16)),
                random.randint(1, 2_000_000),                    # user_id
                round(random.uniform(5, 999.99), 2),             # amount
                random.choice([1, 2, 3]),                        # status
                # random timestamp within the last year
                (now - dt.timedelta(seconds=random.randint(0, 86_400 * 365))
                 ).strftime("%Y-%m-%d %H:%M:%S")
            ])
    os.replace(tmp, fname)             # atomic publish: no half-written shards
    return fname

# ---------- 4. 多进程生成 ----------
def parallel_generate():
    """Spawn NUM_WORKERS processes, each writing one CSV shard; return the paths."""
    os.makedirs(CSV_DIR, exist_ok=True)
    with mp.Pool(NUM_WORKERS) as pool:
        # imap yields shard paths as workers finish; tqdm tracks progress.
        shard_iter = pool.imap(gen_part, range(NUM_WORKERS))
        shard_paths = [p for p in tqdm(shard_iter, total=NUM_WORKERS,
                                       desc="Generating CSV")]
    return shard_paths

# ---------- 5. LOAD DATA 导入 ----------
def load_to_mysql(files) -> None:
    """Bulk-load every CSV shard into demo.t_order via LOAD DATA LOCAL INFILE.

    Requires ``local_infile`` enabled on both the client URI and the server
    (``SET GLOBAL local_infile = 1``).

    Bug fix: MySQL treats backslash as an escape character inside the quoted
    file path, so Windows-style paths from ``os.path.join`` would break —
    paths are normalized to forward slashes before interpolation.
    """
    engine = create_engine(DB_URI, isolation_level="AUTOCOMMIT")
    try:
        with engine.connect() as conn:
            # NOTE(review): DISABLE/ENABLE KEYS only affects MyISAM tables;
            # on the InnoDB table above the server treats it as a no-op.
            conn.execute(text("ALTER TABLE demo.t_order DISABLE KEYS"))
            for f in tqdm(files, desc="LOAD DATA"):
                infile = f.replace("\\", "/")  # MySQL-safe path on any OS
                sql = f"""
                LOAD DATA LOCAL INFILE '{infile}'
                INTO TABLE demo.t_order
                FIELDS TERMINATED BY ',' 
                LINES TERMINATED BY '\\n'
                IGNORE 1 LINES
                (order_no,user_id,amount,status,create_time)
                """
                conn.execute(text(sql))
            conn.execute(text("ALTER TABLE demo.t_order ENABLE KEYS"))
    finally:
        engine.dispose()  # return all pooled connections
    print("✅ 1800 W 行导入完成！")

# ---------- 6. 主入口 ----------
def _main() -> None:
    """Run the full pipeline: schema setup -> parallel CSV generation -> bulk load."""
    init_db()
    load_to_mysql(parallel_generate())


# ---------- 6. Entry point ----------
if __name__ == "__main__":
    _main()
