from json2ziwei.api import SolarAPI
from json2ziwei.convert import convert_main_json_to_text
import os
import mysql.connector
from mysql.connector import Error
from dotenv import load_dotenv
import json
from datetime import datetime, timedelta
import time
from concurrent.futures import ThreadPoolExecutor, as_completed
from contextlib import contextmanager

# Load environment variables from a local .env file (if present).
load_dotenv()

# Centralised runtime configuration; all tunables live here so the rest of
# the script never hard-codes connection or concurrency parameters.
CONFIG = {
    'db': {
        'host': os.getenv('DB_HOST', 'localhost'),
        'database': os.getenv('DB_NAME', 'astrolabe'),
        'user': os.getenv('DB_USER', 'root'),
        'password': os.getenv('DB_PASSWORD', ''),
        'port': int(os.getenv('DB_PORT', 3306)),
        'pool_size': 32,  # enlarged pool to serve the high thread concurrency below
        'connection_timeout': 10
    },
    'thread_pool': {
        'max_workers': min(64, (os.cpu_count() or 4) * 8),  # cap worker threads
        # Number of DATES consumed per batch; each date expands to 24 tasks
        # (12 hours x 2 genders) in batch_process_dates.
        'batch_size': 1000
    },
    'retry': {
        'max_attempts': 3,
        'delay': 0.5  # base retry delay in seconds; scaled by the attempt number
    },
    'api': {
        'endpoint': "http://localhost:3000"
    }
}

# Module-level pool handle; populated by init_db_pool() before any DB work.
DB_POOL = None


def init_db_pool():
    """Create the shared MySQL connection pool from CONFIG['db'].

    Assigns the module-level DB_POOL on success; on failure the error is
    logged and re-raised so start-up aborts instead of limping on.
    """
    global DB_POOL
    db_cfg = CONFIG['db']
    try:
        DB_POOL = mysql.connector.pooling.MySQLConnectionPool(
            pool_name="astrolabe_pool",
            pool_size=db_cfg['pool_size'],
            pool_reset_session=True,
            connection_timeout=db_cfg['connection_timeout'],
            host=db_cfg['host'],
            database=db_cfg['database'],
            user=db_cfg['user'],
            password=db_cfg['password'],
            port=db_cfg['port'],
        )
        print(f"数据库连接池初始化成功 (大小: {db_cfg['pool_size']})")
    except Error as exc:
        print(f"数据库连接池初始化失败: {exc}")
        raise


@contextmanager
def get_db_connection():
    """Context manager yielding a connection borrowed from the shared pool.

    Yields:
        A pooled MySQL connection; closing it returns it to the pool.

    Raises:
        RuntimeError: if init_db_pool() has not been called yet (previously
            this surfaced as a confusing AttributeError on None).
        Error: any connector error while acquiring the connection, after
            logging it.
    """
    if DB_POOL is None:
        raise RuntimeError("DB pool not initialised; call init_db_pool() first")
    connection = None
    try:
        connection = DB_POOL.get_connection()
        yield connection
    except Error as e:
        print(f"数据库连接错误: {e}")
        raise
    finally:
        # close() on a pooled connection returns it to the pool, not TCP-close.
        if connection and connection.is_connected():
            connection.close()


def save_astrolabe_data(date, hour, timezone, gender, calendar, raw_json, text_description):
    """Insert one astrolabe record; returns True on success, False on DB error.

    Uses a server-side prepared statement and guarantees the cursor is closed
    even when execute/commit raises (the original leaked it on the error path).

    NOTE(review): `hour` is accepted but never written — the INSERT has no
    hour column; confirm whether the hour is intentionally carried in the
    `timezone` column by the callers.
    """
    try:
        with get_db_connection() as connection:
            cursor = connection.cursor(prepared=True)  # prepared stmt for speed
            try:
                query = '''
                INSERT INTO profession_astrolabe_data 
                (date, timezone, gender, calendar, raw_json, text_description)
                VALUES (%s, %s, %s, %s, %s, %s)
                '''
                values = (date, timezone, gender, calendar, raw_json, text_description)
                cursor.execute(query, values)
                connection.commit()
            finally:
                cursor.close()  # always release, even if execute/commit raised
            return True
    except Error as e:
        print(f"保存数据错误: {e}")
        return False


def fetch_and_save_astrolabe(date, hour, timezone, gender, calendar='solar'):
    """Fetch one astrolabe from the API, convert it to text, and persist it.

    Retries up to CONFIG['retry']['max_attempts'] times with LINEAR backoff
    (delay * attempt). The original comment claimed exponential backoff and
    also slept once more after the final failed attempt; both are fixed here.

    NOTE(review): callers pass the hour value in the `timezone` slot as well —
    `timezone` appears to be the API's time index, not a UTC offset; confirm.

    Returns:
        True if the record was fetched and saved, False after all retries fail.
    """
    solar_api = SolarAPI(CONFIG['api']['endpoint'])
    max_attempts = CONFIG['retry']['max_attempts']
    base_delay = CONFIG['retry']['delay']

    for attempt in range(1, max_attempts + 1):
        try:
            json_data = solar_api.get_astrolabe_data(
                date,
                int(timezone),
                gender,
                is_solar=(calendar == 'solar')
            )

            text_description = convert_main_json_to_text(json_data)
            raw_json_str = json.dumps(json_data, ensure_ascii=False)

            if save_astrolabe_data(
                    date, hour, timezone, gender, calendar,
                    raw_json_str, text_description
            ):
                return True

        except Exception as e:
            print(f"重试 {attempt}/{max_attempts} - {date} {hour}时 {gender}: {e}")

        # Linear backoff: 0.5s, 1.0s, ... — skip the useless final sleep.
        if attempt < max_attempts:
            time.sleep(base_delay * attempt)

    print(f"失败: {date} {hour}时 {gender}")
    return False


def generate_date_range(start_date, end_date):
    """Yield every date from start_date to end_date (inclusive) as YYYY-MM-DD.

    A generator is used so arbitrarily long ranges never materialise a list.
    Yields nothing when end_date precedes start_date.
    """
    start = datetime.strptime(start_date, "%Y-%m-%d")
    span_days = (datetime.strptime(end_date, "%Y-%m-%d") - start).days
    for offset in range(span_days + 1):
        yield (start + timedelta(days=offset)).strftime("%Y-%m-%d")


def batch_process_dates(start_date, end_date):
    """Expand each date in [start_date, end_date] into per-hour/per-gender
    tasks and run them through a thread pool, one batch of dates at a time.

    Batching bounds the number of live futures instead of submitting the
    whole range at once. Prints a running tally and a final summary.
    """
    dates = generate_date_range(start_date, end_date)
    total = 0
    success = 0
    max_workers = CONFIG['thread_pool']['max_workers']
    batch_size = CONFIG['thread_pool']['batch_size']

    print(f"使用线程池 (最大并发: {max_workers}, 批处理大小: {batch_size})")

    more_dates = True
    while more_dates:
        # Pull up to batch_size dates; each contributes 24 tasks
        # (12 hours x 2 genders). NOTE(review): the hour value also fills
        # the `timezone` argument slot — matches the original call shape.
        batch = []
        for _ in range(batch_size):
            date = next(dates, None)  # sentinel instead of StopIteration
            if date is None:
                more_dates = False
                break
            batch.extend((date, hour, hour, gender)
                         for hour in range(12)
                         for gender in ('男', '女'))

        if not batch:
            break  # range exhausted exactly on a batch boundary

        total += len(batch)
        print(f"处理批次 - 任务数: {len(batch)}, 累计: {total}")

        with ThreadPoolExecutor(max_workers=max_workers) as executor:
            in_flight = {
                executor.submit(fetch_and_save_astrolabe, *task): task
                for task in batch
            }
            for done in as_completed(in_flight):
                try:
                    if done.result():
                        success += 1
                except Exception as e:
                    print(f"任务异常 {in_flight[done]}: {e}")

    print(f"处理完成 - 总任务: {total}, 成功: {success}, 失败: {total - success}")


if __name__ == '__main__':
    init_db_pool()

    # Deliberately small window — widen only after a successful trial run.
    start_date = "1986-07-11"
    end_date = "1986-07-14"

    print(f"开始处理 {start_date} 至 {end_date} 的星盘数据...")
    started = time.time()

    batch_process_dates(start_date, end_date)

    print(f"所有处理完成，耗时: {time.time() - started:.2f}秒")

# '1924-05-28', '1924-05-30', '1924-06-22', '1924-07-07',
# '1924-09-09', '1925-04-12', '2030-07-09', '2045-10-12', '2045-10-14'
