import os
import pandas as pd
from dotenv import load_dotenv
from sqlalchemy import create_engine, text
from loguru import logger

# Module-level side channel: query_stock_data() stores the actual min/max
# trade_date observed in its last result set here (normalized to
# YYYY-MM-DD when the source values are YYYYMMDD strings). None until the
# first successful query.
TRADE_DATE_MIN=None
TRADE_DATE_MAX=None


def load_environment_variables():
    """Load settings from the .env file and return them as a dict.

    Returns:
        dict: database connection settings (SQLite path or MySQL
        credentials) plus query/filter parameters, with defaults applied
        and FACTION/LEAST_BARS converted to float/int respectively.
    """
    logger.info("开始加载环境变量...")
    load_dotenv(dotenv_path='.env')

    # Connection-related variables have no defaults; absence selects MySQL
    # vs. SQLite below.
    env_vars = {name: os.getenv(name) for name in (
        'DB_PATH', 'MYSQL_HOST', 'MYSQL_PORT',
        'MYSQL_DATABASE', 'MYSQL_USER', 'MYSQL_PASSWORD')}

    # Query/filter parameters with fallbacks (numeric ones converted).
    env_vars['START_DATE'] = os.getenv('START_DATE', '1900-01-01')
    env_vars['END_DATE'] = os.getenv('END_DATE', '2999-12-31')
    env_vars['FACTION'] = float(os.getenv('FACTION', '0.1'))
    env_vars['TSCODE_START'] = os.getenv('TSCODE_START', '0')
    env_vars['TSCODE_END'] = os.getenv('TSCODE_END', '999999')
    env_vars['LEAST_BARS'] = int(os.getenv('LEAST_BARS', '100'))
    env_vars['STRATEGY'] = os.getenv('STRATEGY', 'DEFAULT')

    # SQLite takes precedence; MySQL is the fallback when DB_PATH is unset.
    if env_vars['DB_PATH']:
        logger.info("环境变量加载完成 - 数据库类型: SQLite")
        logger.info(f"数据库路径: {env_vars['DB_PATH']}")
    else:
        logger.info("环境变量加载完成 - 数据库类型: MySQL")
        logger.info(f"数据库: {env_vars['MYSQL_HOST']}:{env_vars['MYSQL_PORT']}/{env_vars['MYSQL_DATABASE']}")

    logger.info(f"查询时间范围: {env_vars['START_DATE']} 到 {env_vars['END_DATE']}")
    logger.info(f"股票筛选概率: {env_vars['FACTION']}")
    logger.info(f"股票筛选范围: {env_vars['TSCODE_START']} 到 {env_vars['TSCODE_END']}")
    logger.info(f"最小K线数量: {env_vars['LEAST_BARS']}")
    return env_vars

def create_database_engine(env_vars):
    """Create and validate a SQLAlchemy engine from the loaded settings.

    A set DB_PATH selects SQLite; otherwise MySQL credentials are used.
    Either way the connection is probed with a trivial SELECT before the
    engine is handed back.

    Args:
        env_vars (dict): output of load_environment_variables().

    Returns:
        sqlalchemy.Engine: a verified engine.

    Raises:
        FileNotFoundError: DB_PATH is set but the file does not exist.
        Exception: any driver-level connection failure is logged and
            re-raised.
    """
    logger.info("正在创建数据库连接引擎...")

    db_path = env_vars.get('DB_PATH')
    if db_path:
        try:
            # The SQLite driver would silently create a missing file, so
            # check existence up front.
            if not os.path.exists(db_path):
                logger.error(f"SQLite数据库文件不存在: {db_path}")
                raise FileNotFoundError(f"SQLite数据库文件不存在: {db_path}")

            engine = create_engine(f"sqlite:///{db_path}")

            # Probe the connection before returning the engine.
            with engine.connect() as conn:
                conn.execute(text("SELECT 1"))
            logger.success("SQLite数据库连接成功")
            return engine
        except Exception as e:
            logger.error(f"SQLite数据库连接失败: {e}")
            raise

    # MySQL fallback.
    try:
        url = (
            "mysql+pymysql://{MYSQL_USER}:{MYSQL_PASSWORD}"
            "@{MYSQL_HOST}:{MYSQL_PORT}/{MYSQL_DATABASE}?charset=utf8mb4"
        ).format(**env_vars)
        engine = create_engine(url)

        # Probe the connection before returning the engine.
        with engine.connect() as conn:
            conn.execute(text("SELECT 1"))
        logger.success("MySQL数据库连接成功")
        return engine
    except Exception as e:
        logger.error(f"MySQL数据库连接失败: {e}")
        raise

def query_stock_data(engine, start_date, end_date):
    """Load daily bars from stock_daily within [start_date, end_date].

    The dates are sent as bound parameters rather than interpolated into
    the SQL string, so the values cannot alter the statement (injection-
    and quoting-safe) — fixes the previous f-string-built query.

    Args:
        engine: SQLAlchemy engine from create_database_engine().
        start_date: inclusive lower bound; same format as the trade_date
            column (presumably 'YYYY-MM-DD' or 'YYYYMMDD' — matches the
            stored values).
        end_date: inclusive upper bound.

    Returns:
        pd.DataFrame: columns ts_code, trade_date, open, high, low,
        close, pre_close, vchange, pct_chg, vol, amount, ordered by
        ts_code then trade_date.

    Side effects:
        Updates module globals TRADE_DATE_MIN / TRADE_DATE_MAX with the
        observed date range, normalized to YYYY-MM-DD where possible.

    Raises:
        Exception: re-raised after logging when the query fails.
    """
    logger.info(f"开始查询股票数据，时间范围: {start_date} 到 {end_date}")
    # Parameterized query: values travel separately from the SQL text.
    query = text("""
        SELECT ts_code, trade_date, open, high, low, close, pre_close, vchange, pct_chg, vol, amount
        FROM stock_daily
        WHERE trade_date >= :start_date AND trade_date <= :end_date
        ORDER BY ts_code, trade_date
    """)
    params = {'start_date': start_date, 'end_date': end_date}
    try:
        df = pd.read_sql(query, engine, params=params)
        logger.info(f"数据查询完成，共获取 {len(df)} 条记录")
        logger.info(f"涉及股票数量: {df['ts_code'].nunique()} 只")

        global TRADE_DATE_MIN, TRADE_DATE_MAX
        TRADE_DATE_MIN = _to_iso_date(df['trade_date'].min())
        TRADE_DATE_MAX = _to_iso_date(df['trade_date'].max())

        logger.info(f"时间范围: {TRADE_DATE_MIN} 到 {TRADE_DATE_MAX}")

        return df
    except Exception as e:
        logger.error(f"数据查询失败: {e}")
        raise


def _to_iso_date(value):
    """Reformat a YYYYMMDD string as YYYY-MM-DD; pass anything else through."""
    if isinstance(value, str) and len(value) == 8:
        return f"{value[:4]}-{value[4:6]}-{value[6:8]}"
    return value

def preprocess_data(df):
    """Clean and normalize raw stock rows for downstream analysis.

    Parses trade_date into datetimes, coerces the price/volume columns to
    numeric (unparseable values become NaN), and drops rows missing any
    core OHLCV field.

    Args:
        df (pd.DataFrame): raw rows as returned by query_stock_data();
            must contain trade_date, ts_code and the OHLCV columns.

    Returns:
        pd.DataFrame: a cleaned copy — the caller's frame is NOT mutated
        (the previous version modified columns of the input in place).
    """
    logger.info("开始数据预处理...")
    original_count = len(df)

    # Defensive copy: keeps the caller's frame intact and guarantees the
    # column assignments below never hit a pandas view/copy ambiguity.
    df = df.copy()

    # Parse trade_date strings into datetimes (no-op if already parsed).
    logger.debug("转换日期格式...")
    if df['trade_date'].dtype == object:
        df['trade_date'] = pd.to_datetime(df['trade_date'])

    # Coerce numeric columns; bad values become NaN so dropna can catch them.
    logger.debug("转换数值列类型...")
    float_cols = ['open', 'high', 'low', 'close', 'pre_close', 'vchange', 'pct_chg', 'vol', 'amount']
    for col in float_cols:
        df[col] = pd.to_numeric(df[col], errors='coerce')

    # Drop rows missing any core OHLCV field.
    logger.debug("清理缺失值...")
    required_cols = ['open', 'high', 'low', 'close', 'vol']
    df = df.dropna(subset=required_cols)

    # Single vectorized cast: forces float even when a column survived
    # to_numeric as all-int.
    df[required_cols] = df[required_cols].astype(float)

    final_count = len(df)
    removed_count = original_count - final_count
    logger.info("数据预处理完成")
    logger.info(f"原始数据: {original_count} 条，处理后: {final_count} 条，移除: {removed_count} 条")
    logger.info(f"剩余股票数量: {df['ts_code'].nunique()} 只")

    return df