import pandas as pd
import akshare as ak
from sqlalchemy import create_engine, Column, Integer, String, Text, Date, UniqueConstraint
from sqlalchemy.orm import declarative_base, sessionmaker
from datetime import datetime, timedelta
import time
import logging
from typing import Optional, Dict, List

# Logging setup: mirror every record to a log file and to the console.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    handlers=[
        logging.FileHandler('stock_gsrl_gsdt_em.log'),
        logging.StreamHandler()
    ]
)
logger = logging.getLogger(__name__)

# Database connection settings.
# NOTE(review): credentials are hardcoded in source — consider loading them
# from environment variables or a config file kept out of version control.
DB_CONFIG = {
    'dialect': 'postgresql',
    'driver': 'psycopg2',
    'username': 'leizhen',
    'password': '751982leizhen',
    'host': 'localhost',
    'port': '5432',
    'database': 'stock_db'
}

# Build the SQLAlchemy engine/session factory shared by the whole module.
# pool_size/max_overflow allow up to 30 concurrent connections.
DATABASE_URL = f"{DB_CONFIG['dialect']}+{DB_CONFIG['driver']}://{DB_CONFIG['username']}:{DB_CONFIG['password']}@{DB_CONFIG['host']}:{DB_CONFIG['port']}/{DB_CONFIG['database']}"
engine = create_engine(DATABASE_URL, pool_size=10, max_overflow=20)
Session = sessionmaker(bind=engine)
Base = declarative_base()

class StockGsrlGsdtEm(Base):
    """ORM model for company calendar / major-event records (Eastmoney data).

    One row per (stock, trading date, event type); enforced by the unique
    constraint below, which `save_data` relies on after its in-memory dedup.
    """
    __tablename__ = 'stock_gsrl_gsdt_em'

    # Surrogate primary key.
    id = Column(Integer, primary_key=True, autoincrement=True)
    # Row number as provided by the source feed (序号).
    serial_number = Column(Integer, comment="序号")
    # Stock ticker code (代码); indexed for per-stock queries.
    stock_code = Column(String(20), index=True, comment="股票代码")
    # Short stock name (简称).
    stock_name = Column(String(50), comment="股票简称")
    # Event category (事件类型); indexed for filtering by event kind.
    event_type = Column(String(100), index=True, comment="事件类型")
    # Free-text event detail (具体事项).
    event_detail = Column(Text, comment="具体事项")
    # Trading date the event belongs to; indexed for per-day replace logic.
    trade_date = Column(Date, index=True, comment="交易日期")

    __table_args__ = (
        # At most one row per stock/date/event-type combination.
        UniqueConstraint('stock_code', 'trade_date', 'event_type', name='uq_stock_date_event'),
    )

def create_table():
    """Create every ORM-mapped table; existing tables are left untouched.

    Raises:
        Exception: re-raised after logging if table creation fails.
    """
    try:
        Base.metadata.create_all(engine)
    except Exception as e:
        logger.error(f"创建数据表失败: {str(e)}")
        raise
    else:
        logger.info("数据表创建成功")

def save_data(df: pd.DataFrame, date: str) -> int:
    """Save one trading day's stock-event records to the database.

    The call is idempotent: any existing rows for *date* are deleted first,
    then the new rows are inserted, and both happen in a single committed
    transaction.

    Args:
        df: DataFrame of events with columns 序号, 代码, 简称, 事件类型, 具体事项.
        date: Trading date in YYYY-MM-DD format.

    Returns:
        Number of records successfully inserted.

    Raises:
        Exception: re-raised after rollback if any database operation fails.
    """
    session = Session()
    processed_count = 0

    try:
        # Remove previously stored rows for this date so a re-run replaces
        # the day's data instead of violating the unique constraint.
        deleted_count = session.query(StockGsrlGsdtEm).filter_by(trade_date=date).delete()
        logger.info(f"已删除 {deleted_count} 条 {date} 的旧数据")

        if df.empty:
            # BUG FIX: commit the deletion before returning. Previously the
            # early return fell through to session.close() without a commit,
            # so the delete was rolled back and stale rows survived even
            # though the log above claimed they had been removed.
            session.commit()
            logger.warning(f"{date} 无有效数据")
            return 0

        # Deduplicate on (stock_code, event_type). trade_date is constant
        # within one call, so this mirrors the table's unique constraint
        # uq_stock_date_event and prevents insert failures.
        records = []
        seen = set()
        for _, row in df.iterrows():
            key = (row['代码'], row['事件类型'])
            if key in seen:
                continue
            seen.add(key)
            records.append({
                'serial_number': row['序号'],
                'stock_code': row['代码'],
                'stock_name': row['简称'],
                'event_type': row['事件类型'],
                'event_detail': row['具体事项'],
                'trade_date': date,
            })

        if records:
            # Bulk insert skips per-object ORM overhead for speed.
            session.bulk_insert_mappings(StockGsrlGsdtEm, records)
            processed_count = len(records)

        # One commit covers both the delete and the insert atomically.
        session.commit()
        if processed_count:
            logger.info(f"成功保存 {processed_count} 条 {date} 的数据")

    except Exception as e:
        session.rollback()
        logger.error(f"保存数据失败: {str(e)}")
        raise
    finally:
        session.close()

    return processed_count

def fetch_data(date: str, max_retries: int = 3, retry_delay: int = 5) -> Optional[pd.DataFrame]:
    """Fetch one day's company-event data from akshare, retrying on errors.

    Args:
        date: Trading date in YYYYMMDD format.
        max_retries: Maximum number of fetch attempts.
        retry_delay: Seconds to wait between failed attempts.

    Returns:
        A cleaned DataFrame, or None when no usable data is available.
    """
    attempt = 1
    while attempt <= max_retries:
        try:
            logger.info(f"正在获取 {date} 的数据 (尝试 {attempt}/{max_retries})...")
            result = ak.stock_gsrl_gsdt_em(date=date)

            # None fails the isinstance check too, so one test covers both
            # "no payload" and "wrong payload type" — neither is retried,
            # since the API did respond.
            if not isinstance(result, pd.DataFrame):
                logger.warning(f"{date} 接口返回无效数据")
                return None

            if result.empty:
                logger.info(f"{date} 无有效数据")
                return None

            # Drop rows missing the key fields, then rows with blank codes.
            cleaned = result.dropna(subset=['代码', '事件类型'])
            cleaned = cleaned.loc[cleaned['代码'].str.strip() != '']

            if not cleaned.empty:
                return cleaned
            logger.info(f"{date} 数据清洗后无有效记录")
            return None

        except Exception as e:
            # Network/API failures are retried after a fixed delay.
            logger.error(f"获取 {date} 数据时出错: {str(e)}")
            if attempt < max_retries:
                time.sleep(retry_delay)

        attempt += 1

    return None

def get_historical_data(start_date: str = "20210101", end_date: Optional[str] = None):
    """Backfill event data one calendar day at a time.

    Args:
        start_date: First date to fetch, in YYYYMMDD format.
        end_date: Last date to fetch, in YYYYMMDD format; defaults to today.
    """
    try:
        first_day = datetime.strptime(start_date, "%Y%m%d")
        last_day = datetime.now() if end_date is None else datetime.strptime(end_date, "%Y%m%d")

        total_processed = 0
        day = first_day

        while day <= last_day:
            frame = fetch_data(day.strftime("%Y%m%d"))
            if frame is not None:
                # save_data expects the dashed YYYY-MM-DD form.
                total_processed += save_data(frame, day.strftime("%Y-%m-%d"))

            day += timedelta(days=1)
            time.sleep(1)  # throttle requests to the upstream API

        logger.info(f"历史数据获取完成, 共处理 {total_processed} 条记录")

    except KeyboardInterrupt:
        # Allow a clean Ctrl-C stop without a traceback.
        logger.info("用户中断操作")
    except Exception as e:
        logger.error(f"获取历史数据失败: {str(e)}")
        raise

if __name__ == "__main__":
    try:
        create_table()
        get_historical_data()
    except Exception as e:
        logger.error(f"程序运行失败: {str(e)}")
        raise SystemExit(1)
