import json
import os
import traceback
from pathlib import Path
from conn_manager import get_faq_database_cursor, FAQ_DATA_DIR
import logging

logger = logging.getLogger(__name__)

# ---------- SQL ----------
# Selects every enabled (ENABLE = 1) English-locale (en_US) FAQ row,
# joined to its category name, which is exposed under the alias "model".
# Both joins are LEFT JOINs, so FAQs without a category mapping are kept
# and arrive with model = NULL (handled downstream as 'UNKNOWN').
SQL = """
SELECT
    c.NAME AS model,
    a.ask  AS title,
    a.question AS content
FROM
    faq a
    LEFT JOIN faq_category b ON a.id = b.faqId
    LEFT JOIN category c      ON b.categoryId = c.id
WHERE
    a.locale = 'en_US'
    AND a.ENABLE = 1;
"""

def export_faq_by_model(output_dir=None):
    """Export enabled en_US FAQ entries from the FAQ database, grouped by model.

    Runs the module-level ``SQL`` query, groups the resulting rows by their
    ``model`` column (NULL/empty model becomes ``'UNKNOWN'``), and writes one
    JSON file per model named ``<model>-faq.txt`` (path separators in the
    model name are replaced with underscores).

    Args:
        output_dir: Directory to write the files into. Defaults to the
            configured ``FAQ_DATA_DIR`` from ``conn_manager``.

    Returns:
        A tuple ``(row_count, model_count)``: total FAQ rows fetched and the
        number of distinct models (files attempted).

    Raises:
        Exception: If the output directory cannot be created/accessed, or if
            the database query itself fails. Per-file write errors are logged
            but do not abort the export.
    """
    logger.info("📋 开始导出FAQ数据 (使用FAQ专用数据库连接)...")

    if output_dir is None:
        # Fall back to the FAQ data directory from configuration.
        output_dir = Path(FAQ_DATA_DIR)
        logger.info(f"📁 使用配置的FAQ数据目录: {FAQ_DATA_DIR}")
    else:
        output_dir = Path(output_dir)

    # Absolute path is only computed for clearer log messages.
    abs_output_dir = output_dir.resolve()
    logger.info(f"📁 FAQ目标输出目录: {abs_output_dir}")

    try:
        output_dir.mkdir(exist_ok=True, parents=True)
        logger.info("✅ FAQ输出目录创建成功")

        # Belt-and-braces: confirm the directory really exists before writing.
        if output_dir.exists():
            logger.info(f"✅ FAQ目录存在确认: {abs_output_dir}")
        else:
            logger.error(f"❌ FAQ目录创建失败: {abs_output_dir}")
            raise Exception(f"无法创建或访问FAQ目录: {abs_output_dir}")

        logger.info("🔍 开始执行SQL查询 (FAQ数据库)...")
        with get_faq_database_cursor() as cursor:
            cursor.execute(SQL)
            # PEP 249: cursor.description[i][0] is the column name.
            cols = [d[0] for d in cursor.description]
            rows = [dict(zip(cols, row)) for row in cursor.fetchall()]
            logger.info(f"📊 SQL查询完成，获取到 {len(rows)} 条FAQ记录")

        # --- Group rows by model; NULL/empty model falls back to 'UNKNOWN' ---
        logger.info("📦 开始按型号分组FAQ数据...")
        grouped = {}
        for row in rows:
            model = (row['model'] or 'UNKNOWN').strip()
            grouped.setdefault(model, []).append(row)

        logger.info(f"✅ FAQ数据分组完成，共 {len(grouped)} 个型号")
        for model, data in grouped.items():
            logger.info(f"  📱 {model}: {len(data)} 条FAQ")

        # --- Write one JSON file per model ---
        logger.info("💾 开始写入FAQ文件...")
        successful_files = 0
        for model, data in grouped.items():
            try:
                # Sanitize path separators so the model name is a safe filename.
                safe_name = model.replace('/', '_').replace('\\', '_')
                out_file = output_dir / f"{safe_name}-faq.txt"
                abs_filepath = out_file.resolve()

                logger.info(f"💾 准备写入FAQ文件: {abs_filepath}")

                with out_file.open('w', encoding='utf-8') as f:
                    json.dump(data, f, ensure_ascii=False, indent=2)

                # Verify the file was actually created before counting it.
                if out_file.exists():
                    file_size = out_file.stat().st_size
                    logger.info(f"✅ 已导出 {len(data)} 条FAQ → {out_file.name} ({file_size} 字节)")
                    # BUGFIX: only count verified successes; previously the
                    # counter was incremented even when the existence check
                    # failed, over-reporting in the final summary.
                    successful_files += 1
                else:
                    logger.error(f"❌ 文件创建失败: {abs_filepath}")

            except Exception as e:
                # A single failed file must not abort the remaining exports.
                logger.error(f"❌ 导出FAQ文件失败 {model}: {str(e)}")
                logger.error(f"🔍 详细错误信息:\n{traceback.format_exc()}")

        logger.info(f"🎉 FAQ导出完成！成功导出 {successful_files}/{len(grouped)} 个文件")
        logger.info(f"📈 FAQ统计信息: 共 {len(rows)} 条记录，{len(grouped)} 个型号")

        # Final sanity listing of what actually landed in the directory.
        try:
            files_in_dir = list(output_dir.glob("*-faq.txt"))
            logger.info(f"📋 FAQ目录中的文件列表:")
            for file in files_in_dir:
                file_size = file.stat().st_size
                logger.info(f"  📄 {file.name} ({file_size} 字节)")
        except Exception as e:
            logger.warning(f"⚠️ 无法列出FAQ目录文件: {str(e)}")

        return len(rows), len(grouped)

    except Exception as e:
        logger.error(f"❌ FAQ导出过程中出现错误: {str(e)}")
        logger.error(f"🔍 详细错误信息:\n{traceback.format_exc()}")
        raise

if __name__ == "__main__":
    # Configure a root handler so log output is actually visible when this
    # module is executed directly — without this, every logger.info/error
    # below is silently dropped (no handler is installed by default).
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s %(levelname)s %(name)s: %(message)s",
    )
    logger.info("🚀 直接运行FAQ爬虫...")
    try:
        export_faq_by_model()
        logger.info("✅ FAQ爬虫运行完成")
    except Exception as e:
        logger.error(f"❌ FAQ爬虫运行失败: {str(e)}")
        logger.error(f"🔍 详细错误信息:\n{traceback.format_exc()}")
