#!/usr/bin/env python3
"""
Final data-import script: loads targets from a JSON file into the
situation_objects table using a synchronous database connection,
with an async (asyncpg) fallback when the sync stack is unavailable.
"""
import json
import os
from datetime import datetime
from dotenv import load_dotenv
import sys
import logging

# Make the project root importable (this file lives one level below it).
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

# Logging setup: INFO level with timestamped messages.
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)

# Load environment variables from a local .env file, if present.
load_dotenv()

# Database connection settings (environment overrides these defaults).
POSTGRES_HOST = os.getenv("POSTGRES_HOST", "192.168.10.65")
POSTGRES_PORT = os.getenv("POSTGRES_PORT", "5432")
POSTGRES_DATABASE = os.getenv("POSTGRES_DATABASE", "lightrag")
POSTGRES_USER = os.getenv("POSTGRES_USER", "postgres")
POSTGRES_PASSWORD = os.getenv("POSTGRES_PASSWORD", "postgres")

# Synchronous connection URL (psycopg2 driver rather than asyncpg).
DATABASE_URL = f"postgresql+psycopg2://{POSTGRES_USER}:{POSTGRES_PASSWORD}@{POSTGRES_HOST}:{POSTGRES_PORT}/{POSTGRES_DATABASE}"

try:
    from sqlalchemy import create_engine, text
    from sqlalchemy.orm import sessionmaker
    from src.situation.models import SituationObject
except ImportError:
    # Fall back to the asyncpg driver.
    # NOTE(review): importing create_engine/sessionmaker does NOT fail when
    # psycopg2 is missing — this branch only fires when sqlalchemy itself or
    # src.situation.models cannot be imported. A missing psycopg2 surfaces
    # later, when create_engine() is actually called.
    DATABASE_URL = f"postgresql+asyncpg://{POSTGRES_USER}:{POSTGRES_PASSWORD}@{POSTGRES_HOST}:{POSTGRES_PORT}/{POSTGRES_DATABASE}"
    from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
    from sqlalchemy import text
    from src.situation.models import SituationObject

# Field-mapping configuration: raw JSON "type" -> ORM object_type.
TYPE_MAPPING = {
    "infrastructure": "infrastructure",
    "maritime": "sea_target",
    "land": "land_target"
}

# Raw JSON "subtype" -> ORM equipment_category.
SUBTYPE_MAPPING = {
    # Infrastructure
    "airport": "airport",
    "port": "seaport",

    # Sea targets
    "ship": "warship",

    # Land targets
    "military_base": "military_base"
}

# Raw boolean "active" flag -> deployment-status label.
DEPLOYMENT_STATUS_MAPPING = {
    True: "已部署",
    False: "待部署"
}

# Defaults applied to any column the transformation did not populate.
# Mutable entries ({} / []) are COPIED per record in transform_target_data.
DEFAULT_VALUES = {
    "model": "未知",
    "organization": "未知单位",
    "deployment_status": "待部署",
    "detection_range": 0.0,
    "response_time": 0.0,
    "accuracy": 0.0,
    "perception_capability": "",
    "communication_capability": "",
    "applicable_scenarios": "",
    "location": "",
    "extra_metadata": {},
    "tags": []
}

def transform_target_data(target_data):
    """Convert one raw target entry into SituationObject constructor kwargs.

    Args:
        target_data: one entry from targets.json. Recognized keys: "id",
            "name", "description", "location", "type", "subtype", "active",
            "importance", "discoveryTime" and "coordinates" (a dict with
            "latitude"/"longitude"). Missing keys fall back to defaults.

    Returns:
        dict of column values for SituationObject, or None when the
        transformation fails (the error is logged, not propagated).
    """
    try:
        # Coordinates may be absent, null, or malformed; normalize to a dict
        # once instead of re-fetching and re-validating it for every field.
        coords = target_data.get("coordinates")
        if not isinstance(coords, dict):
            coords = {}

        transformed = {
            "code": target_data.get("id", ""),
            "name": target_data.get("name", ""),
            "description": target_data.get("description", ""),
            "location": target_data.get("location", ""),
            "model": DEFAULT_VALUES["model"],
            "organization": DEFAULT_VALUES["organization"],
            # "active" True/False maps to deployed/pending; any other value
            # (including missing) falls back to the default status.
            "deployment_status": DEPLOYMENT_STATUS_MAPPING.get(
                target_data.get("active"),
                DEFAULT_VALUES["deployment_status"]
            ),
            # Unknown types default to "infrastructure".
            "object_type": TYPE_MAPPING.get(target_data.get("type"), "infrastructure"),
            # Unknown subtypes default to "unknown".
            "equipment_category": SUBTYPE_MAPPING.get(target_data.get("subtype"), "unknown"),
        }

        # "lat, lon" display string, only when both coordinates are present.
        coordinates_str = ""
        if "latitude" in coords and "longitude" in coords:
            coordinates_str = f"{coords['latitude']}, {coords['longitude']}"

        transformed["type_specific_data"] = {
            "importance": target_data.get("importance", "general"),
            "discovery_time": target_data.get("discoveryTime", ""),
            "coordinates": coordinates_str,
            "latitude": coords.get("latitude", 0.0),
            "longitude": coords.get("longitude", 0.0),
            # Keep the raw entry for debugging/auditing.
            "original_data": target_data,
        }

        # Fill in any remaining columns with their defaults. Mutable defaults
        # are copied so that records never share the same dict/list instance
        # (the original code installed the exact same {} / [] objects into
        # every imported record).
        for key, default_value in DEFAULT_VALUES.items():
            if key not in transformed:
                if isinstance(default_value, (dict, list)):
                    default_value = type(default_value)(default_value)
                transformed[key] = default_value

        # Audit columns.
        current_time = datetime.now()
        transformed["created_at"] = current_time
        transformed["updated_at"] = current_time
        transformed["created_by"] = "system_import"
        transformed["updated_by"] = "system_import"

        return transformed

    except Exception as e:
        logger.error(f"转换目标数据失败 {target_data.get('id', 'unknown')}: {e}")
        return None

def import_targets_data_sync():
    """Import targets synchronously via SQLAlchemy + psycopg2.

    Reads test/data-situation/targets.json (path is relative to the CWD),
    transforms each entry with transform_target_data(), skips codes that
    already exist in situation_objects, and commits all new rows in one
    transaction.

    Returns:
        dict: statistics with keys "total", "success", "failed",
        "by_type" (counts per object_type) and "errors" (message strings).

    Raises:
        Exception: re-raised after logging on file/connection/commit
        failures; per-row errors are only recorded in the stats.
    """

    # Create the synchronous engine. If the psycopg2 DBAPI is missing this
    # raises ModuleNotFoundError (a subclass of ImportError), which the
    # __main__ guard uses to fall back to the async path.
    engine = create_engine(DATABASE_URL, echo=False)

    # Session factory bound to the engine above.
    SessionLocal = sessionmaker(bind=engine)

    import_stats = {
        "total": 0,
        "success": 0,
        "failed": 0,
        "by_type": {},
        "errors": []
    }

    try:
        # Load the source data file.
        with open('test/data-situation/targets.json', 'r', encoding='utf-8') as f:
            data = json.load(f)

        targets = data.get('targets', [])
        import_stats["total"] = len(targets)

        logger.info(f"开始导入 {len(targets)} 个目标数据")
        # NOTE(review): this logs the full URL including the password.
        logger.info(f"数据库连接URL: {DATABASE_URL}")

        session = SessionLocal()

        # Make unqualified table names resolve in the public schema.
        session.execute(text("SET search_path TO public"))

        try:
            for i, target in enumerate(targets, 1):
                try:
                    # Map the raw JSON entry onto the ORM column layout.
                    transformed_data = transform_target_data(target)
                    if not transformed_data:
                        import_stats["failed"] += 1
                        import_stats["errors"].append(f"目标 {target.get('id')}: 数据转换失败")
                        continue

                    # Skip codes that already exist (soft-deleted rows are
                    # excluded by the is_deleted filter, so they can be
                    # re-imported).
                    existing_stmt = text("""
                        SELECT COUNT(*) FROM situation_objects
                        WHERE code = :code AND is_deleted = false
                    """)
                    existing_result = session.execute(existing_stmt, {"code": transformed_data["code"]})
                    existing_count = existing_result.scalar()

                    if existing_count > 0:
                        logger.warning(f"目标 {transformed_data['code']} 已存在，跳过")
                        continue

                    # Stage the new ORM object; committed in bulk below.
                    db_obj = SituationObject(**transformed_data)
                    session.add(db_obj)

                    # Per-type success counters.
                    obj_type = transformed_data["object_type"]
                    if obj_type not in import_stats["by_type"]:
                        import_stats["by_type"][obj_type] = 0
                    import_stats["by_type"][obj_type] += 1
                    import_stats["success"] += 1

                    logger.info(f"成功导入目标 {i}/{len(targets)}: {transformed_data['name']} ({transformed_data['code']})")

                except Exception as e:
                    # Row-level failure: record it and keep importing the rest.
                    import_stats["failed"] += 1
                    error_msg = f"目标 {target.get('id', 'unknown')}: {str(e)}"
                    import_stats["errors"].append(error_msg)
                    logger.error(error_msg)

            # One commit for the whole batch.
            logger.info("正在提交事务到数据库...")
            session.commit()
            logger.info("事务提交成功！")

        except Exception as e:
            # Batch-level failure: undo everything staged so far.
            logger.error(f"处理数据时出错，正在回滚: {e}")
            session.rollback()
            raise
        finally:
            session.close()

        logger.info("数据导入完成！")
        logger.info(f"总数: {import_stats['total']}")
        logger.info(f"成功: {import_stats['success']}")
        logger.info(f"失败: {import_stats['failed']}")
        logger.info(f"按类型统计: {import_stats['by_type']}")

        if import_stats['errors']:
            logger.warning("导入过程中发生的错误:")
            for error in import_stats['errors']:
                logger.warning(f"  - {error}")

        return import_stats

    except Exception as e:
        logger.error(f"导入失败: {e}")
        import traceback
        logger.error(traceback.format_exc())
        raise
    finally:
        # Release pooled connections even on failure.
        engine.dispose()

async def import_targets_data_async():
    """Import targets asynchronously via SQLAlchemy + asyncpg (fallback path).

    Mirrors import_targets_data_sync(): reads the same JSON file, applies
    transform_target_data(), skips existing codes, commits one transaction.
    Only used when the synchronous stack could not be imported.

    Returns:
        dict: statistics with keys "total", "success", "failed",
        "by_type" and "errors".

    Raises:
        Exception: re-raised after logging on file/connection/commit
        failures; per-row errors are only recorded in the stats.
    """

    # Create the async engine (asyncpg driver in DATABASE_URL).
    engine = create_async_engine(DATABASE_URL, echo=False)

    # Async session factory; expire_on_commit=False keeps ORM objects
    # usable after commit without a refresh round-trip.
    AsyncSessionLocal = async_sessionmaker(
        engine, class_=AsyncSession, expire_on_commit=False
    )

    import_stats = {
        "total": 0,
        "success": 0,
        "failed": 0,
        "by_type": {},
        "errors": []
    }

    try:
        # Load the source data file (synchronous I/O is acceptable here —
        # one-shot script, not a server event loop).
        with open('test/data-situation/targets.json', 'r', encoding='utf-8') as f:
            data = json.load(f)

        targets = data.get('targets', [])
        import_stats["total"] = len(targets)

        logger.info(f"开始导入 {len(targets)} 个目标数据")
        # NOTE(review): this logs the full URL including the password.
        logger.info(f"数据库连接URL: {DATABASE_URL}")

        async with AsyncSessionLocal() as session:
            # Make unqualified table names resolve in the public schema.
            await session.execute(text("SET search_path TO public"))

            for i, target in enumerate(targets, 1):
                try:
                    # Map the raw JSON entry onto the ORM column layout.
                    transformed_data = transform_target_data(target)
                    if not transformed_data:
                        import_stats["failed"] += 1
                        import_stats["errors"].append(f"目标 {target.get('id')}: 数据转换失败")
                        continue

                    # Skip codes that already exist (soft-deleted rows are
                    # excluded by the is_deleted filter).
                    existing_stmt = text("""
                        SELECT COUNT(*) FROM situation_objects
                        WHERE code = :code AND is_deleted = false
                    """)
                    existing_result = await session.execute(existing_stmt, {"code": transformed_data["code"]})
                    existing_count = existing_result.scalar()

                    if existing_count > 0:
                        logger.warning(f"目标 {transformed_data['code']} 已存在，跳过")
                        continue

                    # Stage the new ORM object; committed in bulk below.
                    db_obj = SituationObject(**transformed_data)
                    session.add(db_obj)

                    # Per-type success counters.
                    obj_type = transformed_data["object_type"]
                    if obj_type not in import_stats["by_type"]:
                        import_stats["by_type"][obj_type] = 0
                    import_stats["by_type"][obj_type] += 1
                    import_stats["success"] += 1

                    logger.info(f"成功导入目标 {i}/{len(targets)}: {transformed_data['name']} ({transformed_data['code']})")

                except Exception as e:
                    # Row-level failure: record it and keep importing the rest.
                    import_stats["failed"] += 1
                    error_msg = f"目标 {target.get('id', 'unknown')}: {str(e)}"
                    import_stats["errors"].append(error_msg)
                    logger.error(error_msg)

            # One commit for the whole batch; rollback happens implicitly
            # when the async context manager exits on an exception.
            logger.info("正在提交事务到数据库...")
            await session.commit()
            logger.info("事务提交成功！")

        logger.info("数据导入完成！")
        logger.info(f"总数: {import_stats['total']}")
        logger.info(f"成功: {import_stats['success']}")
        logger.info(f"失败: {import_stats['failed']}")
        logger.info(f"按类型统计: {import_stats['by_type']}")

        if import_stats['errors']:
            logger.warning("导入过程中发生的错误:")
            for error in import_stats['errors']:
                logger.warning(f"  - {error}")

        return import_stats

    except Exception as e:
        logger.error(f"导入失败: {e}")
        import traceback
        logger.error(traceback.format_exc())
        raise
    finally:
        # Release pooled connections even on failure.
        await engine.dispose()

if __name__ == "__main__":
    try:
        # Prefer the synchronous path (psycopg2 driver).
        import_targets_data_sync()
    except (ImportError, NameError):
        # ImportError/ModuleNotFoundError: create_engine() could not load the
        # psycopg2 DBAPI. NameError: the module-level sync SQLAlchemy import
        # failed, so create_engine/sessionmaker were never bound (and
        # DATABASE_URL already points at asyncpg) — the original bare
        # `except ImportError` missed that case and crashed instead of
        # falling back. Either way, retry with the async driver.
        import asyncio
        asyncio.run(import_targets_data_async())