import argparse
import asyncio
from pathlib import Path
from rosbags.rosbag1 import Reader as Rosbag1Reader
from rosbags.typesys import get_typestore, Stores, get_types_from_msg
import sys
import json
import os
from .customchange import process_custom_topic_conversion

# Optional NumPy support: ndarray values get converted via .tolist() when available.
try:
    import numpy as np
except ImportError:
    NUMPY_AVAILABLE = False
    # Cannot log here: the project logger is only configured later in this module.
else:
    NUMPY_AVAILABLE = True
from typing import Optional, List

from apis.datebase.database_interface import AsyncSQLiteDB

# Try to import the project-specific logger; fall back to standard logging.
#
# Fix: the original had two back-to-back try/except blocks — the first except
# imported `logging` but never bound `logger`, and the second except referenced
# `e` from the first block, which Python unbinds when an `except ... as e`
# clause exits, so the fallback path raised NameError. One consolidated
# try/except handles both concerns.
try:
    from gglobal import logger
except ImportError as e:
    import logging
    logger = logging.getLogger(__name__)
    logger.error(f"导入项目专用模块失败：{e}。请确保'setting.setting'和'apis.datebase.database_interface'模块可用。")


def _msg_to_dict(msg):
    """Recursively convert a deserialized ROS message (or plain value) into JSON-safe data.

    Args:
        msg: A rosbags-deserialized message object (has ``__msgtype__``) or any
            plain value encountered while walking one.

    Returns:
        A JSON-serializable structure: dict for message objects, list for
        sequences, hex string for bytes, ``.tolist()`` for numpy arrays, the
        value itself for primitives, and ``str(value)`` as a last resort.
    """
    # Anything without a __msgtype__ attribute is not a ROS message object.
    if not hasattr(msg, '__msgtype__'):
        logger.info(f"没有找到 __msgtype__ 属性，类型为 {type(msg)}，值：{msg}")
        if isinstance(msg, (str, int, float, bool)):
            return msg
        elif isinstance(msg, (bytes, bytearray)):
            return msg.hex()  # hex string keeps binary payloads JSON-serializable
        # Fix: guard the np reference — the module deliberately tolerates a
        # missing numpy (NUMPY_AVAILABLE), but this line previously raised
        # NameError on `np` when numpy was not installed.
        elif NUMPY_AVAILABLE and isinstance(msg, np.ndarray):
            return msg.tolist()
        elif msg is None:
            return None
        else:
            logger.warning(f"未知类型 {type(msg)}，值：{msg}，转为字符串")
            return str(msg)

    # ROS message object: walk its public attributes.
    result = {}
    for attr in dir(msg):
        # Skip private names and rosbags/ROS metadata attributes.
        if (attr.startswith('_') or
                attr in ('__msgtype__', '_md5sum', '_type', 'get_fields_and_field_types',
                         'SLOT_TYPES', 'get_field_names')):
            continue

        try:
            value = getattr(msg, attr)
        except AttributeError:
            logger.warning(f"无法获取属性 {attr}，跳过")
            continue

        # Lists/tuples: convert element-wise (elements may be nested messages).
        if isinstance(value, (list, tuple)):
            result[attr] = [_msg_to_dict(item) for item in value]
        # NumPy arrays (possibly multi-dimensional) — same guard as above.
        elif NUMPY_AVAILABLE and isinstance(value, np.ndarray):
            result[attr] = value.tolist()
        # Nested ROS message objects.
        elif hasattr(value, '__msgtype__'):
            result[attr] = _msg_to_dict(value)
        # Time-like values (Time or Duration expose sec/nanosec).
        elif hasattr(value, 'sec') and hasattr(value, 'nanosec'):
            result[attr] = {
                "sec": value.sec,
                "nanosec": value.nanosec
            }
        # Primitive values pass through unchanged.
        else:
            result[attr] = value

    return result

class RosBagConverter:
    """Converts a ROS1 bag file into a SQLite database of topics and JSON-encoded messages."""

    def __init__(self, bag_path: Path, db_path_override: Optional[Path] = None, target_topics: Optional[List[str]] = None, custom_msg_path: Optional[Path] = None):
        """Initialize the ROS bag converter.

        Args:
            bag_path: Path to the ROS bag file.
            db_path_override: Optional override for the default database file path.
            target_topics: Optional list of topic names to save (all topics when None).
            custom_msg_path: Optional root directory containing custom message type definitions.
        """
        self.bag_path = bag_path
        
        if db_path_override:
            self.db_path = db_path_override
        else:
            # Default save path: ./db/<bag_file_name>/<bag_file_name>.db
            workspace_root = Path(__file__).resolve().parent.parent.parent 
            db_dir = workspace_root / "db" / self.bag_path.stem
            db_dir.mkdir(parents=True, exist_ok=True) # create the directory if it does not exist
            self.db_path = db_dir / f"{self.bag_path.stem}.db"
            
        self.db_interface = AsyncSQLiteDB(self.db_path)
        self.target_topics = target_topics
        self.custom_msg_path = custom_msg_path
        self.progress_callback = None  # Optional callable receiving progress strings
        self.cancel_requested = False  # Cooperative cancellation flag checked in process_bag
        self._last_progress_time = 0  # For throttling progress updates

        # Initialize the type store
        try:
            self.typestore = get_typestore(Stores.ROS1_NOETIC)
        except Exception as e:
            logger.warning(f"初始化默认ROS1类型存储失败：{e}。将尝试重新创建。")
            # NOTE(review): this retry repeats the exact call that just failed
            # and will most likely raise the same exception — confirm intent.
            self.typestore = get_typestore(Stores.ROS1_NOETIC) # try fetching it once more
        
        if self.custom_msg_path:
            self._register_custom_types(self.custom_msg_path)
        else:
            # Fall back to a default custom-message directory when none was supplied
            default_msg_dir = Path(__file__).parent / 'msg'
            if default_msg_dir.exists():
                logger.info(f"未指定自定义消息路径，尝试使用默认路径: {default_msg_dir}")
                self._register_custom_types(default_msg_dir)
            else:
                logger.info("未指定自定义消息路径，且默认路径不存在，将仅使用内置类型。")

    async def _init_db(self):
        """Initialize the SQLite database via AsyncSQLiteDB and create the required tables.

        Any pre-existing database file is deleted first, so every conversion
        starts from a clean file, then a new connection is opened and the
        ``topics`` and ``messages`` tables are created.

        Raises:
            RuntimeError: If deleting the old file, connecting, or creating
                the tables fails.
        """
        # Table schemas
        topics_columns = "id INTEGER PRIMARY KEY AUTOINCREMENT, name TEXT NOT NULL UNIQUE, type TEXT NOT NULL, serialization_format TEXT NOT NULL"
        messages_columns = "id INTEGER PRIMARY KEY AUTOINCREMENT, topic_id INTEGER NOT NULL, timestamp INTEGER NOT NULL, data TEXT NOT NULL, FOREIGN KEY (topic_id) REFERENCES topics (id)"

        # Delete the database file if it already exists
        if self.db_interface.db_path.exists():
            try:
                await self.db_interface.close()  # make sure any open connection is closed first
                self.db_interface.delete_db_file()
                logger.info(f"已删除现有数据库文件：{self.db_interface.db_path}")
            except Exception as e:
                logger.error(f"删除数据库文件失败：{e}")
                raise RuntimeError(f"删除数据库文件失败：{e}")  # raise instead of exiting the process

        # Connect to the fresh database file
        try:
            await self.db_interface.connect()
            logger.info(f"已连接到数据库：{self.db_interface.db_path}")
        except Exception as e:
            logger.error(f"连接数据库失败：{e}")
            raise RuntimeError(f"连接数据库失败：{e}")

        # Create the required tables
        try:
            await self.db_interface.create_table("topics", topics_columns)
            await self.db_interface.create_table("messages", messages_columns)
            logger.info("成功创建数据库表。")
        except Exception as e:
            logger.error(f"创建数据库表失败：{e}")
            raise RuntimeError(f"创建数据库表失败：{e}")
        
        logger.info(f"数据库初始化完成，数据表已创建于：{self.db_path}")

    def _register_custom_types(self, msg_root_path: Path):
        """Register custom message types from *msg_root_path*, keeping existing ones.

        Expects the layout ``<msg_root_path>/<package_name>/*.msg``. Each type is
        registered under its ROS2-style name (``pkg/msg/Name``) with a ROS1-style
        alias (``pkg/Name``) pointing at the same type entry.
        """
        if not self.typestore:
            logger.error("Typestore 未初始化，无法注册自定义类型。")
            return

        logger.info(f"开始从 {msg_root_path} 注册自定义消息类型...")
        found_msg_files = False
        for pkg_dir in msg_root_path.iterdir():
            if pkg_dir.is_dir():
                pkg_name = pkg_dir.name
                for msg_file in pkg_dir.glob('*.msg'):
                    found_msg_files = True
                    msg_name = msg_file.stem
                    # ROS2 style path, e.g., geometry_msgs/msg/Point
                    ros2_type_name = f"{pkg_name}/msg/{msg_name}"
                    # ROS1 style path, e.g., geometry_msgs/Point
                    ros1_type_name = f"{pkg_name}/{msg_name}"

                    try:
                        # Skip types that are already registered
                        if ros2_type_name in self.typestore.types:
                            logger.info(f"跳过已存在的消息类型: {ros2_type_name}")
                            # Make sure the ROS1-style alias exists as well
                            if ros1_type_name not in self.typestore.types:
                                self.typestore.types[ros1_type_name] = self.typestore.types[ros2_type_name]
                                logger.debug(f"为已存在类型创建ROS1别名: {ros1_type_name} => {ros2_type_name}")
                            continue
                        
                        # Type not present yet: register it
                        msg_definition = msg_file.read_text(encoding='utf-8')
                        # Register under the ROS2-style name; rosbags appears to key on this form internally
                        types_dict = get_types_from_msg(msg_definition, ros2_type_name)
                        
                        try:
                            self.typestore.register(types_dict)
                            logger.info(f"成功注册自定义消息 (ROS2风格): {ros2_type_name}")
                            
                            # Create the ROS1-style alias
                            if ros2_type_name in self.typestore.types:
                                self.typestore.types[ros1_type_name] = self.typestore.types[ros2_type_name]
                                logger.debug(f"创建/更新自定义类型别名: {ros1_type_name} => {ros2_type_name}")
                            else:
                                logger.warning(f"注册 {ros2_type_name} 后未在 typestore 中找到，无法创建ROS1别名 {ros1_type_name}")
                        except Exception as e:
                            if "already present with different definition" in str(e):
                                logger.warning(f"类型 {ros2_type_name} 已存在但定义不同，保留原有定义")
                            else:
                                raise
                    except Exception as e:
                        logger.error(f"处理自定义消息 {ros2_type_name} (从 {msg_file}) 失败: {e}", exc_info=True)
        
        if not found_msg_files:
            logger.warning(f"在路径 {msg_root_path} 下未找到任何 .msg 文件进行注册。")
        else:
            logger.info("自定义消息类型注册完成。")
            # Optional: dump all registered types for debugging
            # logger.debug("当前已注册类型:")
            # for type_name in sorted(self.typestore.types.keys()):
            #     logger.debug(f"  - {type_name}")

    async def process_bag(self):
        """Asynchronously read the ROS bag file and store its data in the SQLite database.

        Workflow: initialize the database, insert the (optionally filtered)
        topics and read back their row IDs, then decode every message to JSON
        and insert in batches of 1000. Honors ``self.cancel_requested`` between
        messages, and reports progress through ``self.progress_callback``
        (throttled to one update per 2 seconds). The database connection is
        always closed in the ``finally`` block.
        """
        if not self.bag_path.exists():
            logger.error(f"未找到包文件：{self.bag_path}")
            return

        if self.progress_callback:
            self.progress_callback(f"Initializing database at {self.db_path}...")
        await self._init_db()
        topic_name_to_id = {}
        topics_to_insert = []

        try:
            with Rosbag1Reader(self.bag_path) as reader:
                logger.info(f"正在处理包文件：{self.bag_path}")
                if self.progress_callback:
                    self.progress_callback(f"Processing bag file: {self.bag_path.name}")
                logger.info(f"包文件时长：{reader.duration * 1e-9:.2f}秒，开始时间：{reader.start_time}，结束时间：{reader.end_time}，消息数量：{reader.message_count}")

                # First, collect topic rows for insertion
                if self.progress_callback:
                    self.progress_callback("Reading topic information...")
                for conn_obj in reader.connections:
                    topic_name = conn_obj.topic
                    # When target topics are configured, only keep those topics
                    if self.target_topics and topic_name not in self.target_topics:
                        logger.info(f"跳过非目标主题: {topic_name}")
                        continue
                    topic_type = conn_obj.msgtype
                    serialization_format = 'ros1_native_bag_format'
                    topics_to_insert.append((topic_name, topic_type, serialization_format))
                
                # Insert the topics and fetch their row IDs
                if topics_to_insert:
                    # INSERT OR IGNORE avoids duplicate-insert errors
                    try:
                        placeholders = ', '.join(['?'] * 3)  # name, type, serialization_format
                        sql = f"INSERT OR IGNORE INTO topics (name, type, serialization_format) VALUES ({placeholders})"
                        async with self.db_interface.conn.cursor() as cursor:
                            await cursor.executemany(sql, topics_to_insert)
                            await self.db_interface.conn.commit()
                        logger.info(f"尝试插入{len(topics_to_insert)}个话题（忽略重复）。")
                    except Exception as e:
                        logger.error(f"插入话题失败：{e}", exc_info=True)
                        raise
                    
                    # Fetch topic IDs (simplified approach; would benefit from
                    # more robust error handling and uniqueness checks)
                    async with self.db_interface.conn.execute("SELECT id, name FROM topics") as cursor:
                        async for row in cursor:
                            topic_name_to_id[row[1]] = row[0]
                    logger.debug(f"已获取话题ID：{topic_name_to_id}")
                    if self.progress_callback:
                        self.progress_callback(f"Found {len(topic_name_to_id)} topics to process.")

                # Second, decode and insert the messages
                message_insert_batch = []
                batch_size = 1000  # commit every 1000 messages for performance
                count = 0
                # total_messages_in_bag = reader.message_count  # total message count in the bag
                processed_messages_for_callback = 0 # Counter for callback throttling
                import time
                
                for connection, timestamp, rawdata in reader.messages():
                    # Check for cancellation request
                    if self.cancel_requested:
                        logger.info("Conversion cancelled by user request")
                        if self.progress_callback:
                            self.progress_callback("Conversion cancelled")
                        return
                    topic_name = connection.topic
                    # When target topics are configured, only process their messages
                    if self.target_topics and topic_name not in self.target_topics:
                        continue # skip this message
                    
                    if topic_name not in topic_name_to_id:
                        logger.warning(f"跳过未知话题ID的消息：{topic_name}。这可能是由于话题插入失败或ID获取不完整导致。")
                        continue
                    
                    topic_id = topic_name_to_id[topic_name]
                    # Decode the message
                    try:
                        # Make sure the typestore and connection.msgtype are available
                        if self.typestore and hasattr(connection, 'msgtype'):
                            msg = self.typestore.deserialize_ros1(rawdata, connection.msgtype)
                            # Store the decoded message as JSON
                            try:
                                msg_dict = _msg_to_dict(msg)
                                data_to_store = json.dumps(msg_dict)
                                
                                # Apply any custom topic conversions
                                converted_data = await process_custom_topic_conversion(
                                    topic_name, msg_dict, timestamp, topic_name_to_id, self.db_interface
                                )
                                if converted_data:
                                    # Handle a single conversion result or a list of them
                                    if isinstance(converted_data, list):
                                        # Multiple results (e.g. /baselink_tf expands into several topics)
                                        for result in converted_data:
                                            converted_topic_id, converted_timestamp, converted_data_str = result
                                            message_insert_batch.append((converted_topic_id, converted_timestamp, converted_data_str))
                                    else:
                                        # Single conversion result
                                        converted_topic_id, converted_timestamp, converted_data_str = converted_data
                                        message_insert_batch.append((converted_topic_id, converted_timestamp, converted_data_str))
                                
                            except Exception as json_e:
                                logger.warning(f"无法将消息 {topic_name} (类型: {connection.msgtype}) 转换为JSON: {json_e}. 将使用 str() 回退.")
                                data_to_store = str(msg) 
                        else:
                            logger.warning(f"无法解码消息，因为类型存储或消息类型不可用：话题 {topic_name}")
                            # Either store the raw bytes or skip the message
                            data_to_store = rawdata # alternatively skip with: continue
                    except Exception as e:
                        logger.error(f"解码或处理消息时出错：话题 {topic_name}，错误：{e}", exc_info=True)
                        data_to_store = rawdata # decoding failed; store the raw bytes
                    
                    # Queue the original message for insertion (under its original topic)
                    message_insert_batch.append((topic_id, timestamp, data_to_store))
                    count += 1

                    if len(message_insert_batch) >= batch_size:
                        try:
                            await self.db_interface.insert_data("messages", message_insert_batch, columns="topic_id, timestamp, data")
                            logger.debug(f"已插入{len(message_insert_batch)}条消息到数据库。总计：{count}")
                            processed_messages_for_callback += len(message_insert_batch)
                            
                            # Throttle progress updates to every 2 seconds
                            current_time = time.time()
                            if self.progress_callback and (current_time - self._last_progress_time >= 2.0):
                                self.progress_callback(f"Processed {count} messages")
                                self._last_progress_time = current_time
                            
                            message_insert_batch = []
                        except Exception as db_error:
                            logger.error(f"Database insertion error: {db_error}")
                            if self.progress_callback:
                                self.progress_callback(f"Database error: {str(db_error)}")
                            raise
                
                # If cancellation was requested, skip inserting the remaining messages
                if message_insert_batch and not self.cancel_requested: # flush the remaining messages
                    try:
                        await self.db_interface.insert_data("messages", message_insert_batch, columns="topic_id, timestamp, data")
                        logger.info(f"已插入剩余的{len(message_insert_batch)}条消息到数据库。总计：{count}")
                        if self.progress_callback:
                            self.progress_callback(f"Finalizing... Processed {count} messages")
                    except Exception as db_error:
                        logger.error(f"Final database insertion error: {db_error}")
                        if self.progress_callback:
                            self.progress_callback(f"Final insertion error: {str(db_error)}")
                        raise
                elif self.cancel_requested and message_insert_batch:
                    logger.info(f"转换已取消，跳过插入剩余的{len(message_insert_batch)}条消息")

                if not self.cancel_requested:
                    logger.info(f"包文件处理完成。总共处理消息数：{count}")
                    if self.progress_callback:
                        self.progress_callback("Bag file processing complete!")
                else:
                    logger.info(f"转换已取消。已处理消息数：{count}")
                    if self.progress_callback:
                        self.progress_callback("Conversion cancelled by user")

        except Exception as e:
            error_msg = f"处理包文件{self.bag_path}时出错：{e}"
            logger.error(error_msg, exc_info=True)
            if self.progress_callback:
                self.progress_callback(f"Error: {str(e)}")
            raise  # Re-raise the exception for proper error handling in calling code
        finally:
            # Always close the database connection, even on error or cancellation
            if self.db_interface:
                try:
                    await self.db_interface.close()
                    logger.info("数据库连接已关闭。")
                except Exception as close_error:
                    logger.error(f"关闭数据库连接时出错：{close_error}")
    
    def cancel_conversion(self):
        """Request cancellation; process_bag checks this flag between messages."""
        self.cancel_requested = True
        logger.info("Conversion cancellation requested")

async def main_async():
    """Command-line entry point: convert a ROS1 bag file into a SQLite database.

    Parses ``--bag_file`` (input bag path; defaults to the previously
    hard-coded sample path for backward compatibility) and ``--db_file``
    (optional output database path), then runs the conversion restricted to
    the project's default target topics.
    """
    parser = argparse.ArgumentParser(description="使用异步数据库接口将ROS1包文件转换为SQLite数据库。")
    # Fix: the bag path was hard-coded; expose it as an option whose default is
    # the old hard-coded path so existing invocations keep working.
    parser.add_argument(
        "--bag_file",
        type=str,
        default=r"C:\Users\Administrator\Desktop\BaiduSyncdisk\mydir\ros-bag-win\db\20250207135800.bag",
        help="输入的ROS1包文件路径。",
    )
    parser.add_argument("--db_file", type=str, help="可选：输出SQLite数据库文件的路径。默认为包文件所在目录下的<bag_file_name>.db。")
    args = parser.parse_args()

    bag_path = Path(args.bag_file)
    # Fix: --db_file was previously parsed but never used (the wiring was commented out).
    db_path_override = Path(args.db_file) if args.db_file else None

    # Import the default topic configuration lazily to avoid a hard dependency at module load
    from apis.ros1 import target_topics as default_topics

    logger.info(f"目标主题：{default_topics}")

    converter = RosBagConverter(bag_path, db_path_override=db_path_override, target_topics=default_topics)
    await converter.process_bag()

if __name__ == "__main__":
    asyncio.run(main_async())