import argparse
import csv
import re
import json
from collections import defaultdict
from pathlib import Path

# Dictionary keys shared across the parsed data structures.
# FILE_NAME_KEY / FUNCTION_NAME_KEY / LINE_TABLE_KEY index entries of
# `pa_info` (built by _parse_pa_file); FUNCTION_ID_KEY /
# FUNCTION_OFFSET_KEY / LINE_COUNT_KEY are the exact column headers of
# the runtime CSV read by csv.DictReader — do not rename without
# changing the CSV producer.
FILE_NAME_KEY = "file_name"
FUNCTION_NAME_KEY = "function_name"
FUNCTION_ID_KEY = "function_id"
FUNCTION_OFFSET_KEY = "function_offset"
LINE_TABLE_KEY = "line_table"
LINE_COUNT_KEY = "line_count"


class CoverageInfo:
    """Coverage data for a single source file, serializable to LCOV format.

    Line and function counters are pre-seeded with 0 from the parsed .pa
    info so that never-executed lines and functions still appear in the
    LCOV record with a count of 0.
    """

    def __init__(self, source_file_path, pa_info, ast_info):
        self.source_file_path = source_file_path
        self.total_branches = 0
        self.total_functions = 0
        self.line_stats = defaultdict(int)  # {line number: execution count}
        self.func_stats = defaultdict(int)  # {function name: execution count}
        self.branch_stats = {}  # {branch: taken count}; to be filled by AST parsing
        self.function_definition_line = {}  # {definition line number: function name}
        self._init_pa_info(pa_info)
        self._init_ast_info(ast_info)

    def _init_pa_info(self, pa_info):
        """Seed line/function counters from the .pa function info.

        Bug fixes versus the original:
        - only functions whose file name matches this object's source file
          are counted (previously every CoverageInfo accumulated the lines
          and functions of *all* files, polluting each LCOV record);
        - ``total_functions`` is maintained, so ``FNF`` is no longer
          always 0.
        """
        for func_info in pa_info.values():
            # Skip functions that belong to a different source file.
            if func_info[FILE_NAME_KEY] != self.source_file_path:
                continue
            self.func_stats[func_info[FUNCTION_NAME_KEY]] = 0
            self.total_functions += 1
            for line_num in func_info[LINE_TABLE_KEY].values():
                self.line_stats[line_num] = 0

    def _init_ast_info(self, ast_info):
        # TODO: initialize branch information from the AST info.
        return

    def to_string(self) -> str:
        """Render this object's coverage data as one LCOV record."""
        lines = []

        # 1. Source file path.
        lines.append(f"SF:{self.source_file_path}.ets")

        # 2. Function definition line numbers (FN).
        for start_line, func_name in sorted(self.function_definition_line.items()):
            lines.append(f"FN:{start_line},{func_name}")

        # 3. Function execution counts (FNDA).
        for func_name, count in sorted(self.func_stats.items()):
            lines.append(f"FNDA:{count},{func_name}")

        # 4. Functions found / functions hit.
        lines.append(f"FNF:{self.total_functions}")
        lines.append(f"FNH:{sum(1 for count in self.func_stats.values() if count > 0)}")

        # 5. Line coverage (DA).
        for line_num, count in sorted(self.line_stats.items()):
            lines.append(f"DA:{line_num},{count}")

        # 6. Branch coverage summary (BRF/BRH); individual BRDA lines are
        #    pending AST branch support (see _init_ast_info TODO).
        lines.append(f"BRF:{self.total_branches}")
        lines.append(f"BRH:{sum(1 for br in self.branch_stats.values() if br > 0)}")

        # 7. Record terminator.
        lines.append("end_of_record")

        return "\n".join(lines) + "\n"  # trailing newline separates records

    def record_line_execution(self, line_num, line_count):
        """Add *line_count* executions to source line *line_num*."""
        self.line_stats[line_num] += line_count

    def record_func_execution(self, func_name):
        """Record one invocation of function *func_name*."""
        self.func_stats[func_name] += 1


class CoverageInfoGenerator:
    """Aggregates a .pa disassembly, an AST JSON file and a runtime CSV
    into per-source-file CoverageInfo objects and exports LCOV output.
    """

    def __init__(self, pa_file_path, runtime_file_path, ast_file_path):
        # {source file path: CoverageInfo}
        self.coverage_info_dict = {}

        # Parsed function info:
        # {function id: {file name, function name, line table: {offset: line}}}
        self.pa_info = {}
        self.ast_info = {}

        self._parse_pa_file(pa_file_path)
        self._parse_ast_file(ast_file_path)
        self._init_coverage_info()
        self._parse_runtime_info_file(runtime_file_path)

    def _init_coverage_info(self):
        """Create one CoverageInfo per source file found in the .pa info."""
        for func_info in self.pa_info.values():
            source_file_path = func_info[FILE_NAME_KEY]
            self.coverage_info_dict[source_file_path] = CoverageInfo(
                source_file_path, self.pa_info, self.ast_info
            )

    def _parse_runtime_info_file(self, runtime_file_path):
        """Parse line-execution counts from the RuntimeInfo CSV file.

        Raises FileNotFoundError if the file is missing and ValueError if
        it does not have a .csv extension.
        """
        if not Path(runtime_file_path).exists():
            raise FileNotFoundError(f"文件 {runtime_file_path} 不存在")
        if not runtime_file_path.endswith(".csv"):
            raise ValueError(f"文件 {runtime_file_path} 格式错误，应为csv文件")
        with open(runtime_file_path, "r", encoding="utf-8") as file:
            reader = csv.DictReader(file)
            # CSV layout:
            # function_id,function_offset,line_count
            # 58146,34,5
            # 58146,0,1
            # ...
            for row in reader:
                func_id = int(row[FUNCTION_ID_KEY])
                func_offset = int(row[FUNCTION_OFFSET_KEY])
                line_count = int(row[LINE_COUNT_KEY])
                func_info = self.pa_info.get(func_id)
                if func_info is None:
                    # Function id absent from the disassembly: skip the row
                    # instead of raising KeyError (record_execution already
                    # treats this case as ignorable).
                    continue
                self.record_execution(
                    func_info[FILE_NAME_KEY], func_id, func_offset, line_count
                )

    def _parse_ast_file(self, ast_file_path):
        """Parse branch information from the AST JSON file (WIP).

        Raises FileNotFoundError if missing, ValueError on a wrong
        extension, empty file, or malformed JSON.
        """
        if not Path(ast_file_path).exists():
            raise FileNotFoundError(f"文件 {ast_file_path} 不存在")
        if not ast_file_path.endswith(".json"):
            raise ValueError(f"文件 {ast_file_path} 格式错误，应为json文件")
        if Path(ast_file_path).stat().st_size == 0:
            raise ValueError(f"文件 {ast_file_path} 为空")

        try:
            with open(ast_file_path, "r", encoding="utf-8") as file:
                data = json.load(file)
        except json.JSONDecodeError as e:
            # Chain the original decode error for easier debugging.
            raise ValueError(f"文件 {ast_file_path} JSON格式错误: {e}") from e
        # TODO: extract branch information from `data` into self.ast_info.

    def _parse_pa_file(self, pa_file_path):
        """Parse the disassembly (.pa) file and build self.pa_info.

        Raises FileNotFoundError if missing and ValueError on a wrong
        extension.
        """
        if not Path(pa_file_path).exists():
            raise FileNotFoundError(f"文件 {pa_file_path} 不存在")
        if not pa_file_path.endswith(".pa"):
            raise ValueError(f"文件 {pa_file_path} 格式错误，应为pa文件")

        with open(pa_file_path, "r", encoding="utf-8") as file:
            content = file.read()
        # group(1): source file path segment, group(2): function name,
        # group(3): hex function offset (used as the function id),
        # group(4): the raw LINE_NUMBER_TABLE text.
        function_pattern = re.compile(
            r"\.function\s+\w+\s+([^\.\s<]+)(?:\.[^\.\s<]+)*\.([^\.\s<]+)\([^)]*\)\s+<.*?>\s+\{\s*#\s+offset:\s+(0x[0-9a-fA-F]+).*?"
            r"#\s+LINE_NUMBER_TABLE:\s*((?:#\s*line\s+\d+:\s+\d+\s*)+)",
            re.DOTALL,
        )
        for match in function_pattern.finditer(content):
            source_file_path = match.group(1)
            func_name = match.group(2).strip()
            func_id = int(match.group(3), 16)
            line_table_text = match.group(4)

            # Parse "line <source line>: <bytecode offset>" rows into
            # {offset: line number}.
            line_pattern = re.compile(r"line\s+(\d+):\s+(\d+)")
            line_table = {
                int(line_match.group(2)): int(line_match.group(1))
                for line_match in line_pattern.finditer(line_table_text)
            }

            self.pa_info[func_id] = {
                FILE_NAME_KEY: source_file_path,
                FUNCTION_NAME_KEY: func_name,
                LINE_TABLE_KEY: line_table,
            }

    def record_execution(self, source_file_path, func_id, func_offset, line_count):
        """Attribute *line_count* executions at *func_offset* of *func_id*.

        The line table maps bytecode offsets to source lines; the entry
        with the largest offset <= func_offset wins. An execution matched
        at offset 0 additionally counts as one function invocation.
        """
        if source_file_path not in self.coverage_info_dict:
            # Bug fix: CoverageInfo requires pa/ast info; the previous
            # call passed only the path and raised TypeError here.
            self.coverage_info_dict[source_file_path] = CoverageInfo(
                source_file_path, self.pa_info, self.ast_info
            )

        func_info = self.pa_info.get(func_id)
        if not func_info:
            return

        line_table = func_info[LINE_TABLE_KEY]
        # Largest table offset not beyond the executed offset.  The old
        # loop broke out early and therefore silently relied on the dict
        # keys being in ascending order; max() over a filtered view works
        # regardless of insertion order.
        eligible = [offset for offset in line_table if offset <= func_offset]
        if not eligible:
            return
        matched_offset = max(eligible)

        coverage_info = self.coverage_info_dict[source_file_path]
        if matched_offset == 0:
            coverage_info.record_func_execution(func_info[FUNCTION_NAME_KEY])
        coverage_info.record_line_execution(line_table[matched_offset], line_count)

    def generate_coverage_info(self, output_file_path):
        """Write all collected records to *output_file_path* in LCOV format.

        Defaults to "lcov.info" when *output_file_path* is None and
        appends ".info" when missing.
        """
        if output_file_path is None:
            output_file_path = "lcov.info"
        if not output_file_path.endswith(".info"):
            output_file_path += ".info"
        # encoding added: paths and names may contain non-ASCII characters.
        with open(output_file_path, "w", encoding="utf-8") as file:
            file.write("TN:\n")
            for coverage_info in self.coverage_info_dict.values():
                file.write(coverage_info.to_string())
        print(f"覆盖率信息已导出到 {output_file_path}")


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("pa_file_path", help="反汇编文件")
    parser.add_argument("ast_file_path", help="AST信息文件")
    parser.add_argument("runtime_file_path", help="运行时信息文件")
    parser.add_argument("--output", type=str, help="生成文件名")
    args = parser.parse_args()
    try:
        coverage_info_generator = CoverageInfoGenerator(
            args.pa_file_path, args.runtime_file_path, args.ast_file_path
        )
        coverage_info_generator.generate_coverage_info(args.output)
    # Bug fix: the broad `except Exception` used to come FIRST, which made
    # the FileNotFoundError and ValueError handlers below it unreachable
    # dead code. Narrowest handlers must precede the catch-all.
    except FileNotFoundError as e:
        print(f"文件未找到: {e}")
    except ValueError as e:
        print(f"文件格式错误: {e}")
    except Exception as e:
        print(f"解析错误: {e}")
