import argparse
import json
import os
import re
import subprocess
import sys
import threading
from collections import defaultdict
from concurrent.futures import ThreadPoolExecutor
from datetime import datetime
from typing import List

import pytz

'''
Run this script from the root of the code (source) directory.
'''


class Analysis:
    """Detect removable ``#include`` directives for a build target.

    Workflow (see :meth:`handle`): build the target once, dump its ninja
    compile commands, then for every compiled C/C++ source comment out each
    include in turn and re-run that file's original compile command; includes
    whose removal still compiles are recorded and finally written to
    ``result.json``. Must be run from the root of the code tree (``build.sh``
    and ``prebuilts`` paths are relative).
    """

    def __init__(
        self, target_name: str, exclude_path: str, include_path: str,
        enable_ccache: bool, product_name: str, thread_nums: int
    ):
        # How many compile commands are submitted to the pool per batch.
        self._chunk_size = 500
        self._product_name = product_name
        # Protects _result_map, which worker threads update concurrently.
        self._lock = threading.Lock()
        self._target_name = target_name
        self._thread_nums = thread_nums
        self._enable_ccache = enable_ccache
        # Files already scheduled in the parallel pass; further commands for
        # the same file are deferred so two threads never edit one file.
        self._handled_source_files = set()
        # source file path -> includes that proved removable.
        self._result_map = defaultdict(list)
        self._unprocessed_compile_command = []
        self._pool = ThreadPoolExecutor(max_workers=thread_nums)
        self._exclude_path = self._get_exclude_paths(exclude_path)
        self._include_path = include_path
        self._include_pattern = re.compile(r'#include\s+["<](.+?)[">]')
        self._compilers = [
            "prebuilts/clang/ohos/linux-x86_64/llvm/bin/clang++",
            "prebuilts/clang/ohos/linux-x86_64/llvm/bin/clang"
        ]
        self._out_path = f"out/{self._product_name}"

    # Compiled once at class creation; matches the source file given to the
    # compiler via -c (previously recompiled on every _get_source_file call).
    _source_file_pattern = re.compile(r'-c\s+(\S+\.(?:cpp|c))')

    @classmethod
    def _get_exclude_paths(cls, path: str) -> List[str]:
        """Split the comma-separated exclude option; third_party is always excluded."""
        if not path:
            return ["third_party/"]
        return [item.strip() for item in path.strip().split(",")] + ["third_party"]

    def handle(self):
        """Run the full pipeline; any unexpected error is reported, not raised."""
        try:
            self._execute_compile()
            self._generate_compile_commands()
            self._parse_compile_commands()
            self._post_compile()
            self._post_process()
        except Exception as e:
            print(f"error is {e}")

    def _execute_compile(self):
        """Build the target once so later single-file re-compiles are incremental."""
        print(f"[{self._get_now_time()}] start execute compile")
        cache_option = "--ccache" if self._enable_ccache else "--ccache=false"
        command = ['./build.sh', '--product-name', self._product_name, cache_option, "--build-target", f'{self._target_name}']
        print("command:{}".format(command))
        result = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        if result.returncode != 0:
            print(f"[{self._get_now_time()}] execute compile fail,error info: {result.stderr.decode('utf-8')}")
            sys.exit(1)
        print(f"[{self._get_now_time()}] execute compile successfully")

    def _generate_compile_commands(self):
        """Dump the target's compile commands (ninja -t commands) into commands.txt."""
        print(f"[{self._get_now_time()}] start generate compile commands")
        command = f"./prebuilts/build-tools/linux-x86/bin/ninja" \
                    f" -C out/{self._product_name}" \
                    f" -w dupbuild=warn -t commands {self._target_name} > commands.txt"
        result = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
        if result.returncode != 0:
            print(f"[{self._get_now_time()}] generate compile command fail,error info: {result.stderr.decode('utf-8')}")
            sys.exit(1)
        print(f"[{self._get_now_time()}] generate compile command successfully")

    def _parse_compile_commands(self):
        """Read commands.txt and run the parallel include-elimination pass."""
        print(f"[{self._get_now_time()}] start parse")
        with open("commands.txt", "r", encoding="utf-8") as f:
            self._do_parse_compile_commands(f)

    def _do_parse_compile_commands(self, f):
        """Select one compile command per source file and compile them in chunks.

        Duplicate commands for a file (same source built for several targets)
        are queued for the sequential post pass, so only one worker ever
        rewrites a given file at a time.
        """
        lines = []
        # NOTE: the original read loop caught read errors and retried, which
        # could spin forever on a persistent decode error (file position may
        # not advance); plain iteration lets a real I/O error surface to
        # handle() instead.
        for line in f:
            if not any(compiler in line for compiler in self._compilers):
                continue
            file_path = self._get_source_file(line)
            if not file_path:
                continue
            if file_path in self._handled_source_files:
                self._unprocessed_compile_command.append(line)
                continue
            self._handled_source_files.add(file_path)
            lines.append(line)
        for start in range(0, len(lines), self._chunk_size):
            chunk = lines[start:start + self._chunk_size]
            future_tasks = [self._pool.submit(self._do_cpp_file_compile, line=line) for line in chunk]
            # Drain the pool between chunks so failures surface early.
            for task in future_tasks:
                task.result()

    def _do_cpp_file_compile(self, line: str):
        """Probe every #include of the file compiled by *line* for removability.

        Each include is commented out and the original compile command is
        re-run from the build output directory; if compilation still succeeds
        the include is recorded via _update_result_map. The file is restored
        to its original content afterwards, so findings live only in the map.
        """
        try:
            file_path = self._get_source_file(line)
            if not file_path:
                return
            if any(path in file_path for path in self._exclude_path):
                return
            # The original condition also tested len(self._exclude_path) > 0,
            # which is always true (the list always contains third_party);
            # the effective filter is the include path alone.
            if self._include_path and self._include_path not in file_path:
                return
            if not os.path.exists(file_path):
                # Generated sources live under the build output directory.
                file_path = os.path.join(self._out_path, file_path)
            with open(file_path, "r", encoding="utf-8") as f:
                source_code = f.read()
            original_source = source_code
            # The file's own matching header must never be removed.
            file_name = os.path.splitext(os.path.basename(file_path))[0]
            for include in self._include_pattern.findall(source_code):
                _, ext = os.path.splitext(include)
                if ext in [".c", ".cpp"]:
                    continue
                if extract_core_name(include) == file_name:
                    continue
                # NOTE: replacement assumes exactly one space after #include;
                # a multi-space directive matched by the regex would be left
                # untouched here — TODO confirm whether that occurs in tree.
                modified_code = source_code.replace(f'#include <{include}>', f'// #include <{include}>')
                modified_code = modified_code.replace(f'#include "{include}"', f'// #include "{include}"')
                with open(file_path, "w") as f:
                    f.write(modified_code)
                command = f"cd {self._out_path};{line.rstrip()}"
                result = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
                if result.returncode != 0:
                    # Include is needed: un-comment it for subsequent probes.
                    source_code = modified_code.replace(f'// #include "{include}"', f'#include "{include}"')
                    source_code = source_code.replace(f'// #include <{include}>', f'#include <{include}>')
                    continue
                source_code = modified_code
                self._update_result_map(file_path, include)
            # Restore the untouched file. (The original wrote source_code and
            # immediately overwrote it with original_source — the first write
            # was dead code and has been dropped.)
            with open(file_path, "w") as f:
                f.write(original_source)
        except Exception as e:
            print(f"current path is: {os.getcwd()}: error is {e}")

    @classmethod
    def _get_source_file(cls, line: str) -> str:
        """Extract the compiled source path from a command line, '' if absent.

        '../' prefixes are stripped because commands run from the out dir
        while this script resolves paths from the source root.
        """
        match = cls._source_file_pattern.search(line)
        if not match:
            return ""
        return match.group(1).replace("../", "")

    def _update_result_map(self, key: str, value: str):
        """Thread-safely record that include *value* is removable from file *key*."""
        with self._lock:
            self._result_map[key].append(value)

    def _post_compile(self):
        """Sequentially process deferred duplicate commands and merge results.

        A header counts as removable for a file only if it was removable under
        every compile command for that file, hence the set intersection below.
        """
        print(f"[{self._get_now_time()}] start execute post compile")
        parallel_compile_result_map = self._result_map
        self._result_map = defaultdict(list)
        file_set = set()
        commands = []
        unprocessed_commands = self._unprocessed_compile_command
        temp_unprocessed_commands = []
        while True:
            try:
                # Pick at most one pending command per file for this round;
                # the rest wait for the next iteration of the while loop.
                for line in unprocessed_commands:
                    file = self._get_source_file(line)
                    if not file:
                        continue
                    if file in file_set:
                        temp_unprocessed_commands.append(line)
                        continue
                    file_set.add(file)
                    commands.append(line)
                if not commands:
                    break
                unprocessed_commands = temp_unprocessed_commands
                file_set.clear()
                temp_unprocessed_commands = []
                self._post_parallel_compile(commands)
                commands = []
                for file, headers in self._result_map.items():
                    temp_headers = parallel_compile_result_map.get(file, [])
                    if not headers:
                        parallel_compile_result_map[file] = headers
                    else:
                        parallel_compile_result_map[file] = list(set(temp_headers).intersection(set(headers)))
                self._result_map = defaultdict(list)
            except Exception as e:
                print(f"parallel compile fail, error is {e}")
                break
        self._result_map = parallel_compile_result_map
        print(f"[{self._get_now_time()}] parse successfully")

    def _post_parallel_compile(self, commands: List[str]):
        """Run *commands* on the pool in chunks, draining between chunks.

        Bug fix: the futures list is now reset per chunk; previously completed
        futures accumulated across chunks and were re-awaited every time.
        """
        for start in range(0, len(commands), self._chunk_size):
            chunk = commands[start:start + self._chunk_size]
            future_tasks = [self._pool.submit(self._do_cpp_file_compile, line=command) for command in chunk]
            for task in future_tasks:
                task.result()

    def _post_process(self):
        """Persist the final file -> removable-includes map as JSON."""
        with open("result.json", "w") as f:
            json.dump(self._result_map, f)

    @classmethod
    def _get_now_time(cls) -> str:
        """Current Asia/Shanghai time for log lines (separator typo '~' fixed to '-')."""
        return datetime.now(pytz.timezone('Asia/Shanghai')).strftime("%Y-%m-%d %H:%M:%S")

def extract_core_name(include_name):
    """Return the last path component of *include_name* without its extension.

    E.g. 'foundation/ability/ability.h' -> 'ability'. Returns None when
    nothing matches (e.g. empty string or a trailing '/').
    """
    found = re.search(r'([^/]+?)(?:\.\w+)?$', include_name)
    return found.group(1) if found else None

def parse_args(args):
    """Parse the command line.

    Args:
        args: argv slice (excluding the program name).

    Returns:
        argparse.Namespace with product_name, target_name, exclude_path,
        include_path, enable_ccache and thread_nums attributes.
    """
    parser = argparse.ArgumentParser()
    # Bug fix: the help text promised "default rk3568" but no default was
    # set, so product_name could reach callers as None.
    parser.add_argument(
        "--product-name", required=False, default="rk3568", help="product-name, default rk3568")
    parser.add_argument(
        "--target-name", required=True, help="compile target name")

    # c/cpp files under these paths are excluded from the scan.
    parser.add_argument(
        "--exclude-path", required=False, help="path of c/cpp need to exclude, multiple paths separated by commas"
    )
    parser.add_argument(
        "--include-path", required=False, default="", help="path of c/cpp need to include"
    )
    parser.add_argument(
        "--enable-ccache", required=False, default=False, help="whether to enable ccache, ccache is disabled by default"
    )
    parser.add_argument("--thread-nums", required=False, help="concurrent thread count")
    options = parser.parse_args(args)
    return options


def main(args):
    """Entry point: build an Analysis instance from CLI options and run it."""
    options = parse_args(args)
    # Fall back to the default product when none was given on the CLI.
    product_name = options.product_name or "rk3568"
    instance = Analysis(
        target_name = options.target_name, exclude_path = options.exclude_path,
        enable_ccache = options.enable_ccache, include_path = options.include_path,
        # Bug fix: the resolved product_name was computed but the raw
        # (possibly None) options.product_name was passed through.
        product_name = product_name, thread_nums = int(options.thread_nums or 50)
    )
    instance.handle()


if __name__ == "__main__":
    # Run only when executed as a script; main()'s return value (None -> 0)
    # becomes the process exit status.
    sys.exit(main(sys.argv[1:]))