#!/bin/env python3

import json
import logging
import os
import re
import subprocess
import sys
import tempfile
from pathlib import Path
from typing import Callable, Iterable, List, Optional, Set, Union

import networkx as nx
import tqdm

from chernc.codeql.codeql_database import CodeqlDatabase
from chernc.constants import (CHERNC_RESOURCES_DIR, DEFAULT_LOGGER_DIR,
                              DEFAULT_LOGGER_FILE_NAME)
from chernc.logging import build_logger
from chernc.utils.hash_utils import calculate_md5

from .code_chunk import CodeChunk, CodeChunkPool, MergeFailed
from .code_slice import CodeSlice, CodeSlicePool

# Module-level logger shared by everything in this file.
logger: logging.Logger = build_logger(__name__, logger_filename=DEFAULT_LOGGER_FILE_NAME, logger_dir=DEFAULT_LOGGER_DIR)

# Recognized source-file extensions.
# NOTE(review): unused within this module — presumably consumed by callers; verify before removing.
C_SUFFIXES = [".c"]
CPP_SUFFIXES = [".cpp", ".cc", ".cxx"]

# NOTE(review): import placed after module-level code; conventionally belongs in the import block at the top.
from .builtin_macros import BUILTIN_MACROS


def inside_project(path: Union[Path, str], project_path: Union[Path, str]) -> bool:
    """Return True if *path* points at *project_path* itself or somewhere beneath it.

    Both arguments may be ``str`` or ``pathlib.Path``. Leading whitespace is
    stripped from both (matching the historical behavior for CodeQL ``file://``
    locations). Unlike a naive ``startswith`` prefix test, a sibling directory
    such as ``/home/proj2`` is NOT considered inside ``/home/proj``: the match
    must end exactly at the project path or continue with a path separator.

    Args:
        path: candidate file or directory path.
        project_path: root directory of the project.

    Returns:
        bool: whether *path* lies inside *project_path*.
    """
    path = str(path).lstrip()
    project_path = str(project_path).lstrip()
    if not path.startswith(project_path):
        return False
    remainder = path[len(project_path):]
    # Accept: exact match, a project path that already ends with a separator,
    # or a remainder that starts a new path component.
    return (
        remainder == ""
        or project_path.endswith(("/", os.sep))
        or remainder.startswith(("/", os.sep))
    )


class Slicer:
    """Slices a C/C++ project into dependency-ordered code chunks.

    Runs CodeQL queries against the project's database to collect macros,
    source files, top-level definitions and their dependencies, registers each
    top-level declaration as a :class:`CodeSlice`, then merges slices into
    size-bounded, topologically ordered :class:`CodeChunk` objects.
    """

    def __init__(self, codeql_database: CodeqlDatabase, config: Optional[List[str]] = None):
        """
        Args:
            codeql_database: CodeQL database of the project to slice.
            config: optional ``"NAME"`` or ``"NAME=VALUE"`` macro definitions
                passed to the compiler when dumping ASTs. Defaults to none.
        """
        project_hash = calculate_md5(os.path.abspath(codeql_database.project_path))
        self.codeql_database: CodeqlDatabase = codeql_database
        self.code_slices: CodeSlicePool = CodeSlicePool(namespace=f"CodeSlice-{project_hash}")
        self.c_files: Set[str] = set()
        self.cpp_files: Set[str] = set()
        # Files defining main(); slices from two different main files must
        # never end up in the same chunk.
        self.main_files: Set[str] = set()
        # (name, value) pairs; a bare "NAME" gets an empty value.
        self.config_macros: List[tuple] = []
        # `config or []` instead of a mutable default argument.
        for macro in config or []:
            name, _, value = macro.partition("=")
            self.config_macros.append((name, value))

    def _load_include_path(self, path: Union[str, Path]) -> None:
        """Load compiler include paths from the ``include_path`` query result."""
        with open(path, "r", encoding="utf-8") as f:
            # each result tuple holds a single column: the include path
            self.include_paths = [p for p, in json.load(f)["#select"]["tuples"]]

    def _load_macros(self, path: Union[str, Path]) -> None:
        """Register project macro definitions as code slices.

        Macros whose location is ``file://:0:0:0:0`` have no source location;
        they are treated as configuration macros (unless built-in) instead of
        becoming slices.
        """
        with open(path, "r", encoding="utf-8") as f:
            for name, loc, value in json.load(f)["#select"]["tuples"]:
                m = re.match(r"file://(.*):(.*):.*:(.*):.*", loc)
                if m is None:
                    continue

                # a location-less macro is likely defined on the command line
                if m[1] == "":
                    if name == "" or name in BUILTIN_MACROS:
                        continue
                    self.config_macros.append((name, value))
                    # an empty file path can never be inside the project
                    continue
                if not inside_project(m[1], self.codeql_database.project_path):
                    continue
                file = m[1]
                start_line = int(m[2])
                end_line = int(m[3])

                code = CodeSlice(pool=None, file=file, start_line=start_line, end_line=end_line)
                code.decls.append(("Macro", name))
                self.code_slices.add_code_slice(code)

    def _load_source_files(self, path: Union[str, Path]) -> None:
        """Record each project source file as C or C++ and slice it."""
        with open(path, "r", encoding="utf-8") as f:
            for file, file_type in json.load(f)["#select"]["tuples"]:
                if not inside_project(file, self.codeql_database.project_path):
                    continue
                if file_type == "CFile":
                    self.c_files.add(file)
                elif file_type == "CppFile":
                    self.cpp_files.add(file)
                else:
                    logger.warning(f"{file} is not a C or Cpp file, but {file_type}")
                    # fix: previously fell through to getTopLevelSlices, which
                    # raises ValueError for files of unknown language
                    continue
                self.getTopLevelSlices(file)

    def _load_defintions(self, path: Union[str, Path]) -> None:
        """Attach (kind, name) declaration pairs to the slices covering them.

        NOTE: the name keeps its historical misspelling ("defintions") so
        existing callers are unaffected.
        """
        with open(path, "r", encoding="utf-8") as f:
            for decl_type, loc, name in json.load(f)["#select"]["tuples"]:
                m = re.match(r"file://(.*):(.*):.*:(.*):.*", loc)
                if m is None:
                    continue
                file = m[1]
                start_line = int(m[2])
                end_line = int(m[3])
                if not inside_project(file, self.codeql_database.project_path):
                    continue
                if name == "main":
                    self.main_files.add(file)
                code = self.code_slices.find_code_slice(file, start_line, end_line)
                if code is not None:
                    code.decls.append((decl_type, name))
                else:
                    logger.warning(f"{decl_type} @ {file}:{start_line}-{end_line} is not covered by any code slices")

    def _load_dependency(self, results_path: Path) -> None:
        """Wire up depends-on edges between code slices from query results.

        Args:
            results_path: directory of decoded dependency query JSON files.
        """
        code_slices = self.code_slices
        dependencies = set()
        for file in results_path.glob("*.json"):
            with open(file, "r", encoding="utf-8") as f:
                dependencies.update(tuple(t) for t in json.load(f)["#select"]["tuples"])

        for depender_loc, dependee_loc in dependencies:
            mer = re.match(r"file://(.*):(.*):.*:(.*):.*", depender_loc)
            mee = re.match(r"file://(.*):(.*):.*:(.*):.*", dependee_loc)
            if mer is None or mee is None:
                continue
            if not inside_project(mer[1], self.codeql_database.project_path) or not inside_project(
                mee[1], self.codeql_database.project_path
            ):
                continue

            depender = code_slices.find_code_slice(mer[1], int(mer[2]), int(mer[3]))
            dependee = code_slices.find_code_slice(mee[1], int(mee[2]), int(mee[3]))

            if depender is None:
                logger.warning(f"{mer[1]}:{mer[2]}-{mer[3]} is not covered by any top level code slices")
                continue

            if dependee is None:
                logger.warning(f"{mee[1]}:{mee[2]}-{mee[3]} is not covered by any top level code slices")
                continue
            # edges between two distinct main files are dropped: their slices
            # may never be merged into one chunk anyway
            if dependee.file in self.main_files and depender.file in self.main_files and dependee.file != depender.file:
                logger.debug(f"{depender.loc} depends on {dependee.loc}, but they are in different main files.")
                continue
            depender.depends_on(dependee)

    def slice(
        self, results_path: Optional[str] = None, max_size: int = 4000 * 4, size_calc: Callable[[str], int] = len
    ) -> List[CodeChunk]:
        """Run all queries, build slices, and merge them into ordered chunks.

        Args:
            results_path (Optional[str]): directory to keep decoded query
                results in; a temporary directory is used when None.
            max_size (int): the maximum size for each chunk's size plus its
                dependencies' sizes.
            size_calc (Callable[[str], int]): a function to calculate the size
                of a string.
        Returns:
            List[CodeChunk]: the code chunks in topological order.
        """
        if results_path is None:
            temp_dir = tempfile.TemporaryDirectory()
            query_results_path = Path(temp_dir.name)
        else:
            temp_dir = None
            query_results_path = Path(results_path)
            # exist_ok makes a separate existence check unnecessary
            os.makedirs(query_results_path, exist_ok=True)
        # fix: log the full path, not just the last component (.name)
        logger.info(f"query results will be saved in {query_results_path}")
        meta_results = query_results_path / "meta"
        dependency_results = query_results_path / "dependency"
        os.makedirs(meta_results, exist_ok=True)
        os.makedirs(dependency_results, exist_ok=True)
        try:
            self.codeql_database.run_queries(queries_path=os.path.join(CHERNC_RESOURCES_DIR, "ql/meta"))
            self.codeql_database.decode_results(
                queries_path=os.path.join(CHERNC_RESOURCES_DIR, "ql/meta"),
                pack="meta",
                query_results_path=meta_results,
            )
            self._collect_metadata(meta_results)

            self.codeql_database.run_queries(queries_path=os.path.join(CHERNC_RESOURCES_DIR, "ql/dependency"))
            self.codeql_database.decode_results(
                queries_path=os.path.join(CHERNC_RESOURCES_DIR, "ql/dependency"),
                pack="dependency",
                query_results_path=dependency_results,
            )
            self._load_dependency(dependency_results)

            # freeze the slice pool and let it compute every slice's size
            self.code_slices.seal(size_calc)

            chunks = CodeChunkPool(
                slice_pool=self.code_slices,
                slices=self.code_slices.all_code_slices(),
                max_size=max_size,
            )

            checker = self.getMainExclusiveChecker()

            bar = tqdm.tqdm(total=len(chunks.chunks), desc="Merging Chunks", unit="chunk", file=sys.stdout)

            def bar_bind(n: int) -> None:
                # progress callback: the chunk count shrinks as chunks merge
                bar.n = n
                bar.refresh()

            constraint_checkers = [
                ("Main files can not be merge", checker),
                ("chunk's size + its depended slices' sizes <= max size", chunks.under_max_size),
            ]

            # heuristic: put a typedef and its single origin together
            for code_slice in self.code_slices.all_code_slices():
                if code_slice.isTypedef() and len(code_slice.depends) == 1:
                    try:
                        chunks.put_together(
                            code_slice.depends | {code_slice},
                            binders=(bar_bind,),
                            constraint_checkers=constraint_checkers,
                        )
                    except MergeFailed:
                        pass

            chunks.modularity_strategy(binders=[bar_bind], constraint_checkers=constraint_checkers)
            chunks.share_dependency_strategy(binders=[bar_bind], constraint_checkers=constraint_checkers)
            bar.close()
            results = chunks.get_order()
        except Exception:
            # clean up only the temporary directory we created ourselves; a
            # caller-provided results_path is kept for debugging
            if temp_dir is not None:
                temp_dir.cleanup()
            raise

        return results

    def getMainExclusiveChecker(self) -> Callable[[Iterable[CodeChunk]], bool]:
        """Return a constraint checker forbidding merges that span >1 main file."""

        def checker(chunks: Iterable[CodeChunk]) -> bool:
            files = {s.file for c in chunks for s in c.slices}
            return sum(f in self.main_files for f in files) <= 1

        return checker

    def _collect_metadata(self, query_results_path: Path) -> None:
        """Load all metadata query results and build the file-relation graph."""
        self._load_include_path(query_results_path / "include_path.json")
        self._load_macros(query_results_path / "macro.json")
        self._load_source_files(query_results_path / "source_files.json")
        self._load_defintions(query_results_path / "definition.json")
        self._load_const_global_variables(query_results_path / "const_global.json")

        # directed graph over project files; edges come from #include relations
        # and header declarations
        self.file_relation: nx.DiGraph = nx.DiGraph()
        file_relation = self.file_relation
        file_relation.add_nodes_from(self.c_files | self.cpp_files)

        # both result files share the same (file1, file2) tuple shape
        for result_name in ("include.json", "header_decl.json"):
            with open(query_results_path / result_name, "r", encoding="utf-8") as f:
                for file1, file2 in json.load(f)["#select"]["tuples"]:
                    if not inside_project(file1, self.codeql_database.project_path) or not inside_project(
                        file2, self.codeql_database.project_path
                    ):
                        continue
                    file_relation.add_edge(file1, file2)

    def _load_const_global_variables(self, result_path: Union[str, Path]) -> None:
        """Record names of const global variables on their covering slices."""
        with open(result_path, "r", encoding="utf-8") as f:
            for loc, name in json.load(f)["#select"]["tuples"]:
                m = re.match(r"file://(.*):(.*):.*:(.*):.*", loc)
                if m is None:
                    continue
                file = m[1]
                start_line = int(m[2])
                end_line = int(m[3])
                if not inside_project(file, self.codeql_database.project_path):
                    continue

                code = self.code_slices.find_code_slice(file, start_line, end_line)
                if code is not None:
                    code.extra["const"].append(name)

    def getTopLevelSlices(self, file: str) -> None:
        """Parse *file* with clang's JSON AST dump and register one code slice
        per top-level declaration (including declarations pulled in from
        project headers).

        Raises:
            ValueError: if *file* is neither a known C nor C++ file.
        """
        logger.info(f"slicing {file}")
        if file in self.cpp_files:
            compiler = "clang++"
        elif file in self.c_files:
            compiler = "clang"
        else:
            raise ValueError("Cannot determine C or Cpp: " + file)
        commands = (
            [compiler, "-Xclang", "-ast-dump=json", "-fsyntax-only", "-w", file]
            + ["-I" + path for path in self.include_paths]
            + [f"-D{macro}={value}" for macro, value in self.config_macros]
        )
        logger.info(" ".join(commands))
        subprocess_result = subprocess.run(
            commands,
            text=True,
            check=False,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        # clang may report errors; we only log them and keep whatever AST it
        # managed to produce
        if subprocess_result.returncode != 0 and subprocess_result.stderr is not None:
            logger.error(subprocess_result.stderr)
        ast = json.loads(subprocess_result.stdout)
        # the "file" property is sticky across AST entries (entries omit it
        # when unchanged), so we track the current file while iterating

        def iterate(entities, file: str) -> None:
            """Walk top-level AST entities, descending into namespaces and
            extern-"C" blocks, and register a slice per declaration."""
            for entity in entities:
                if len(entity["loc"]) == 0:
                    continue
                if "expansionLoc" in entity["loc"]:
                    loc = entity["loc"]["expansionLoc"]
                    if "file" in entity["loc"]["spellingLoc"]:
                        file = entity["loc"]["spellingLoc"]["file"]
                    if "file" in entity["loc"]["expansionLoc"]:
                        file = entity["loc"]["expansionLoc"]["file"]
                else:
                    loc = entity["loc"]
                    if "file" in entity["loc"]:
                        file = entity["loc"]["file"]
                begin = entity["range"]["begin"].get("expansionLoc", entity["range"]["begin"])
                end = entity["range"]["end"].get("expansionLoc", entity["range"]["end"])
                # header slices are collected while parsing source files:
                # parsing a header on its own doesn't work, because it may
                # depend on other headers included by the source file

                if not inside_project(file, self.codeql_database.project_path):
                    continue

                if entity["kind"] in ("NamespaceDecl", "LinkageSpecDecl"):
                    # robustness: an empty namespace/linkage spec has no "inner"
                    iterate(entity.get("inner", []), file)
                    continue
                if not entity["kind"].endswith("Decl"):
                    logger.warning(f"{entity['kind']} in top-level, what is this ?")
                    continue
                if "line" not in loc:
                    continue
                start_line = loc["line"]
                end_line = start_line
                if "line" in begin:
                    start_line = begin["line"]
                    end_line = start_line
                if "line" in end:
                    end_line = end["line"]

                self.code_slices.add_code_slice(
                    CodeSlice(
                        pool=self.code_slices,
                        file=file,
                        start_line=start_line,
                        end_line=end_line,
                    )
                )

        iterate(ast["inner"], file)
