from collections import defaultdict, deque

from dataclasses import dataclass, field
import heapq
import itertools
import traceback
from typing import Any, Callable, Deque, Dict, Generator, Iterable, List, Optional, Set, Tuple, TypeAlias, Union
import networkx as nx
from sortedcontainers import SortedDict, SortedSet
from .code_slice import CodeSlice, CodeSlicePool, CodeSliceID

# Identifier type for chunks. Ids are stringified component indices assigned by
# CodeChunkPool.__init__ and stay stable for the lifetime of a chunk.
CodeChunkID: TypeAlias = str


class CodeChunk(object):
    """
    A code chunk is a group of code slices. A code chunk won't depend on itself.

    Attributes:
        pool: the owning CodeChunkPool, or None for a detached chunk.
        id: unique identifier of this chunk within its pool.
        slices: code slices in this chunk, kept in topo order (sorted by slice level).
        size: total size of the contained slices.
        level: topological level maintained by the pool (a chunk's level is kept
            below the levels of the chunks that depend on it).
        w_depends: dependency weights keyed by the id of the chunk depended on.
        w_depended: reverse-dependency weights keyed by the id of the depending chunk.
    """

    pool: Optional["CodeChunkPool"]
    id: "CodeChunkID"
    slices: List["CodeSlice"]  # must in the topo order
    size: int
    level: int
    w_depends: Dict["CodeChunkID", int]
    w_depended: Dict["CodeChunkID", int]

    def __init__(self, pool: Optional["CodeChunkPool"], id: "CodeChunkID") -> None:
        self.pool: Optional["CodeChunkPool"] = pool
        self.id: "CodeChunkID" = id
        self.slices: List["CodeSlice"] = []
        self.size: int = 0
        self.w_depends: Dict["CodeChunkID", int] = defaultdict(int)
        self.w_depended: Dict["CodeChunkID", int] = defaultdict(int)
        self.level = 0

    @property
    def text(self) -> str:
        """Concatenated text of all slices, in their stored (topo) order."""
        return "\n".join(s.text for s in self.slices)

    def swallow(self, other: "CodeChunk"):
        """
        This will put slices in other chunk to this chunk, update dependencies, and concat text.

        Dependency edges of ``other`` are transferred to this chunk; self-edges
        created by the transfer are dropped by ``depends_on``. Note that
        ``other.slices`` is intentionally left intact so the pool can re-map the
        moved slices afterwards (see CodeChunkPool.__do_merge).

        Raises:
            ValueError: if this chunk has no pool or the chunks belong to
                different pools.
        """
        if self.pool is None:
            raise ValueError("Cannot swallow a CodeChunk from None pool.")
        if self.pool != other.pool:
            raise ValueError("Cannot swallow a CodeChunk from different pool.")

        self.slices = sorted(self.slices + other.slices, key=lambda s: s.level)
        self.size += other.size
        # Redirect other's outgoing edges to self, then detach them from other.
        # Iterate over copies because wont_depend_on mutates the dicts.
        for dep_i, w in list(other.w_depends.items()):
            self.depends_on(self.pool.get_chunk_by_id(dep_i), w)
            other.wont_depend_on(self.pool.get_chunk_by_id(dep_i))
        # Redirect other's incoming edges to self likewise.
        for dep_i, w in list(other.w_depended.items()):
            dep = self.pool.get_chunk_by_id(dep_i)
            dep.depends_on(self, w)
            dep.wont_depend_on(other)

    def __hash__(self):
        # Fall back to identity hash if called before __init__ assigned an id.
        if hasattr(self, "id"):
            return hash(self.id)
        else:
            return super(CodeChunk, self).__hash__()

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, CodeChunk):
            return NotImplemented
        return self.id == other.id

    def __lt__(self, other: object) -> bool:
        if not isinstance(other, CodeChunk):
            return NotImplemented
        return self.id < other.id

    @property
    def depends(self) -> Set["CodeChunk"]:
        """Chunks this chunk depends on, resolved through the pool."""
        if self.pool is None:
            raise ValueError("Cannot resolve dependencies of a CodeChunk from None pool.")
        return {self.pool.get_chunk_by_id(dep_i) for dep_i in self.w_depends.keys()}

    @property
    def depended(self) -> Set["CodeChunk"]:
        """Chunks that depend on this chunk, resolved through the pool."""
        if self.pool is None:
            raise ValueError("Cannot resolve dependents of a CodeChunk from None pool.")
        return {self.pool.get_chunk_by_id(dep_i) for dep_i in self.w_depended.keys()}

    def depends_on(self, dependency: Union["CodeChunk", "CodeChunkID"], weight: int):
        """
        This method add dependency of this code chunk. Do nothing if the dependency is itself.

        Args:
            dependency: the chunk (or its id) this chunk depends on.
            weight: weight added to the (possibly existing) edge.
        Raises:
            ValueError: if this chunk has no pool or the dependency belongs to a
                different pool.
        """
        if self.pool is None:
            raise ValueError("Cannot depends_on CodeChunk from None pool.")

        # CodeChunkID is a str alias; accept a bare id and resolve it.
        if isinstance(dependency, str):
            dependency = self.pool.get_chunk_by_id(dependency)

        if dependency.pool != self.pool:
            raise ValueError("Cannot depends_on CodeChunk from different pool.")

        if dependency == self:
            return

        self.w_depends[dependency.id] += weight
        dependency.w_depended[self.id] += weight

    def wont_depend_on(self, dependency: Union["CodeChunk", "CodeChunkID"]):
        """
        This method remove dependency of this code chunk. Do nothing if the dependency doesn't exist.
        Removes both directions of the edge (w_depends here, w_depended there).
        """
        if self.pool is None:
            raise ValueError("Cannot wont_depend_on CodeChunk from None pool.")
        if isinstance(dependency, str):
            dependency = self.pool.get_chunk_by_id(dependency)
        if dependency.id not in self.w_depends:
            return
        del self.w_depends[dependency.id]
        del dependency.w_depended[self.id]

    def get_depend_weight(self, dependency: Union["CodeChunk", "CodeChunkID"]) -> int:
        """
        Weight of the edge from this chunk to ``dependency``.
        Raises:
            ValueError: if this chunk has no pool or the edge does not exist.
        """
        if self.pool is None:
            raise ValueError("Cannot get CodeChunk as a dependency from None pool.")

        if isinstance(dependency, str):
            dep_id = dependency
        else:
            dep_id = dependency.id

        if dep_id not in self.w_depends:
            raise ValueError(f"CodeChunk {dep_id} is not a dependency of {self}.")

        return self.w_depends[dep_id]

    def get_depended_weight(self, depended: Union["CodeChunk", "CodeChunkID"]) -> int:
        """
        Weight of the edge from ``depended`` to this chunk.
        Raises:
            ValueError: if this chunk has no pool or the edge does not exist.
        """
        if self.pool is None:
            raise ValueError("Cannot get CodeChunk as a depended from None pool.")

        if isinstance(depended, str):
            dep_id = depended
        else:
            dep_id = depended.id

        if dep_id not in self.w_depended:
            raise ValueError(f"CodeChunk {dep_id} is not a w_depended of {self}.")

        return self.w_depended[dep_id]


class MergeFailed(Exception):
    """Raised when merging a set of chunks would violate a named constraint."""

    def __init__(self, name):
        text = f'Break constraint "{name}" in merge.'
        self.message = text
        super().__init__(text)


class BreakAtStart(Exception):
    """Raised when a chunk already violates a named constraint before any merge."""

    def __init__(self, name):
        text = f'Break constraint "{name}" at the start.'
        self.message = text
        super().__init__(text)


def chunks_size(chunks: "Iterable[CodeChunk]") -> int:
    """Total size of all given chunks."""
    total = 0
    for chunk in chunks:
        total += chunk.size
    return total


def has_main(chunk: "CodeChunk") -> bool:
    """Whether the chunk contains at least one main slice (per CodeSlice.isMain)."""
    for piece in chunk.slices:
        if piece.isMain():
            return True
    return False


class CodeChunkPool:
    """
    Code chunk manager ensuring code chunks satisfy some constraints at any time. The basic constraints are:
    - Dependencies of code chunks are acyclic.
    - Code chunk's size plus its dependencies' sizes are under a max size.
    Other constraints can be added via the ``constraint_checkers`` arguments of the
    merge/strategy methods.
    """

    # Pool of the underlying code slices (kept for callers; not used internally here).
    slice_pool: CodeSlicePool
    # All live chunks. NOTE(review): membership tests and get_chunk_by_id scan this
    # list, so lookups are O(n); a dict keyed by id would be faster — confirm before changing.
    chunks: List[CodeChunk]
    # Maps each slice id to the id of the chunk currently containing it.
    chunk_of_slice: Dict[CodeSliceID, CodeChunkID]
    # level: Dict[CodeChunk, int]
    # layers: Dict[int, Set[CodeChunk]]
    # Upper bound enforced by under_max_size (chunk sizes + external dependency slices).
    max_size: int

    def __init__(
        self,
        slice_pool: CodeSlicePool,
        slices: Iterable[CodeSlice],
        max_size=4000,
    ):
        """
        Initialize code chunks: each strongly connected component of the slice
        dependency graph becomes one chunk, so the resulting chunk graph is acyclic.
        Args:
            slice_pool (CodeSlicePool): pool owning the given slices.
            slices (Iterable[CodeSlice]): code slices
            max_size (int): max allowed size for a chunk plus its external dependencies.
        """
        self.slice_pool = slice_pool
        self.max_size = max_size
        slices = list(slices)
        g = nx.DiGraph()
        g.add_nodes_from(slices)
        g.add_edges_from((slice, dep) for slice in slices for dep in slice.depends)
        # slices should be used only once, because it might be some generator
        components: List[Set[CodeSlice]] = list(nx.strongly_connected_components(g))
        self.chunks = []
        self.chunk_of_slice = {}
        # self.binders = []
        # self.layers = defaultdict(set)
        for i, comp in enumerate(components):
            chunk = CodeChunk(pool=self, id=str(i))
            chunk.slices = sorted(comp, key=lambda s: s.level)
            chunk.size = sum(s.size for s in comp)
            if chunk not in self.chunks:
                self.chunks.append(chunk)
            for slice in comp:
                self.chunk_of_slice[slice.id] = chunk.id

        # Project slice-level dependency weights onto chunk-level edges
        # (edges inside one component become self-edges and are dropped by depends_on).
        for slice in slices:
            chunk_id = self.chunk_of_slice[slice.id]
            chunk = self.get_chunk_by_id(chunk_id)
            for dep_slice_id, w in slice.w_depends.items():
                dep_chunk_id = self.chunk_of_slice[dep_slice_id]
                dep_chunk = self.get_chunk_by_id(dep_chunk_id)
                chunk.depends_on(dep_chunk, w)

        # initialize level
        # self.layers[0] = self.chunks.copy()
        # Seed levels from chunks with no dependents; set_level propagates
        # strictly decreasing levels down the dependency edges.
        for chunk in self.chunks:
            if len(chunk.depended) == 0:
                self.set_level(chunk)

    # def enable_size_constraint(self):
    #     self.add_constraint_checker(
    #         "chunk's size + its depended slices' sizes <= max size",
    #         self.under_max_size,
    #     )

    # def add_constraint_checker(self, name: str, checker: Callable[[Iterable[CodeChunk]], bool]):
    #     """
    #     Add a constraint checker to the code chunk pool.
    #     Args:
    #         name (str): the name of the constraint
    #         checker (Callable[[Set[CodeChunk]], bool]): the checker function
    #     """
    #     self.constraint_checkers.append((name, checker))
    #     for chunk in self.chunks:
    #         if not checker((chunk,)):
    #             raise BreakAtStart(name)

    def under_max_size(self, chunks: Iterable[CodeChunk]) -> bool:
        """
        Check if the size of chunks plus depended slices' sizes is under max size.
        Args:
            chunks (Iterable[CodeChunk]): code chunks; assumed to be a re-iterable
                collection supporting ``in`` tests (not a one-shot generator).
        Returns:
            bool: whether the size of chunks is under max size.
        """
        # Slices outside `chunks` that slices inside `chunks` depend on.
        dependencies: Set[CodeSlice] = set()
        for chunk in chunks:
            for slice in chunk.slices:
                for dep in slice.depends:
                    dep_chunk_id = self.chunk_of_slice[dep.id]
                    dep_chunk = self.get_chunk_by_id(dep_chunk_id)
                    if dep_chunk in chunks:
                        continue
                    dependencies.add(dep)

        return chunks_size(chunks) + sum(dep.size for dep in dependencies) <= self.max_size

    # def bind_chunks_number(self, binder):
    #     self.binders.append(binder)

    def merge(
        self,
        chunk1: CodeChunk,
        chunk2: CodeChunk,
        binders: Iterable[Callable[[int], None]] = [],
        constraint_checkers: Iterable[Tuple[str, Callable[[Iterable[CodeChunk]], bool]]] = [],
    ):
        """
        Merge chunk1 and chunk2. No merge is performed if chunk1 == chunk2. This will tell a higher one and a lower one from the two chunks, and merge all chunks on all paths from the higher to the lower into the higher one.
        This will keep acyclicity of chunks.
        Args:
            chunk1 (CodeChunk): chunk 1
            chunk2 (CodeChunk): chunk 2
            binders (Iterable[Callable[[int], None]]): callbacks invoked with the new
                chunk count after a successful merge.
            constraint_checkers: (name, checker) pairs; a checker returning False for
                the chunks to be merged aborts the merge with MergeFailed.
        Returns:
            CodeChunk: the chunk that stays after merging.
        Raises:
            MergeFailed: if this merge breaks a constraint (e.g. exceeds the max size)
        """
        if chunk1 == chunk2:
            return chunk1
        # A chunk's level is kept strictly below its dependents' levels, so the
        # "higher" chunk is the one on the dependent (consumer) side.
        if chunk1.level >= chunk2.level:
            lower, higher = chunk2, chunk1
        else:
            lower, higher = chunk1, chunk2

        # forward: chunks reachable from `higher` along depends-edges, pruned to
        # levels above `lower` (plus `lower` itself).
        forward = [lower]
        worklist = deque([higher])
        while worklist:
            chunk = worklist.popleft()
            if chunk in forward:
                continue
            forward.append(chunk)
            worklist.extend(chunk for chunk in chunk.depends if chunk.level > lower.level)
        # backward: chunks reachable from `lower` along depended-edges, pruned to
        # levels below `higher` (plus `higher` itself).
        backward = [higher]
        worklist = deque([lower])
        while worklist:
            chunk = worklist.popleft()
            if chunk in backward:
                continue
            backward.append(chunk)
            worklist.extend(chunk for chunk in chunk.depended if chunk.level < higher.level)
        # the lower and the higher must be inside
        # De-duplicate the union of both traversals, preserving discovery order;
        # the whole set is merged at once to avoid introducing cycles.
        join = []
        for c in forward + backward:
            if c not in join:
                join.append(c)

        return self.__try_to_merge(join, binders=binders, constraint_checkers=constraint_checkers)

    def __try_to_merge(
        self,
        chunks: Iterable[CodeChunk],
        binders: Iterable[Callable[[int], None]] = [],
        constraint_checkers: Iterable[Tuple[str, Callable[[Iterable[CodeChunk]], bool]]] = [],
    ):
        """
        Try to merge a list of chunks. Raise MergeFailed if this merge breaks some constraint.
        Constraints are checked before anything is mutated, so a failed merge
        leaves the pool unchanged.
        """
        for name, checker in constraint_checkers:
            if not checker(chunks):
                raise MergeFailed(name)
        # Keep the chunk with the greatest level and fold everything else into it.
        highest = max(chunks, key=lambda c: c.level)
        for chunk in chunks:
            self.__do_merge(highest, chunk)
        self.set_level(highest)
        for binder in binders:
            binder(len(self.chunks))
        return highest

    def __do_merge(self, stay: CodeChunk, leave: CodeChunk):
        """
        Merge ``leave`` into ``stay``. This method does the trivial job and doesn't keep acyclicity and doesn't guarantee the size is under max size.
        Args:
            stay (CodeChunk): the chunk that absorbs the other and remains in the pool
            leave (CodeChunk): the chunk that is removed from the pool
        """
        if stay == leave:
            return
        stay.swallow(leave)
        self.chunks.remove(leave)
        # Re-map the moved slices to the surviving chunk (swallow left leave.slices intact).
        for slice in leave.slices:
            self.chunk_of_slice[slice.id] = stay.id

    def set_level(self, chunk: CodeChunk):
        """
        Recompute ``chunk``'s level from its dependents and propagate the change
        down its dependencies, keeping every chunk's level strictly below the
        levels of all chunks that depend on it. Sorting by level ascending then
        yields dependencies before dependents (see get_order).
        Args:
            chunk (CodeChunk): chunk whose dependents' levels are assumed correct
        """
        workList = deque()

        def set_dependency_level(chunk: CodeChunk, new_level):
            # Schedule every dependency to be bounded by new_level.
            for dep in chunk.depends:
                workList.append((dep, new_level))

        # One below the lowest dependent level (or -1 for a chunk with no dependents).
        new_level = min((dep.level for dep in chunk.depended), default=0) - 1
        chunk.level = new_level
        set_dependency_level(chunk, new_level - 1)

        while len(workList):
            chunk, new_level = workList.popleft()
            old_level = chunk.level
            if new_level < old_level:
                # Must move down to stay below the dependent that scheduled us.
                chunk.level = new_level
                set_dependency_level(chunk, new_level - 1)
            elif new_level > old_level:
                # May move up, but only if that still keeps us below every dependent.
                should_set = True
                for dep in chunk.depended:
                    if dep.level <= new_level:
                        should_set = False
                        break
                if should_set:
                    chunk.level = new_level
                    set_dependency_level(chunk, new_level - 1)

    def put_together(
        self,
        slices: Iterable[CodeSlice],
        binders: Iterable[Callable[[int], None]] = [],
        constraint_checkers: Iterable[Tuple[str, Callable[[Iterable[CodeChunk]], bool]]] = [],
    ) -> List[CodeChunk]:
        """
        Try our best to put given slices into one chunk. This may result in multiple chunks because merging into one might exceed the max size.
        Args:
            slices (Iterable[CodeSlice]): code slices
        Returns:
            List[CodeChunk]: the chunks that contain these slices.
        NOTE(review): a chunk appended to the result can later be merged into a
        higher chunk by a subsequent iteration (merge keeps the higher chunk), so
        the returned list may contain chunks already removed from the pool —
        confirm whether callers depend on the returned objects being live.
        """
        # Start from the slice whose chunk is highest (greatest level).
        slices = sorted(
            slices,
            key=lambda slice: self.get_chunk_by_id(self.chunk_of_slice[slice.id]).level,
            reverse=True,
        )
        chunks: List["CodeChunk"] = []
        while slices:
            chunk_id = self.chunk_of_slice[slices[0].id]
            chunk = self.get_chunk_by_id(chunk_id)
            chunks.append(chunk)
            slices.pop(0)
            # Greedily fold in as many remaining slices' chunks as constraints allow.
            for slice in slices.copy():
                try:
                    chunk = self.merge(
                        chunk,
                        self.get_chunk_by_id(self.chunk_of_slice[slice.id]),
                        binders=binders,
                        constraint_checkers=constraint_checkers,
                    )
                    slices.remove(slice)
                except MergeFailed:
                    pass
        return chunks

    def get_order(self) -> List[CodeChunk]:
        """
        Get the topo order of chunks (ascending level: dependencies first).
        Returns:
            List[CodeChunk]: the order of chunks.
        """
        return sorted(self.chunks, key=lambda c: c.level)

    def simple_strategy(
        self,
        binders: Iterable[Callable[[int], None]] = [],
        constraint_checkers: Iterable[Tuple[str, Callable[[Iterable[CodeChunk]], bool]]] = [],
    ):
        """
        A simple strategy: merge a chunk if it has only one dependency or is
        depended on by only one chunk.
        NOTE(review): unlike put_together, MergeFailed is NOT swallowed here — it
        is re-raised and aborts the whole strategy; confirm this is intended.
        """
        for chunk in self.chunks.copy():
            # Skip chunks already merged away by a previous iteration.
            if chunk not in self.chunks:
                continue
            if len(chunk.depends) == 1:
                try:
                    chunk = self.merge(chunk, next(iter(chunk.depends)), binders=binders, constraint_checkers=constraint_checkers)
                except KeyboardInterrupt as e:
                    raise e
                except Exception as e:
                    traceback.print_exc()
                    raise e

            if len(chunk.depended) == 1:
                try:
                    chunk = self.merge(
                        chunk, next(iter(chunk.depended)), binders=binders, constraint_checkers=constraint_checkers
                    )
                except KeyboardInterrupt as e:
                    raise e
                except Exception as e:
                    traceback.print_exc()
                    raise e

    def __greedy_strategy(
        self,
        gain: Callable[[CodeChunk, CodeChunk], float],
        depend_only=False,
        siblings_only=False,
        binders: Iterable[Callable[[int], None]] = [],
        constraint_checkers: Iterable[Tuple[str, Callable[[Iterable[CodeChunk]], bool]]] = [],
    ):
        """
        A greedy algorithm to merge chunks pair by pair. This will merge the pair of chunks that have the largest gain until merges of any pair will break the constraints.
        Please make sure the gain is symmetric and local. "Local" means gain only relies on the two chunks and their neighbors.
        Args:
            gain (Callable[[CodeChunk, CodeChunk], float]): a function that calculates the gain of merging a pair of chunks.
            depend_only (bool): only consider pairs where one chunk depends on the other.
            siblings_only (bool): only consider pairs sharing a dependent or a dependency.
        """

        class Pair:
            """Unordered pair of chunks; equality and hash are symmetric."""

            def __init__(self, chunk1, chunk2):
                self.chunk1 = chunk1
                self.chunk2 = chunk2

            def __eq__(self, other):
                return (self.chunk1, self.chunk2) == (other.chunk1, other.chunk2) or (self.chunk1, self.chunk2) == (
                    other.chunk2,
                    other.chunk1,
                )

            def __hash__(self):
                # Multiplication is commutative, matching the symmetric __eq__.
                return hash(self.chunk1) * hash(self.chunk2)

        @dataclass(order=True)
        class PairWithGain:
            # Heap entry ordered by neg_gain only, so the min-heap pops the largest gain.
            pair: Pair = field(compare=False)
            neg_gain: float
            timestamp: int = field(compare=False)

        # Lazy-deletion priority queue: stale entries are skipped via timestamps.
        gain_list: List[PairWithGain] = []
        gain_table: Dict[Pair, float] = defaultdict(float)
        timestamps: Dict[Pair, int] = defaultdict(int)

        def pairs_around(chunk: CodeChunk):
            """
            Only pairs of chunks that
            - share the same dependent
            - share the same dependency
            - one depends on another
            (subject to the depend_only / siblings_only flags)
            """
            if not depend_only:
                yield from map(lambda t: Pair(*t), itertools.combinations(chunk.depends, 2))
                yield from map(lambda t: Pair(*t), itertools.combinations(chunk.depended, 2))
            if not siblings_only:
                yield from map(lambda t: Pair(*t), itertools.product([chunk], chunk.depends | chunk.depended))

        def update_gain(chunks: Iterable[CodeChunk]):
            """
            update the gain of pairs around these chunks.
            """
            nonlocal gain_list, gain, gain_table
            update_pairs: Set[Pair] = set()
            for chunk in chunks:
                update_pairs.update(pairs_around(chunk))
            for pair in update_pairs:
                gain_res = gain(pair.chunk1, pair.chunk2)
                if gain_table[pair] == gain_res:
                    continue
                gain_table[pair] = gain_res
                # Push a fresh entry; bumping the timestamp invalidates older ones.
                pwg = PairWithGain(pair, -gain_res, timestamps[pair] + 1)
                heapq.heappush(gain_list, pwg)
                timestamps[pair] += 1

        # initialize gain table

        update_gain(self.chunks)

        while gain_list:
            pwg = heapq.heappop(gain_list)
            chunk1 = pwg.pair.chunk1
            chunk2 = pwg.pair.chunk2
            if pwg.neg_gain >= 0:
                # Non-positive gain: merging would not help.
                continue
            if pwg.timestamp < timestamps[pwg.pair]:
                # too old
                continue
            if chunk1 not in self.chunks or chunk2 not in self.chunks:
                # One side was already merged away.
                continue
            try:
                stay = self.merge(chunk1, chunk2, binders=binders, constraint_checkers=constraint_checkers)
                # Gains are local, so only the merged chunk and its neighbors
                # need re-evaluation.
                join = []
                for c in list(stay.depends) + list(stay.depended) + [stay]:
                    if c not in join:
                        join.append(c)
                update_gain(join)

            except MergeFailed:
                pass

    def share_dependency_strategy(
        self,
        binders: Iterable[Callable[[int], None]] = [],
        constraint_checkers: Iterable[Tuple[str, Callable[[Iterable[CodeChunk]], bool]]] = [],
    ):
        """
        We are trying to find out a metric preferring to take all slices into one chunk. If possible, we would like to translate all code once. Modularity doesn't do that.
        We would like to merge slices as many as possible, and the smaller a chunk is, the more potential it has.
        So, we prefer a merge to have smaller size.
        And we don't care dependencies inside a chunk, but prefer the dependencies among chunks to be strong.
        """

        def gain(chunk1: CodeChunk, chunk2: CodeChunk):
            # Fraction of the pair's total incoming weight contributed by
            # dependents shared by both chunks.
            # size_sum = chunk1.size + chunk2.size
            # depends_share = len(chunk1.depends & chunk2.depends) / len(chunk1.depends | chunk2.depends)
            depended_sum = sum(chunk1.w_depended.values()) + sum(chunk2.w_depended.values())
            if depended_sum == 0:
                depended_share = 0
            else:
                common_depended = chunk1.depended & chunk2.depended
                common_weight = sum(chunk1.get_depended_weight(dep) + chunk2.get_depended_weight(dep) for dep in common_depended)
                depended_share = common_weight / depended_sum
            return depended_share

        self.__greedy_strategy(
            gain,
            siblings_only=True,
            binders=binders,
            constraint_checkers=constraint_checkers,
        )

    def modularity_strategy(
        self,
        binders: Iterable[Callable[[int], None]] = [],
        constraint_checkers: Iterable[Tuple[str, Callable[[Iterable[CodeChunk]], bool]]] = [],
    ):
        """
        Greedy merging driven by direct dependency strength: the gain of a pair is
        its dependency edge weight normalized by the dependent chunk's size and
        the dependency chunk's slice count.
        """

        def gain(chunk1: CodeChunk, chunk2: CodeChunk):
            # Only directly-dependent pairs are considered (depend_only=True below).
            if chunk1 in chunk2.depends:
                return chunk2.get_depend_weight(chunk1) / chunk2.size / len(chunk1.slices)
            if chunk2 in chunk1.depends:
                return chunk1.get_depend_weight(chunk2) / chunk1.size / len(chunk2.slices)
            return 0

        self.__greedy_strategy(
            gain,
            depend_only=True,
            binders=binders,
            constraint_checkers=constraint_checkers,
        )

    def get_chunk_by_id(self, id: CodeChunkID) -> CodeChunk:
        """
        Look up a live chunk by its id (linear scan over self.chunks).
        Raises:
            KeyError: if no chunk with this id is in the pool.
        """
        ret = None
        for chunk in self.chunks:
            if chunk.id == id:
                ret = chunk
                break
        if ret is None:
            raise KeyError(f"chunk {id} not found")
        return ret
