from __future__ import print_function, unicode_literals
import collections

import dgl

from .pyrtlexceptions import PyrtlError, PyrtlInternalError
import sys
sys.setrecursionlimit(1000000)

def scala_map(A, B):
    """Map each element of ``A`` through the lookup table ``B``.

    Mirrors Scala's ``A.map(B)``: ``B`` may be any indexable container
    (list, dict, ...), and the result preserves the order of ``A``.

    :param A: iterable of keys/indices.
    :param B: indexable container looked up once per element of ``A``.
    :return: list ``[B[a] for a in A]``.
    """
    # Comprehension replaces the original append loop (same behavior).
    return [B[a] for a in A]
def flatten(A):
    """Flatten one level of nesting: ``[[1, 2], [3]] -> [1, 2, 3]``.

    :param A: iterable of iterables.
    :return: a single list containing all inner elements, in order.
    """
    # Single comprehension replaces the original nested append loops.
    return [item for sub in A for item in sub]
def flatmap(A, B):
    """Look up every element of ``A`` in ``B`` and concatenate the results.

    Equivalent to ``flatten(scala_map(A, B))``: each ``B[a]`` must itself be
    iterable, and the concatenation preserves the order of ``A``.
    """
    return [item for key in A for item in B[key]]

def distinct(A):
    """Return the elements of ``A`` with duplicates removed, first-seen order.

    Uses an auxiliary set for O(1) membership tests instead of the original
    O(n^2) ``a not in result`` list scan.  Elements must be hashable (every
    call site in this module passes node ids, i.e. ints).

    :param A: iterable of hashable elements.
    :return: list of unique elements in order of first occurrence.
    """
    seen = set()
    result = []
    for a in A:
        if a not in seen:
            seen.add(a)
            result.append(a)
    return result

def diff(A, B):
    """Return the elements of ``A`` that do not occur in ``B``.

    Order (and duplicates) of ``A`` are preserved.  ``B`` is converted to a
    set once so each membership test is O(1) rather than the original O(m)
    list scan; elements of ``B`` must be hashable (call sites pass ints).

    :param A: iterable to filter.
    :param B: iterable of elements to exclude.
    :return: list of surviving elements of ``A``.
    """
    exclude = set(B)
    return [a for a in A if a not in exclude]

def MFFC(g, excludeSet=[], size=20):
    """Partition the DGL graph ``g`` into Maximum Fanout-Free Cones (MFFCs).

    Starting from every sink node (external outputs and state-element
    inputs), walk backwards through the graph and claim as many predecessor
    nodes as possible into each seed's MFFC, with ``size`` acting as a soft
    cap on the number of nodes per partition.

    :param g: a ``dgl`` graph whose nodes are to be partitioned.
    :param excludeSet: node ids that must each end up in their own singleton
        partition.  NOTE(review): mutable default argument; the function
        only iterates it so this is harmless today, but ``excludeSet=None``
        with an in-body default would be safer.
    :param size: soft upper bound on nodes per partition.
    :return: dict mapping each partition's seed node id to a topologically
        ordered list of the original node ids in that partition.
    """
    Unclaimed = -1   # sentinel: node not yet assigned to any MFFC
    Excluded = -2    # sentinel: node forced into its own singleton partition
    mffc = []                      # mffc[v] = seed id of the partition that claimed v
    mffc_partition_size = [] #size of mffc(v)
    nodes_num = g.num_nodes()
    inNeigh = []                   # predecessor lists, indexed by node id
    outNeigh = []                  # successor lists, indexed by node id
    for id in range(nodes_num):
        inNeigh.append(g.predecessors(id).tolist())
        outNeigh.append(g.successors(id).tolist())
        mffc_partition_size.append(0)
    #print(inNeigh)

    for i in range(len(inNeigh)):
        mffc.append(Unclaimed)
    for id in excludeSet:
        mffc[id] = Excluded

    def maximizeFFCs(fringe):
        # Grow the current MFFCs: a parent may be claimed only when every one
        # of its children already belongs to the same single MFFC (the
        # "fanout-free" condition).  Recurses on the newly claimed nodes.
        fringeAncestors = []
        result=flatmap(fringe,inNeigh)
        #print('result:'+str(result))
        for i in result:
            if mffc[i]==Unclaimed:
                fringeAncestors.append(i)
        fringeAncestors=distinct(fringeAncestors)
        newMembers = []
        for parent in fringeAncestors:
            childrenMFFCs = distinct(scala_map(outNeigh[parent],mffc))
            if len(childrenMFFCs) == 1 and childrenMFFCs[0] != Unclaimed:
                mffc[parent] = childrenMFFCs[0]
                mffc_partition_size[childrenMFFCs[0]] += 1
                if mffc_partition_size[childrenMFFCs[0]] > size:
                    # NOTE(review): hitting the cap aborts the whole loop, and
                    # the parent just claimed is not added to newMembers even
                    # though it was assigned above — confirm this is intended.
                    break
                newMembers.append(parent)
        newMembers=distinct(newMembers)
        if len(newMembers):
            maximizeFFCs(newMembers)

    def findMFFCs():
        # Seed new MFFCs at unclaimed sinks plus unclaimed predecessors of
        # already-claimed nodes; maximize them; repeat until all are claimed.
        unvisitedSinks = []# unvisitedSinks: external outputs and state-element inputs
        visited = []
        for id in range(len(inNeigh)):
            if mffc[id] == Unclaimed and len(outNeigh[id]) == 0:
                unvisitedSinks.append(id)
            if mffc[id] != Unclaimed:
                visited.append(id)
        #print('unvisitedSinks:'+str(unvisitedSinks))
        #print('visited:'+str(visited))
        # fringe: predecessor signals of already-claimed nodes (covers
        # external inputs and state-element output signals).
        fringe = distinct(flatmap(visited,inNeigh))
        #print('inneigh:'+str(fringe))
        unvisitedFringe = []
        for f in fringe:
            if mffc[f] == Unclaimed:
                unvisitedFringe.append(f)
        newMFFCseeds=[]
        for u in unvisitedSinks:
            newMFFCseeds.append(u)
        for u in unvisitedFringe:
            newMFFCseeds.append(u)
        newMFFCseeds=distinct(newMFFCseeds)
        #print('MFFC:',newMFFCseeds)
        if len(newMFFCseeds) == 0:
            return mffc
        else:
            for id in newMFFCseeds:
                mffc[id] = id
                mffc_partition_size[id] += 1
            # NOTE(review): this check sits *outside* the loop above, so it
            # inspects only the last seed's partition size (loop variable
            # ``id`` leaks).  It looks like it was meant to guard each seed
            # individually — confirm intent before changing.
            if mffc_partition_size[id] < size:
                maximizeFFCs(newMFFCseeds)
            return findMFFCs()
    mffc = findMFFCs()
    for id in excludeSet:
        # Excluded nodes become singleton partitions keyed by their own id.
        mffc[id] = id
    partition = {}
    for index,id in enumerate(mffc):
        if id in partition.keys():
            partition[id].append(index)
        else:
            partition[id]=[index]

    # Re-order each partition's members topologically via the induced
    # subgraph, then map subgraph-local ids back to original node ids.
    for item in partition.items():
        part = []
        original_part = []
        #print(str(item[0])+':'+str(item[1]))
        graph=dgl.node_subgraph(g,item[1])
        original_id = graph.ndata[dgl.NID].tolist()
        #print(original_id)
        for l in dgl.topological_nodes_generator(graph):
            part.extend(l.tolist())
        for id in part:
            original_part.append(original_id[id])
        #print(original_part)
        partition[item[0]] = original_part

    return partition #,mffc_partition_size

# class AcyclicPart:
#     def __init__(self,mffc_partition,excludeSet=[]):
#         self.mffc=mffc_partition
#         self.excludeSet =excludeSet
#
#     #查找小分区
#     def findSmallParts(self,smallPartCutoff):
#         result=[]
#         for id in self.mffc:
#             if (self.mg.nodeSize(id)<smallPartCutoff) and (id not in self.excludeSet):
#                 result.append(id)
#         return result
#
#     #执行可能的合并操作
#     def perfomMergesIfPossible(self,mergesToConsider):
#         mergesMade=[]
#         for mergeReq in mergesToConsider:
#             if len(mergeReq)>1:
#                 partsStillExist=True
#                 for i in mergeReq:
#                     if i not in self.mg.mergeIDToMembers:
#                         partsStillExist=False
#                         break
#                 if partsStillExist and self.mg.mergeIsAcyclic_set(mergeReq):
#                     flag=True
#                     for id in mergeReq:
#                         if id in self.excludeSet:
#                             flag=False
#                             break
#                     if flag:
#                         self.mg.mergeGroups(mergeReq[0],mergeReq[1:])
#                         mergesMade.append(mergeReq)
#         return mergesMade
#
#     def numEdgesRemovedByMerge(self,mergeReq):
#         totalInDegree=0
#         totalOutDegree=0
#         for id in mergeReq:
#             totalInDegree+=len(self.mg.inNeigh[id])
#             totalOutDegree+=len(self.mg.outNeigh[id])
#         mergedInDegree=len(diff(distinct(flatmap(mergeReq,self.mg.inNeigh)),mergeReq))
#         mergedOutDegree=len(diff(distinct(flatmap(mergeReq,self.mg.outNeigh)),mergeReq))
#         return totalInDegree + totalOutDegree - (mergedInDegree + mergedOutDegree)
#
#     def coarsenWithMFFCs(self):
#         mffcResults=MFFC(self.mg,self.excludeSet)
#         self.mg.applyInitialAssignments(mffcResults)
#
#     #将单输入分区合并到其父分区
#     def mergeSingleInputPartsIntoParents(self,smallPartCutoff=20):
#         smallPartIDs=self.findSmallParts(smallPartCutoff)
#         singleInputIDs=[]
#         for id in smallPartIDs:
#             if len(self.mg.inNeigh[id])==1:
#                 singleInputIDs.append(id)
#         singleInputParents=distinct(flatmap(singleInputIDs,self.mg.inNeigh))
#         baseSingleInputIDs=diff(singleInputIDs,singleInputParents)
#         for childID in baseSingleInputIDs:
#             parentID=self.mg.inNeigh[childID][0]
#             if parentID not in self.excludeSet:
#                 self.mg.mergeGroups(parentID,[childID])
#         if len(baseSingleInputIDs)<len(singleInputIDs):
#             self.mergeSingleInputPartsIntoParents(smallPartCutoff)
#
#     #将小的兄弟分区合并
#     def mergeSmallSiblings(self,smallPartCutoff=10):
#         smallPartIDs=self.findSmallParts(smallPartCutoff)
#         inputsToSiblings=[]
#         for id in smallPartIDs:
#             inputsCanonicalized=sorted(distinct(self.mg.inNeigh[id]))
#             flag=False
#             for i in inputsToSiblings:
#                 if i[0]==inputsCanonicalized:
#                     flag=True
#                     inputsToSiblings[inputsToSiblings.index(i)][1].append(id)
#                     break
#             if flag==False:
#                 inputsToSiblings.append([inputsCanonicalized,[id]])
#         mergesToConsider=[]
#         for i in inputsToSiblings:
#             siblingIDs=i[1]
#             if len(siblingIDs)>1:
#                 mergesToConsider.append(siblingIDs)
#         mergesMade=self.perfomMergesIfPossible(mergesToConsider)
#         if len(mergesMade):
#             self.mergeSmallSiblings(smallPartCutoff)
#
#     #将小分区与任意规模的兄弟分区合并
#     def mergeSmallParts(self,smallPartCutoff=20,mergeThreshold=0.5):
#         smallPartIDs=self.findSmallParts(smallPartCutoff)
#         mergesToConsider=[]
#         for id in smallPartIDs:
#             numInputs=len(self.mg.inNeigh[id])
#             siblings=diff(distinct(flatmap(self.mg.inNeigh[id],self.mg.outNeigh)),[id])
#             legalSiblings=[]
#             for sibID in siblings:
#                 if sibID not in self.excludeSet:
#                     legalSiblings.append(sibID)
#             orderConstrSibs=[]
#             #le 拓扑排序上在id之前的分区
#             for le in legalSiblings:
#                 if le < id:
#                     orderConstrSibs.append(le)
#             myInputSet=distinct(self.mg.inNeigh[id])
#             sibsScored=[]
#             for sibID in orderConstrSibs:
#                 count=0
#                 result=distinct(self.mg.inNeigh[sibID])
#                 for r in result:
#                     if r in myInputSet:
#                         count+=1
#                 sibsScored.append([count/numInputs,sibID])
#             choices=[]
#             for sib in sibsScored:
#                 if sib[0]>=mergeThreshold:
#                     choices.append(sib)
#             choicesOrdered=sorted(choices,reverse=True)
#             topChoice=[]
#             for ch in choicesOrdered:
#                 sibID=ch[1]
#                 if self.mg.mergeIsAcyclic(sibID,id):
#                     topChoice.append(ch)
#                     break
#             if len(topChoice):
#                 mergesToConsider.append([topChoice[0][1],id])
#         mergesMade=self.perfomMergesIfPossible(mergesToConsider)
#         if len(mergesMade):
#             self.mergeSmallParts(smallPartCutoff,mergeThreshold)
#
#     #向下合并小分区
#     def mergeSmallPartsDown(self,smallPartCutoff=20):
#         smallPartIDs=self.findSmallParts(smallPartCutoff)
#         mergesToConsider=[]
#         for id in smallPartIDs:
#             mergeableChildren=[]
#             for childID in self.mg.outNeigh[id]:
#                 if self.mg.mergeIsAcyclic(id,childID) and (childID not in self.excludeSet):
#                     mergeableChildren.append(childID)
#             if len(mergeableChildren):
#                 orderedByEdgesRemoved=[]
#                 for childID in mergeableChildren:
#                     orderedByEdgesRemoved.append([self.numEdgesRemovedByMerge([id,childID]),childID])
#                 orderedByEdgesRemoved.sort()
#                 topChoice=orderedByEdgesRemoved[-1][1]
#                 mergesToConsider.append([id,topChoice])
#         mergesMade=self.perfomMergesIfPossible(mergesToConsider)
#         if len(mergesMade):
#             self.mergeSmallPartsDown(smallPartCutoff)
#
#     def iterParts(self):
#         return self.mg.iterGroups()
#
#     def partition(self,smallPartCutoff=20):
#         self.coarsenWithMFFCs()
#         self.mergeSingleInputPartsIntoParents()
#         self.mergeSmallSiblings(smallPartCutoff)
#         self.mergeSmallParts(smallPartCutoff, 0.5)
#         self.mergeSmallPartsDown(smallPartCutoff)
#         self.mergeSmallParts(2*smallPartCutoff, 0.25)

class Stack(object):
    """A minimal LIFO stack backed by a plain Python list.

    The backing list is deliberately exposed as ``items`` because callers
    (e.g. ``Tarjan``) probe it directly for membership tests.
    """

    def __init__(self):
        self.items = []

    def push(self, item):
        """Place *item* on top of the stack."""
        self.items.append(item)

    def pop(self):
        """Remove and return the top item (IndexError when empty)."""
        return self.items.pop()

    def clear(self):
        """Discard every item, leaving the stack empty."""
        del self.items[:]

    def empty(self):
        """Return True when the stack holds no items."""
        return len(self.items) == 0

    def size(self):
        """Return the number of items currently stacked."""
        return len(self.items)

    def top(self):
        """Return (without removing) the top item."""
        return self.items[-1]


class Tarjan(object):
    """Tarjan's strongly-connected-components search rooted at node ``u``.

    Runs a recursive DFS over the DGL graph ``g`` starting from ``u`` and
    records every non-trivial SCC (more than one node, i.e. a real cycle)
    reachable from ``u`` in ``self.scc_list``.  Deep graphs rely on the
    raised recursion limit set at module import time.
    """

    def __init__(self,u,g):
        self.g = g              # DGL graph being searched
        self.scc_list=[]        # collected SCCs, each a list of node ids
        self.s=Stack()          # DFS stack of nodes not yet assigned to an SCC
        self.dfn = dict()       # discovery index of each node
        self.low = dict()       # lowest discovery index reachable from the node
        self.flag = dict()      # visited marker
        self.count = 0          # global DFS counter
        self.tarjan(u)
        self.s.clear()

    def tarjan(self,u):
        #print(u)
        self.count = self.count + 1
        self.dfn[u] = self.low[u] = self.count
        self.s.push(u)
        self.flag[u] = True  # mark as visited
        for i in self.g.successors(u).tolist():
            if self.flag.get(i,False):  # already visited
                if i in self.s.items: # still on the stack, so part of the SCC in progress
                    self.low[u] = min(self.low[u],self.dfn[i])
            else:  # not visited yet — keep searching
                self.tarjan(i)
                self.low[u] = min(self.low[u], self.low[i])
        if self.dfn[u] == self.low[u] :
            # u is the root of an SCC: pop its members off the stack.
            m = self.s.pop()
            #self.flag[m] = False
            if m != u:
                # Single-node SCCs are skipped; only cycles are recorded.
                #print("******** nodes inside the cycle ********")
                scc =[]
                scc.append(m)
                #print(m)
                while m != u and self.s.empty() is False:
                    m = self.s.pop()
                    #self.flag[m] = False
                    scc.append(m)
                    #print(m)
                #print("*********************")
                self.scc_list.append(scc)


class macro(collections.namedtuple('macro', ['logicnet','args', 'dests'])):
    """A super-node standing in for a cycle (SCC) folded into one unit.

    Fields:
        logicnet: the logic nets absorbed into this super-node
        args:     input wires feeding the super-node
        dests:    output wires driven by the super-node
    """

    def __str__(self):
        lhs, net, rhs = (', '.join(str(x) for x in group)
                         for group in (self.dests, self.logicnet, self.args))
        return "{} <-- {} -- {}".format(lhs, net, rhs)

    def __hash__(self):
        # Overriding __eq__ can make a namedtuple subclass unhashable, so
        # hash the raw tuple contents explicitly.
        return hash(tuple(self))

    def __eq__(self, other):
        # Equality means the args/dests lists reference the *same* objects
        # (identity, not value): the wire objects overload comparison
        # operators to build new logic, so recursive __eq__ would have
        # side effects callers don't expect.
        if len(self.args) != len(other.args):
            return False
        if len(self.dests) != len(other.dests):
            return False
        same_args = all(mine is theirs for mine, theirs in zip(self.args, other.args))
        same_dests = all(mine is theirs for mine, theirs in zip(self.dests, other.dests))
        return same_args and same_dests

    def __ne__(self, other):
        return not self.__eq__(other)

    def _compare_error(self, other):
        """Raise on ordered comparison.

        Ordered comparisons are unsafe here: args and dests reference
        mutable objects with overloaded comparison operators.
        """
        raise PyrtlError('Greater than and less than comparisons between'
                         ' LogicNets are not supported')

    __lt__ = _compare_error
    __gt__ = _compare_error
    __le__ = _compare_error
    __ge__ = _compare_error


class merge_LogicNet(collections.namedtuple('merge_node', ['logicnet','args', 'dests'])):
    """A single node representing several merged single-node levels.

    Fields mirror ``macro``: ``logicnet`` (the merged nets), ``args``
    (input wires) and ``dests`` (output wires).
    """

    def __str__(self):
        join = lambda seq: ', '.join(str(elem) for elem in seq)
        return "{} <-- {} -- {}".format(join(self.dests),
                                        join(self.logicnet),
                                        join(self.args))

    def __hash__(self):
        # namedtuple hashing can be lost once __eq__ is overridden;
        # restore it from the raw tuple contents.
        return hash(tuple(self))

    def __eq__(self, other):
        # Compare args/dests by object identity rather than value: the wire
        # objects overload rich comparisons to *create* new logic nets, so
        # value comparison would silently mutate the design.
        if len(self.args) != len(other.args) or len(self.dests) != len(other.dests):
            return False
        for mine, theirs in zip(self.args, other.args):
            if mine is not theirs:
                return False
        for mine, theirs in zip(self.dests, other.dests):
            if mine is not theirs:
                return False
        return True

    def __ne__(self, other):
        return not self.__eq__(other)

    def _compare_error(self, other):
        """Raise on ordered comparison.

        args and dests reference mutable objects with overloaded comparison
        operators, so ordering them is never meaningful.
        """
        raise PyrtlError('Greater than and less than comparisons between'
                         ' LogicNets are not supported')

    __lt__ = _compare_error
    __gt__ = _compare_error
    __le__ = _compare_error
    __ge__ = _compare_error


# g=dgl.graph(([0,1,2,3,4,5,5],[4,4,5,5,6,6,7]))
# print(MFFC(g,[5]))

