import os
import sys
import copy
import math
import random
import networkx as nx
from datetime import datetime

sys.path.append("..")
try:
    from CoreSim.Flow import *
    from CoreSim.Event import *
    from CoreSim.Link import *
    from CoreSim.Unit import *
    from CoreSim.SchedulerFair import *

    from Controller.gen_topo_fully import *
    from Controller.gen_topo_detour import *
    from Controller.gen_topo_ovs import *

    import Controller.topo_gen as topo_gen
    import Controller.demand_estimation as dm_est
except:
    raise

# Device roles used by ConvertToNodeId.
SERVER = 0  # end host
TOR = 1  # top-of-rack switch

# Routing types accepted by AssignPaths/CalculateKPaths.
Shortest = 0  # only the shortest path(s)
K_Shortest = 1  # up to k candidate shortest paths


def GetToRId(hostId, host_num):
    """
    Return the 1-based id of the ToR switch that hosts hostId.

    Hosts are numbered 1..N and packed host_num per switch, so hosts
    1..host_num belong to ToR 1, the next host_num to ToR 2, and so on.
    Uses floor division explicitly (`//`): identical to `/` on Python 2
    ints, but still correct if the file is ever run under Python 3.
    """
    return (hostId - 1) // host_num + 1


def ConvertToNodeId(nodeId, role, hosts_num):
    """
    Map a role-local device id onto the global node-id space.

    Two roles are defined: 0:server, 1:tor switch.  Servers keep their
    id unchanged; switch ids are shifted past the host id range so the
    two id spaces never collide.
    """
    offset = 0 if role == 0 else hosts_num
    return nodeId + offset


class SchedulerHybrid(SchedulerFair):
    def __init__(self, links, s_links, host_num, switches_num, bandwidth, sched_type, k=2):
        SchedulerFair.__init__(self, host_num, switches_num)

        # network topology parameter
        self.links = links
        self.static_links = s_links
        self.dynamic_links = links - s_links

        self.host_num = host_num
        self.switches_num = switches_num
        self.hosts_num = host_num * switches_num
        self.bandwidth = bandwidth
        self.host_location = {}  # {host_id: tor_id}

        # k-shortest path routing
        # path is identified by source node ID and destination node ID
        # multi-paths [[id1, id2, id3],...,[id1, id2, id3]]
        self.k = k
        self.pathList = {}

        # edge attributes: 'weight': bandwidth
        self.com_network = nx.DiGraph()  # Including all the information of the network, host and switch
        self.mid_network = nx.DiGraph()  # The overlap between the dynamic network
        self.s_matches = {}  # static pre_matches
        self.d_matches = {}  # dynamical pre_matches
        self.sched_type = sched_type  # scheduler type
        self.row_size = int(math.ceil(math.sqrt(self.switches_num)))

        # demand estimation
        self.interval = 10.0e-2  # reconfiguration interval
        self.adjust_delay = 10.0e-3  # reconfiguration delay
        self.size_threshold = 100000  #
        self.threshold = 0.2  # elephant flow threshold
        self.flows_src = {}  # {'converged': False, 'demand': throughput, 'recLimited': False}
        self.flows_dst = {}  # {'converged': False, 'demand': throughput, 'recLimited': False}

        self.show = False

    def InitialTopology(self, topoDir="", s_topoFileName="topo_static", d_topoFileName="topo_dynamic"):

        if not os.path.exists(inDir + topoDir):
            os.makedirs(inDir + topoDir)

        if not os.path.isfile(inDir + topoDir + s_topoFileName):
            # static network
            match_results = topo_gen.k_cycle_network(self.switches_num, self.static_links)
            self.s_matches = copy.deepcopy(match_results)
            self.Update_ct_network(self.com_network, match_results)

            f = open(inDir + topoDir + s_topoFileName, "w")
            for portId in match_results:
                for srcToR, dstToR in match_results[portId]:
                    print >> f, portId, srcToR, dstToR
            f.close()

            # dynamic network
            match_results = topo_gen.k_regular_network(self.switches_num, self.dynamic_links)
            self.d_matches = match_results
            self.Update_ct_network(self.com_network, match_results)

            f = open(inDir + topoDir + d_topoFileName, "w")
            for portId in match_results:
                for srcToR, dstToR in match_results[portId]:
                    print >> f, portId, srcToR, dstToR
            f.close()
        else:
            f = open(inDir + topoDir + s_topoFileName, "r")
            for line in f.readlines():
                l = line.split()
                portId = int(l[0])
                srcToR = int(l[1])
                dstToR = int(l[2])
                if portId not in self.s_matches:
                    self.s_matches[portId] = [(srcToR, dstToR)]
                else:
                    self.s_matches[portId].append((srcToR, dstToR))
            f.close()
            self.Update_ct_network(self.com_network, self.s_matches)

            f = open(inDir + topoDir + d_topoFileName, "r")
            for line in f.readlines():
                l = line.split()
                portId = int(l[0])
                srcToR = int(l[1])
                dstToR = int(l[2])
                if portId not in self.d_matches:
                    self.d_matches[portId] = [(srcToR, dstToR)]
                else:
                    self.d_matches[portId].append((srcToR, dstToR))
            f.close()
            self.Update_ct_network(self.com_network, self.d_matches)

        for switchId in range(1, self.switches_num + 1):
            for hostId in range(1, self.host_num + 1):
                self.host_location[hostId + (switchId - 1) * self.host_num] = switchId

        self.CreateLinks(self.com_network)
        self.AssignPaths(self.com_network)

    def CreateLinks(self, graph):
        """
        """
        # host-sw links
        for hostId in xrange(1, self.hosts_num + 1):
            s2torId = (hostId - 1) / self.host_num + 1
            torNodeId = ConvertToNodeId(s2torId, TOR, self.hosts_num)

            self.Links[hostId, torNodeId] = Link((hostId, torNodeId))
            self.Links[torNodeId, hostId] = Link((torNodeId, hostId))
            self.Links[hostId, torNodeId].linkCap = self.bandwidth
            self.Links[torNodeId, hostId].linkCap = self.bandwidth

        # sw-sw links
        for src, dst in graph.edges():
            srcTorID = ConvertToNodeId(src, TOR, self.hosts_num)
            dstToRID = ConvertToNodeId(dst, TOR, self.hosts_num)
            self.Links[srcTorID, dstToRID] = Link((srcTorID, dstToRID))
            self.Links[srcTorID, dstToRID].linkCap = self.bandwidth * graph[src][dst]['weight']

    def AssignFlows(self, args):
        """
        args (trafficType, filename)
        """
        self.trafficType = args[0]
        self.fileName = args[1]
        f = open(inDir + self.fileName, "r")
        for line in f.readlines():
            l = line.split()
            flow = Flow()
            if self.trafficType == 'CDF':  # dynamic workload based on CDF
                flow.startId = int(l[1])
                flow.endId = int(l[2])
                flow.SetFlowSize(float(l[4]) * 8)
                flow.startTime = float(l[0])
                flow.flowId = len(self.flows) + 1
            else:  # static workload (stride, random ..)
                flow.startId = int(l[0])
                flow.endId = int(l[1])
                flow.flowId = len(self.flows) + 1

                # initial demand estimation
                if flow.startId not in self.flows_src:
                    self.flows_src[flow.startId] = {}
                if flow.endId not in self.flows_dst:
                    self.flows_dst[flow.endId] = {}

                self.flows_src[flow.startId][flow.endId] = {'converged': False, 'demand': 0.1, 'recLimited': False}
                self.flows_dst[flow.endId][flow.startId] = {'converged': False, 'demand': 0.1, 'recLimited': False}

            self.flows.append(flow)
        f.close()

        split = self.fileName.split('/')
        if len(split) >= 2:
            if not os.path.exists(outDir + split[0]):
                os.makedirs(outDir + split[0])
            self.fileName = split[0] + '/' + self.sched_type + '_' + split[1]
        else:
            self.fileName = self.sched_type + '_' + self.fileName

    def AssignPaths(self, graph, routing_type=K_Shortest):
        for flow in self.flows:
            if (flow.startId, flow.endId) not in self.pathList:
                self.CalculateKPaths(graph, routing_type, flow.startId, flow.endId)

            pathlists = self.pathList[flow.startId, flow.endId]
            index = random.randint(1, len(pathlists))
            pathNodeIds = pathlists[index - 1]
            # print 'pathNodeIds',pathNodeIds
            flow.BuildPath(pathNodeIds)

    def CalculateKPaths(self, graph, routing_type, srcHostId, dstHostId):
        """
        @ topo
        @ routing_type :k_shortest, shortest
        @ host srcHostId
        @ host dstHostId
        """

        self.pathList[srcHostId, dstHostId] = []

        if srcHostId == dstHostId:
            self.pathList[srcHostId, dstHostId].append([srcHostId])
            return

        srcToRId = GetToRId(srcHostId, self.host_num)
        dstToRId = GetToRId(dstHostId, self.host_num)

        # if src host and dst host are in the same parent ToR switch
        if srcToRId == dstToRId:
            srcToRNodeId = ConvertToNodeId(srcToRId, TOR, self.hosts_num)
            self.pathList[srcHostId, dstHostId].append([srcHostId, srcToRNodeId, dstHostId])
            return
        else:
            k_paths = []
            tmp_cutoff = 1
            if routing_type == Shortest:
                while not k_paths:
                    k_paths = list(nx.all_simple_paths(graph, source=srcToRId, target=dstToRId, cutoff=tmp_cutoff))
                    tmp_cutoff += 1
                    assert (tmp_cutoff < 20)
            elif routing_type == K_Shortest:
                while len(k_paths) <= self.k:
                    k_paths = list(nx.all_simple_paths(graph, source=srcToRId, target=dstToRId, cutoff=tmp_cutoff))
                    tmp_cutoff += 1
                    if tmp_cutoff > 10 and k_paths:
                        break
            # print k_paths
            for path in k_paths:
                pathList = [srcHostId]
                for ToRId in path:
                    ToRNodeId = ConvertToNodeId(ToRId, TOR, self.hosts_num)
                    pathList.append(ToRNodeId)
                pathList.append(dstHostId)
                self.pathList[srcHostId, dstHostId].append(pathList)
                # print 'self.pathList', self.pathList[srcHostId, dstHostId]

    def UpdateTopology(self, eventQueue, curTime):
        # bipartite graph
        tf_graph = nx.Graph()
        wl_graph = nx.Graph()
        bp_src = range(1, self.switches_num + 1)
        bp_dst = range(self.switches_num + 1, self.switches_num * 2 + 1)

        for src in bp_src:
            tf_graph.add_node(src, bipartite=0, degree=0)
            wl_graph.add_node(src, bipartite=0, degree=0)
        for dst in bp_dst:
            tf_graph.add_node(dst, bipartite=1, degree=0)
            wl_graph.add_node(dst, bipartite=1, degree=0)

        '''
        for k in self.d_matches:
            print 'dynamic_matches', k, sorted(self.d_matches[k])
        '''

        starttime = datetime.now()

        # STEP1. demand estimation max-min fairness
        self.flows_src = {}
        self.flows_dst = {}
        for flow in self.runningFlows:
            if flow.remainSize > self.size_threshold:
                if flow.startId not in self.flows_src:
                    self.flows_src[flow.startId] = {}
                if flow.endId not in self.flows_dst:
                    self.flows_dst[flow.endId] = {}
                self.flows_src[flow.startId][flow.endId] = {'converged': False, 'demand': 0.1, 'recLimited': False}
                self.flows_dst[flow.endId][flow.startId] = {'converged': False, 'demand': 0.1, 'recLimited': False}

        Host_Matrix = dm_est.demand_estimation(self.flows_src, self.flows_dst)
        dm_est.transform(Host_Matrix, self.host_location, tf_graph, self.switches_num, self.threshold)

        d_matches = {}
        s_matches = {}
        new_matches = {}
        # 2. Fully reconfigurable Non-blocking
        if self.sched_type == 'OSA':
            gt_fully = gen_topo_fully()
            gt_fully._transform(wl_graph, tf_graph, self.dynamic_links, self.switches_num)
            cur_matches = gt_fully._matchings(wl_graph, self.d_matches, self.dynamic_links, self.switches_num)
            (d_matches, s_matches, new_matches) = gt_fully._color_assignment(self.d_matches, cur_matches,
                                                                             self.switches_num)

            endtime = datetime.now()
            timediff = endtime - starttime
            print 'Controller Overhead', timediff

            # print len(d_matches), len(s_matches), len(new_matches)
            if self.show:
                for k in new_matches:
                    print 'new_matches', k, sorted(new_matches[k])
                    # print sorted(new_matches[k])
                for k in s_matches:
                    print 's_matches', k, len(s_matches[k])
                    print sorted(s_matches[k])
                d_sum = 0
                for k in d_matches:
                    d_sum += len(d_matches[k])
                    print 'd_matches', k, len(d_matches[k])
                    print sorted(d_matches[k])
                print d_sum
        elif self.sched_type == 'Detour':
            gt_detour = gen_topo_detour()
            s_matches = {}
            d_matches = {}
            for x in self.d_matches:
                s_matches[x] = copy.copy(self.d_matches[x])
                d_matches[x] = []
            gt_detour._wavelength_adjustment(self.d_matches, d_matches, s_matches, self.row_size, self.switches_num)
            gt_detour._transform(wl_graph, tf_graph, self.dynamic_links, self.switches_num)
            cur_matches = gt_detour._matchings(wl_graph, self.d_matches, self.dynamic_links, self.switches_num)
            (d_matches, s_matches, new_matches) = gt_detour._color_assignment(self.d_matches, cur_matches,
                                                                              self.switches_num)
            gt_detour._wavelength_adjustment(new_matches, d_matches, s_matches, self.row_size, self.switches_num)

            endtime = datetime.now()
            timediff = endtime - starttime
            print 'Controller Overhead', timediff

            print len(d_matches), len(s_matches), len(new_matches)
            if self.show:
                for k in new_matches:
                    print 'new_matches', k, len(new_matches[k])
                    print sorted(new_matches[k])

                for k in s_matches:
                    print 's_matches', k, len(s_matches[k])
                    print sorted(s_matches[k])
                d_sum = 0
                for k in d_matches:
                    d_sum += len(d_matches[k])
                    print 'd_matches', k, len(d_matches[k])
                    print sorted(d_matches[k])
                print d_sum
        elif self.sched_type == 'OvS':
            gt_ovs = gen_topo_ovs()
            gt_ovs._transform(wl_graph, tf_graph, self.dynamic_links, self.row_size, self.switches_num)
            cur_matches = gt_ovs._matchings(wl_graph, self.d_matches, self.dynamic_links, self.switches_num)
            (d_matches, s_matches, new_matches) = gt_ovs._color_assignment(self.d_matches, cur_matches,
                                                                           self.switches_num)

            endtime = datetime.now()
            timediff = endtime - starttime
            print 'Controller Overhead', timediff

            print len(d_matches), len(s_matches), len(new_matches)
            if self.show:
                for k in new_matches:
                    print 'new_matches', k, len(new_matches[k])
                    print sorted(new_matches[k])

                for k in s_matches:
                    print 's_matches', k, len(s_matches[k])
                    print sorted(s_matches[k])
                d_sum = 0
                for k in d_matches:
                    d_sum += len(d_matches[k])
                    print 'd_matches', k, len(d_matches[k])
                    print sorted(d_matches[k])
                print d_sum

        '''
        3.Update the network topology and routing
        @ d_matches: update the network of the data plane
        @ s_matches: update the mid_network, combine with the static_network(which guarantees the network is connected)
        @ new_matches: update the com_network of the control plane
        '''
        self.d_matches = copy.deepcopy(new_matches)

        print "com_network"
        self.com_network = nx.DiGraph()
        self.Update_ct_network(self.com_network, self.s_matches)
        self.Update_ct_network(self.com_network, new_matches)

        print "mid_network"
        self.mid_network = nx.DiGraph()
        self.Update_ct_network(self.mid_network, self.s_matches)
        self.Update_ct_network(self.mid_network, s_matches)
        print "Update_dt_network"
        self.UpdateEnergy(self.runningFlows, curTime)
        self.Update_dt_network(self.mid_network, K_Shortest)
        print "Reconfigure finished"
        self.UpdateFlow(0, UpdateTopology, eventQueue, curTime)
        print "update flow"
        event = LinkAdjustedEvent(curTime + self.adjust_delay + timediff.total_seconds())
        eventQueue.push(event, event.time)

    def UpdateTopologyFinished(self, eventQueue, curTime):
        """
        After the network adjusted
        """
        print "UpdateTopologyFinished"
        self.UpdateEnergy(self.runningFlows, curTime)
        self.Update_dt_network(self.com_network, Shortest)
        self.UpdateFlow(0, UpdateTopology, eventQueue, curTime)
        event = UpdateTopologyEvent(curTime + self.interval)
        eventQueue.push(event, event.time)

    def Update_ct_network(self, graph, matches):
        """
        update the network topology in the controller based on the new matches
        """
        for k in matches:
            for src, dst in matches[k]:
                dst -= self.switches_num
                if (src, dst) not in graph.edges():
                    graph.add_edge(src, dst, weight=1)
                else:
                    graph[src][dst]['weight'] += 1
        if False:
            for src in graph.nodes():
                dsts_dic = graph.neighbors(src)
                dsts = list(dsts_dic)
                print "%s %s" % (src, dsts)
                for dst in dsts:
                    print "(%s,%s),%s" % (src, dst, graph.edge[src][dst]['weight'])

    def Update_dt_network(self, graph, routing=Shortest):
        self.Links.clear()
        self.pathList.clear()
        self.CreateLinks(graph)
        self.AssignPaths(graph, routing)

    def Reconfigure_network(self, scheduler):
        # bipartite graph
        tf_graph = nx.Graph()
        wl_graph = nx.Graph()
        bp_src = range(1, self.switches_num + 1)
        bp_dst = range(self.switches_num + 1, self.switches_num * 2 + 1)

        for src in bp_src:
            tf_graph.add_node(src, bipartite=0, degree=0)
            wl_graph.add_node(src, bipartite=0, degree=0)
        for dst in bp_dst:
            tf_graph.add_node(dst, bipartite=1, degree=0)
            wl_graph.add_node(dst, bipartite=1, degree=0)

        starttime = datetime.now()
        # STEP1. demand estimation max-min fairness
        self.flows_src = {}
        self.flows_dst = {}
        for flow in self.flows:
            if flow.startId not in self.flows_src:
                self.flows_src[flow.startId] = {}
            if flow.endId not in self.flows_dst:
                self.flows_dst[flow.endId] = {}
            self.flows_src[flow.startId][flow.endId] = {'converged': False, 'demand': 0.1, 'recLimited': False}
            self.flows_dst[flow.endId][flow.startId] = {'converged': False, 'demand': 0.1, 'recLimited': False}

        Host_Matrix = dm_est.demand_estimation(self.flows_src, self.flows_dst)
        dm_est.transform(Host_Matrix, self.host_location, tf_graph, self.switches_num, self.threshold)

        d_matches = {}
        s_matches = {}
        new_matches = {}
        # 2. Fully reconfigurable Non-blocking
        if scheduler == 'OSA':
            gt_fully = gen_topo_fully()
            gt_fully._transform(wl_graph, tf_graph, self.dynamic_links, self.switches_num)
            cur_matches = gt_fully._matchings(wl_graph, self.d_matches, self.dynamic_links, self.switches_num)
            (d_matches, s_matches, new_matches) = gt_fully._color_assignment(self.d_matches, cur_matches,
                                                                             self.switches_num)

            endtime = datetime.now()
            timediff = endtime - starttime
            print 'OSA Controller Overhead', timediff

            # print len(d_matches), len(s_matches), len(new_matches)
            if self.show:
                for k in new_matches:
                    print 'new_matches', k, sorted(new_matches[k])
                    # print sorted(new_matches[k])
                for k in s_matches:
                    print 's_matches', k, len(s_matches[k])
                    print sorted(s_matches[k])
                d_sum = 0
                for k in d_matches:
                    d_sum += len(d_matches[k])
                    print 'd_matches', k, len(d_matches[k])
                    print sorted(d_matches[k])
                print d_sum
        elif scheduler == 'Detour':
            gt_detour = gen_topo_detour()
            s_matches = {}
            d_matches = {}
            for x in self.d_matches:
                s_matches[x] = copy.copy(self.d_matches[x])
                d_matches[x] = []
            gt_detour._wavelength_adjustment(self.d_matches, d_matches, s_matches, self.row_size, self.switches_num)

            gt_detour._transform(wl_graph, tf_graph, self.dynamic_links, self.switches_num)
            cur_matches = gt_detour._matchings(wl_graph, self.d_matches, self.dynamic_links, self.switches_num)
            (d_matches, s_matches, new_matches) = gt_detour._color_assignment(self.d_matches, cur_matches,
                                                                              self.switches_num)
            gt_detour._wavelength_adjustment(new_matches, d_matches, s_matches, self.row_size, self.switches_num)

            endtime = datetime.now()
            timediff = endtime - starttime
            print 'Detour Controller Overhead', timediff

            print len(d_matches), len(s_matches), len(new_matches)
            if self.show:
                for k in new_matches:
                    print 'new_matches', k, len(new_matches[k])
                    print sorted(new_matches[k])

                for k in s_matches:
                    print 's_matches', k, len(s_matches[k])
                    print sorted(s_matches[k])
                d_sum = 0
                for k in d_matches:
                    d_sum += len(d_matches[k])
                    print 'd_matches', k, len(d_matches[k])
                    print sorted(d_matches[k])
                print d_sum
        elif scheduler == 'OvS':
            gt_ovs = gen_topo_ovs()
            gt_ovs._transform(wl_graph, tf_graph, self.dynamic_links, self.row_size, self.switches_num)
            cur_matches = gt_ovs._matchings(wl_graph, self.d_matches, self.dynamic_links, self.switches_num)
            (d_matches, s_matches, new_matches) = gt_ovs._color_assignment(self.d_matches, cur_matches,
                                                                           self.switches_num)

            endtime = datetime.now()
            timediff = endtime - starttime
            print 'Detour Controller Overhead', timediff

            print len(d_matches), len(s_matches), len(new_matches)
            if self.show:
                for k in new_matches:
                    print 'new_matches', k, len(new_matches[k])
                    print sorted(new_matches[k])

                for k in s_matches:
                    print 's_matches', k, len(s_matches[k])
                    print sorted(s_matches[k])
                d_sum = 0
                for k in d_matches:
                    d_sum += len(d_matches[k])
                    print 'd_matches', k, len(d_matches[k])
                    print sorted(d_matches[k])
                print d_sum

        self.com_network = nx.DiGraph()
        self.Update_ct_network(self.com_network, self.s_matches)

        if self.show:
            for k in self.s_matches:
                print 'self.s_matches', k, len(self.s_matches[k])
                print sorted(self.s_matches[k])

        self.Update_ct_network(self.com_network, new_matches)

        self.Update_dt_network(self.com_network, Shortest)
