from PyQt5 import uic
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
import pyqtgraph as pg
from ga_algorithm import *
import xlrd
import threading
import numpy as np
# deep copy (used to duplicate chromosomes safely)
from copy import deepcopy
# plotting support
# genetic-algorithm library
from deap import base, tools, creator, algorithms
# random numbers
import random

from deap.algorithms import varOr

threadLock = threading.RLock()


class GA_Functions(object):
    """Genetic-algorithm operators for a capacitated vehicle routing problem.

    A chromosome is a flat list of node indices in which 0 marks the depot:
    every route starts and ends with 0, e.g. [0, 3, 1, 0, 4, 2, 0] encodes
    two vehicle routes.  All problem data comes from ``dataDict``.
    """

    def __init__(self, dataDict):
        # dataDict stores every parameter: depot/customer coordinates
        # ('NodeCoor'), customer demands ('Demand'), arrival time windows,
        # service time, vehicle capacity ('MaxLoad'), fleet size
        # ('CarNumSet'), vehicle speed ('Speed') and the per-route
        # working-time limit ('WorkTime').
        self.dataDict = dataDict
        self.log = None

    # Individual generation ==================================
    def genInd1(self, dataDict):
        """Generate one individual by slicing a random customer permutation.

        The number of vehicles is not fixed, so the permutation is greedily
        cut into load-feasible slices and each slice becomes one route.  The
        ``dataDict`` argument is kept for interface compatibility, but
        ``self.dataDict`` is always used.
        """
        dataDict = self.dataDict
        nCustomer = len(dataDict['NodeCoor']) - 1  # number of customers
        # Random permutation of the customers; customer ids run from 1 to n.
        perm = np.random.permutation(nCustomer) + 1
        pointer = 0      # scanning pointer
        lowPointer = 0   # lower bound of the slice currently being built
        permSlice = []
        # Keep slicing while the pointer has not reached the end of perm.
        while pointer < nCustomer - 1:
            vehicleLoad = 0
            # Load customers onto the current vehicle until capacity is hit.
            while (vehicleLoad < int(dataDict['MaxLoad'])) and (pointer < nCustomer - 1):
                vehicleLoad += dataDict['Demand'][perm[pointer]]
                pointer += 1
            if lowPointer + 1 < pointer:
                # Cut at a random position inside the feasible span so the
                # initial population stays diverse.
                tempPointer = np.random.randint(lowPointer + 1, pointer)
                permSlice.append(perm[lowPointer:tempPointer].tolist())
                lowPointer = tempPointer
                pointer = tempPointer
            else:
                # Whatever remains becomes the final route.
                permSlice.append(perm[lowPointer::].tolist())
                break
        # Join the slices into one chromosome, separated by depot 0s.
        ind = [0]
        for eachRoute in permSlice:
            ind = ind + eachRoute + [0]
        return ind

    def genInd2(self, dataDict):
        """Generate one individual by inserting ``CarNumSet - 2`` depot
        visits at distinct random positions of a random customer permutation.

        The ``dataDict`` argument is kept for interface compatibility, but
        ``self.dataDict`` is always used.
        """
        dataDict = self.dataDict
        nCustomer = len(dataDict['NodeCoor']) - 1  # number of customers
        perm = np.random.permutation(nCustomer) + 1  # customer ids 1..n
        Perm = list(perm)
        # Draw distinct insertion positions for the intermediate depot visits.
        StartNode = []
        while len(StartNode) < dataDict['CarNumSet'] - 2:
            Zero_Location = np.random.randint(1, len(Perm))
            if Zero_Location not in StartNode:
                StartNode.append(Zero_Location)
        # Insert from the largest position downwards so earlier insertions
        # do not shift the positions that are still pending.
        StartNode.sort(reverse=True)
        for eachLocation in StartNode:
            Perm.insert(eachLocation, 0)
        # Wrap the chromosome with the leading and trailing depot visit.
        return [0] + Perm + [0]

    # Chromosome decoding ==================================
    def decodeInd(self, ind):
        """Decode a chromosome back into route segments.

        Each returned route starts and ends with the depot node 0.
        """
        indCopy = np.array(deepcopy(ind))  # copy so the chromosome itself is never modified
        idxList = list(range(len(indCopy)))
        zeroIdx = np.asarray(idxList)[indCopy == 0]  # positions of every depot visit
        routes = []
        # Each pair of consecutive depot positions delimits one route.
        for i, j in zip(zeroIdx[0::], zeroIdx[1::]):
            routes.append(ind[i:j] + [0])
        return routes

    # Distance helper ==================================
    def calDist(self, pos1, pos2):
        """Return the Euclidean distance between two (x, y) tuples."""
        return np.sqrt((pos1[0] - pos2[0]) * (pos1[0] - pos2[0]) + (pos1[1] - pos2[1]) * (pos1[1] - pos2[1]))

    # Penalty function ==================================
    def loadPenalty(self, routes):
        """Penalty term for infeasible individuals.

        Crossover and mutation can produce individuals that violate the load
        or working-time constraints; every violated unit is charged tenfold.
        """
        penalty = 0
        # Load violation: max(0, (routeLoad - maxLoad) * 10) per route.
        for eachRoute in routes:
            routeLoad = np.sum([self.dataDict['Demand'][j] for j in eachRoute])
            penalty += max(0, (routeLoad - self.dataDict['MaxLoad']) * 10)
        # Working-time violation: max(0, (workTime - maxWorkTime) * 10) per
        # route, where workTime = driving minutes + 5 min per customer.
        RoutesWorkTime = self.calRouteWorkTime(routes, self.dataDict)
        for eachRouteWorkTime in RoutesWorkTime:
            penalty += max(0, (eachRouteWorkTime - self.dataDict['WorkTime']) * 10)
        return penalty

    # Total route length ==================================
    def calRouteLen(self, routes, dataDict):
        """Return the summed length of all given routes.

        Falls back to ``self.dataDict`` when ``dataDict`` is None (the
        original ``dataDict = dataDict`` was a no-op self-assignment, so a
        None argument crashed on the following subscript).
        """
        if dataDict is None:
            dataDict = self.dataDict  # fixed: was a no-op self-assignment
        totalDistance = 0  # accumulated length over all routes
        for eachRoute in routes:
            # Accumulate the distance between every pair of adjacent nodes.
            for i, j in zip(eachRoute, eachRoute[1::]):
                totalDistance += self.calDist(dataDict['NodeCoor'][i], dataDict['NodeCoor'][j])
        return totalDistance

    # Per-route working time ==================================
    def calRouteWorkTime(self, routes, dataDict):
        """Return a list with the working time of each route.

        Working time = driving minutes (length / speed * 60) plus 5 minutes
        of service per customer (the two depot visits are excluded).
        """
        RoutesLength = self.calSubRouteLen(routes, dataDict)
        WorkTimes = []
        for route, routeLength in zip(routes, RoutesLength):
            WorkTimes.append((routeLength / dataDict['Speed']) * 60 + 5 * (len(route) - 2))
        return WorkTimes

    # Per-route load ==================================
    def calLoad(self, routes):
        """Return the total demand carried on each route.

        Fixed: the original summed ``Demand[j]`` over the positional loop
        index ``j`` instead of over the actual node ids in the route, so it
        returned wrong loads for every non-trivial route.
        """
        loads = []
        for eachRoute in routes:
            loads.append(sum(self.dataDict['Demand'][node] for node in eachRoute))
        return loads

    # Per-route length ==================================
    def calSubRouteLen(self, routes, dataDict):
        """Return a list with the length of each individual route.

        Falls back to ``self.dataDict`` when ``dataDict`` is None (same
        no-op self-assignment fix as in ``calRouteLen``).
        """
        if dataDict is None:
            dataDict = self.dataDict  # fixed: was a no-op self-assignment
        Length = []
        for everyRoute in routes:
            # Accumulate the distance between every pair of adjacent nodes.
            Distance = 0
            for m, n in zip(everyRoute, everyRoute[1::]):
                Distance += self.calDist(dataDict['NodeCoor'][m], dataDict['NodeCoor'][n])
            Length.append(Distance)
        return Length

    # Fitness evaluation (length + penalty) ==================================
    def evaluate(self, ind):
        """Return a 1-tuple: decoded total route length plus the penalty."""
        routes = self.decodeInd(ind)  # decode the individual into routes
        totalDistance = self.calRouteLen(routes, self.dataDict)
        return (totalDistance + self.loadPenalty(routes)),

    # Crossover ==================================
    def genChild(self, ind1, ind2, nTrail=5):
        """Produce one child using the crossover described in the cited
        study on time-windowed routing for electric vehicles.

        A random sub-route of ind1 is preserved intact; the remaining
        customers are ordered as in ind2, randomly re-broken ``nTrail``
        times, and the best-scoring arrangement is returned.
        """
        routes1 = self.decodeInd(ind1)  # decode ind1 into routes
        numSubroute1 = len(routes1)  # number of sub-routes
        subroute1 = routes1[np.random.randint(0, numSubroute1)]
        # Customers not covered by subroute1, ordered as they appear in ind2.
        unvisited = set(ind1) - set(subroute1)
        unvisitedPerm = [digit for digit in ind2 if digit in unvisited]
        # Repeat the random breaking several times and keep the fittest plan.
        bestRoute = None
        bestFit = np.inf
        for _ in range(nTrail):
            # Break the sequence into numSubroute1 - 1 sub-routes.  The end
            # index is appended to breakPos explicitly; the original relied
            # on the loop variable j leaking out of the zip loop, which
            # raised a NameError whenever there were no interior break
            # points (numSubroute1 == 2).  The break count is also capped so
            # random.sample cannot raise ValueError on short sequences.
            nBreaks = max(0, min(numSubroute1 - 2, len(unvisitedPerm) - 1))
            breakPos = [0] + random.sample(range(1, len(unvisitedPerm)), nBreaks) + [len(unvisitedPerm)]
            breakPos.sort()
            breakSubroute = []
            for i, j in zip(breakPos[0::], breakPos[1::]):
                breakSubroute.append([0] + unvisitedPerm[i:j] + [0])
            # Add the preserved subroute1 to complete the delivery plan.
            breakSubroute.append(subroute1)
            # Score this arrangement and keep the best one seen so far.
            routesFit = self.calRouteLen(breakSubroute, self.dataDict) + self.loadPenalty(breakSubroute)
            if routesFit < bestFit:
                bestRoute = breakSubroute
                bestFit = routesFit
        # Flatten the best set of routes back into a single chromosome.
        child = []
        for eachRoute in bestRoute:
            child += eachRoute[:-1]
        return child + [0]

    def crossover(self, ind1, ind2):
        """In-place crossover: each parent is replaced by one child."""
        ind1[:], ind2[:] = self.genChild(ind1, ind2), self.genChild(ind2, ind1)
        return ind1, ind2

    # Mutation ==================================
    def opt(self, route, dataDict, k=2):
        """Improve one route with a single-pass 2-opt search.

        route -- node sequence starting and ending at the depot.
        Returns the best route found (the input route if no reversal helps).
        """
        nCities = len(route)  # number of nodes in the route
        optimizedRoute = route  # best route so far
        minDistance = self.calRouteLen([route], self.dataDict)  # its length
        for i in range(1, nCities - 2):
            for j in range(i + k, nCities):
                if j - i == 1:
                    continue  # reversing a single node changes nothing
                # Reverse the segment [i, j) and keep it if it is shorter.
                reversedRoute = route[:i] + route[i:j][::-1] + route[j:]
                reversedRouteDist = self.calRouteLen([reversedRoute], self.dataDict)
                if reversedRouteDist < minDistance:
                    minDistance = reversedRouteDist
                    optimizedRoute = reversedRoute
        return optimizedRoute

    def mutate(self, ind):
        """Mutate in place by locally optimizing every sub-route with 2-opt."""
        routes = self.decodeInd(ind)
        optimizedAssembly = []
        for eachRoute in routes:
            optimizedAssembly.append(self.opt(eachRoute, self.dataDict))
        # Reassemble the optimized routes into one chromosome.
        child = []
        for eachRoute in optimizedAssembly:
            child += eachRoute[:-1]
        ind[:] = child + [0]
        return ind,

    @staticmethod
    def _publish_progress(logbook, halloffame, population, LogDeliver,
                          BestIndividual, BestDeliver, AvgDeliver, PopulationDeliver):
        """Push the latest GA statistics to the GUI queues under the global lock."""
        # `with` on the module-level RLock replaces the manual
        # acquire()/try/finally release() of the original.
        with threadLock:
            stream = logbook.stream
            best = halloffame.items[0]
            BestDeliver.put(logbook.select('min'))
            AvgDeliver.put(logbook.select('avg'))
            LogDeliver.put(stream)
            BestIndividual.put(best)
            PopulationDeliver.put(population)

    @staticmethod
    def eaMuPlusLambda(population, toolbox, mu, lambda_, cxpb, mutpb, ngen, LogDeliver, BestIndividual,
                       BestDeliver, AvgDeliver, PopulationDeliver, stats=None, halloffame=None, verbose=__debug__):
        """(mu + lambda) evolution loop that streams progress to GUI queues.

        Marked @staticmethod because it never uses an instance (the original
        definition lacked ``self`` and therefore only worked when called on
        the class itself).  Apart from the queue-based reporting this
        mirrors deap.algorithms.eaMuPlusLambda.
        """
        logbook = tools.Logbook()
        logbook.header = ['gen', 'nevals'] + (stats.fields if stats else [])

        # Evaluate every individual whose fitness is not yet valid.
        invalid_ind = [ind for ind in population if not ind.fitness.valid]
        fitnesses = toolbox.map(toolbox.evaluate, invalid_ind)
        for ind, fit in zip(invalid_ind, fitnesses):
            ind.fitness.values = fit

        if halloffame is not None:
            halloffame.update(population)

        record = stats.compile(population) if stats is not None else {}
        logbook.record(gen=0, nevals=len(invalid_ind), **record)

        if verbose:
            GA_Functions._publish_progress(logbook, halloffame, population, LogDeliver,
                                           BestIndividual, BestDeliver, AvgDeliver, PopulationDeliver)

        # Begin the generational process.
        for gen in range(1, ngen + 1):
            # Vary the population.
            offspring = varOr(population, toolbox, lambda_, cxpb, mutpb)

            # Evaluate the individuals with an invalid fitness.
            invalid_ind = [ind for ind in offspring if not ind.fitness.valid]
            fitnesses = toolbox.map(toolbox.evaluate, invalid_ind)
            for ind, fit in zip(invalid_ind, fitnesses):
                ind.fitness.values = fit

            # Update the hall of fame with the generated individuals.
            if halloffame is not None:
                halloffame.update(offspring)

            # Select the next generation from parents plus offspring.
            population[:] = toolbox.select(population + offspring, mu)

            # Record statistics and report progress.
            record = stats.compile(population) if stats is not None else {}
            logbook.record(gen=gen, nevals=len(invalid_ind), **record)
            if verbose:
                GA_Functions._publish_progress(logbook, halloffame, population, LogDeliver,
                                               BestIndividual, BestDeliver, AvgDeliver, PopulationDeliver)

        return population, logbook, halloffame
