import numpy as np

from constants import *
from data_process.DataAugmentation import DataAugmentation
from data_process.DataGenerator import DataGenerator
from data_process.path_algorithm.FromMultiDirAlgorithm import MultiDirDiffAlgorithm
from data_process.path_algorithm.FromOneDirAlgorithm import SingleDirSameAlgorithm
from data_process.path_algorithm.MultiDirSameAlgorithm import MultiDirSameAlgorithm
from utils.file_util import *


class DatasetGenerator():
    """Builds the train/test dataset end to end.

    Pipeline: generate train/test path files, extract per-wav features and
    labels, zero-pad every channel to a fixed time length, min-max normalize,
    and pickle the result to ``dataset/``.

    NOTE(review): ``emsembleProcess`` ("ensemble" is meant) is the public
    entry point; the misspelled name is kept for backward compatibility.
    """

    def __init__(self, params):
        """Cache the relevant fields of the parameter object.

        Args:
            params: configuration object; must expose ``rootPath``,
                ``volun``, ``innerDirName``, ``vocabPath``, ``mapPath``,
                ``featureMaxLen`` and ``isGeneratePath`` (and, by the time
                the pipeline runs, ``pathGenerateType`` and ``dataMode``).
        """
        self.params = params
        self.rootPath = params.rootPath
        self.volun = params.volun
        self.innerDirName = params.innerDirName
        self.vocabPath = params.vocabPath
        self.mapPath = params.mapPath
        self.trainPath = TRAIN_PATH
        self.testPath = TEST_PATH
        self.featureMaxLen = params.featureMaxLen
        self.isGeneratePath = params.isGeneratePath

    def loadTimeMark(self):
        """Load the pickled time-mark table from ``TOTAL_MARK_PATH``."""
        with open(TOTAL_MARK_PATH, 'rb') as f:
            self.timeMark = pickle.load(f)

    def generateTrainTestPathFile(self):
        """Generate the train/test path files with the configured algorithm.

        Raises:
            ValueError: if ``params.pathGenerateType`` is not a supported
                path-generation type.
        """
        self.pathGenerateType = self.params.pathGenerateType
        # Dispatch table replaces the old duplicated if/elif validation
        # (the original checked membership and then re-checked the same
        # three strings, with an unreachable bare `raise ValueError`).
        algoByType = {
            'single_same': SingleDirSameAlgorithm,
            'multi_same': MultiDirSameAlgorithm,
            'multi_diff': MultiDirDiffAlgorithm,
        }
        algoClass = algoByType.get(self.pathGenerateType)
        if self.pathGenerateType not in PATH_GENERATE_TYPE or algoClass is None:
            raise ValueError(
                f'unsupported pathGenerateType: {self.pathGenerateType!r}')
        algoClass(self.params).generatePath()

    def addPrefixOfPathFile(self, filePath, prefix):
        """Rewrite ``filePath`` in place, joining ``prefix`` onto every line.

        Args:
            filePath: text file with one relative path per line.
            prefix: directory prefix prepended via ``os.path.join``.
        """
        allPath = readDataFromFile(filePath)
        fullPath = [os.path.join(prefix, path) for path in allPath]
        with open(filePath, 'w') as f:
            f.writelines(f'{curPath}\n' for curPath in fullPath)

    def processData(self, filePath):
        """Extract features/labels for every wav path listed in ``filePath``.

        Returns:
            ``[feature, label]``: two parallel lists with one entry per wav
            file, produced by ``DataGenerator.createdata()``.
        """
        allPath = readDataFromFile(filePath)
        feature = []
        label = []
        for wavPath in allPath:
            dataGenerator = DataGenerator(wavPath, self.params)
            sinFeature, sinLabel = dataGenerator.createdata()
            feature.append(sinFeature)
            label.append(sinLabel)
        return [feature, label]

    def normalizeData(self, trainFeature, testFeature, nType='local'):
        """Min-max normalize the train/test feature arrays.

        Args:
            trainFeature, testFeature: numpy feature arrays.
            nType: ``'global'`` uses one shared (min, max) over both splits;
                ``'local'`` (default) normalizes each (sample, channel)
                slice with its own extrema.

        Returns:
            ``(trainFeature, testFeature)`` normalized.

        Raises:
            ValueError: on an unknown ``nType``.
        """
        if nType == 'global':
            # One shared range across both splits keeps them comparable.
            valMax = max(np.max(self.trainAllMax), np.max(self.testAllMax))
            valMin = min(np.min(self.trainAllMin), np.min(self.testAllMin))
            span = valMax - valMin
            trainFeature = (trainFeature - valMin) / span
            testFeature = (testFeature - valMin) / span
        elif nType == 'local':
            trainFeature = self.localNormalize(
                trainFeature, self.trainAllMax, self.trainAllMin)
            testFeature = self.localNormalize(
                testFeature, self.testAllMax, self.testAllMin)
        else:
            raise ValueError(
                f"nType must be 'global' or 'local', got {nType!r}")
        return trainFeature, testFeature

    def saveData(self, data, savePath):
        """Pickle ``data`` to ``savePath``."""
        with open(savePath, 'wb') as f:
            pickle.dump(data, f)

    def localNormalize(self, feature, maxArr, minArr):
        """Normalize each (sample, channel) slice with its own min/max.

        Args:
            feature: numpy array, leading dims (n_samples, n_channels).
            maxArr, minArr: per-slice extrema, shape (n_samples, n_channels).

        Returns:
            ``feature``, normalized in place.

        NOTE(review): a constant slice (max == min) divides by zero, exactly
        as the original code did; numpy emits a runtime warning there.
        """
        n = feature.shape[0]
        c = feature.shape[1]
        for i in range(n):
            for j in range(c):
                span = maxArr[i][j] - minArr[i][j]
                feature[i][j] = (feature[i][j] - minArr[i][j]) / span
        return feature

    def getAllLimit(self, feature, limitType):
        """Compute the per-(sample, channel) extremum.

        Args:
            feature: nested list ``feature[i][j]`` of numpy arrays.
            limitType: ``'max'`` for maxima; any other value yields minima
                (matching the original if/else behavior).

        Returns:
            numpy array of shape (n_samples, n_channels).
        """
        reduceFn = np.max if limitType == 'max' else np.min
        return np.array([[reduceFn(chan) for chan in sample]
                         for sample in feature])

    def padDataAndTranspose(self, feature):
        """Zero-pad each channel along time to ``featureMaxLen``, then
        transpose to (n_samples, n_channels, time, feature_dim).

        All padding is appended on the right (time axis); the two-half split
        is kept from the original, commented-out symmetric-pad variant.

        Raises:
            ValueError: if any channel is longer than ``featureMaxLen``
                (previously surfaced as numpy's opaque "negative dimensions
                are not allowed" error from ``np.zeros``).
        """
        n = len(feature)
        c = len(feature[0])
        for i in range(n):
            for j in range(c):
                sinF = feature[i][j]
                feaLen, dataLen = sinF.shape
                padLen = self.featureMaxLen - dataLen
                if padLen < 0:
                    raise ValueError(
                        f'feature length {dataLen} exceeds '
                        f'featureMaxLen {self.featureMaxLen}')
                halfPadLen = padLen // 2
                leftPad = np.zeros((feaLen, halfPadLen))
                rightPad = np.zeros((feaLen, padLen - halfPadLen))
                feature[i][j] = np.concatenate((sinF, leftPad, rightPad),
                                               axis=1)
        feature = np.array(feature)
        return feature.transpose((0, 1, 3, 2))

    def emsembleProcess(self):
        """Run the whole pipeline and pickle the datasets.

        (Misspelled name kept for backward compatibility with callers.)
        """
        self.generateTrainTestPathFile()
        trainData = self.processData(self.trainPath)
        testData = self.processData(self.testPath)

        # Extrema are computed BEFORE padding, so the zero columns appended
        # later are not part of the normalization range.
        self.trainAllMin = self.getAllLimit(trainData[0], 'min')
        self.trainAllMax = self.getAllLimit(trainData[0], 'max')
        self.testAllMin = self.getAllLimit(testData[0], 'min')
        self.testAllMax = self.getAllLimit(testData[0], 'max')

        trainData[0] = self.padDataAndTranspose(trainData[0])
        testData[0] = self.padDataAndTranspose(testData[0])

        trainData[0], testData[0] = self.normalizeData(trainData[0],
                                                       testData[0])

        if self.params.dataMode == DataModoType.SINGLE:
            augment = DataAugmentation(trainData)
            augment.augAndSaveData()
        else:
            self.saveData(trainData, "dataset/train.dp")

        self.saveData(testData, "dataset/test.dp")
