from os import abort

from fileStream import FileStream
from dataClean import smooth
from analyse import FREQUENCE, Analyser, MultiPersonRTPeakAnalyser, MultiPersonRTPeakAndDataAnalyser, RTPeakAnalyser, RTPeakAndFrequenceAnalyser, SplitAnalyser, MultiPersonRTPeakAndFrequenceAnalyser, RTPeakAndDatasAnalyser
from evaluate import Evaluator, EvaluatorWithStandard, GraphEvaluator
from socketStream import SocketStream
from generateDataForUnity import generateMultiPersonUnityData
from transfer import sendConsistencyToUnity
from visualize import showData, showArray, showWithTimeMultiPerson
import warnings

# Compare against the standard motion and compute consistency.
def analyser_and_evaluator_test():
    """Stream single-person frames, extract per-action feature vectors,
    and score each finished action against the standard motion.

    Deprecated; kept for reference only.
    """
    warnings.warn("此方法已废弃，不推荐使用", DeprecationWarning)
    # stream = SocketStream("0.0.0.0", 8080)
    stream = FileStream("./data/fast/1.txt")  # in this data, channel 7 comes first and channel 0 last
    analyser = Analyser()
    evaluator = EvaluatorWithStandard()
    frameId = 0
    limit = 20000  # only take the first `limit` frames of the data
    for channel in stream:
        # check before counting so exactly `limit` frames are analysed
        # (the old `frameId == limit` break dropped the last frame)
        if frameId >= limit:
            break
        frameId = frameId + 1
        # channel = smooth(channel)
        res = analyser.analyse(channel)  # returns this action's feature vector once an action completes
        if res is not None:  # PEP 8: compare to None with `is not`
            evaluator.evaluate(res)  # score similarity to the standard motion
    print(frameId)

# Split one person's recording into several segments to simulate multi-person data.
def split_analyser_test(personNum, filePath):
    """Split the single-person data at `filePath` into `personNum` segments.

    Uses SplitAnalyser to find the start frame of each paddle action, then
    assigns an equal number of consecutive actions to each simulated person.

    Returns a list of `personNum` items, each a frames x 8-channel slice.
    Raises ValueError if no action boundaries could be detected.
    Deprecated; kept for reference only.
    """
    warnings.warn("此方法已废弃，不推荐使用", DeprecationWarning)
    stream = FileStream(filePath)  # in this data, channel 7 comes first and channel 0 last
    limitActionsNum = 29  # based on the data, only the first limitActionsNum paddle actions are used
    analyser = SplitAnalyser(limitActionsNum)
    actionsStart = None
    for channel in stream:
        channel = smooth(channel)
        actionsStart = analyser.analyse(channel)
        if actionsStart is not None:  # PEP 8: compare to None with `is not`
            print(actionsStart)
            break
    if actionsStart is None:
        # previously this fell through to a confusing NameError below
        raise ValueError("no action boundaries detected in " + filePath)

    actionsNum = limitActionsNum // personNum  # actions assigned to each person
    persons = []  # persons is personNum * frames * 8 data
    data = stream.loadFile()
    for i in range(personNum):
        startFrame = actionsStart[i * actionsNum]
        endFrame = actionsStart[(i + 1) * actionsNum]
        persons.append(data[startFrame: endFrame])

    return persons

# Compute the consistency of several people.
def multi_person_consitency(personNum, filePath):
    """Split the recording at `filePath` into `personNum` simulated people,
    evaluate their consistency over time, and plot the result.

    Deprecated; kept for reference only.
    """
    warnings.warn("此方法已废弃，不推荐使用", DeprecationWarning)
    # simulate personNum people from the single recording at filePath
    persons = split_analyser_test(personNum, filePath)
    consistency = Evaluator().evaluate(persons)
    showArray(consistency)


# Analyse multi-person motion.
# Detects each channel's peaks in real time; each detection is sent to Unity3D.
def multi_person_rtpeak_analyser_test(personNum, filePath):
    """Feed simulated multi-person frames to MultiPersonRTPeakAnalyser.

    Deprecated; kept for reference only.
    """
    warnings.warn("此方法已废弃，不推荐使用", DeprecationWarning)
    # stream = SocketStream("0.0.0.0", 8080)
    persons = split_analyser_test(personNum, filePath)
    analyser = MultiPersonRTPeakAnalyser(personNum)
    frameCount = len(persons[0])
    for idx in range(frameCount):
        # channel = smooth(channel)
        analyser.analyse([person[idx] for person in persons])

def rtpeak_and_frequence_analyser():
    """Run RTPeakAndFrequenceAnalyser over the first 20000 frames of new_data.

    Deprecated; kept for reference only.
    """
    warnings.warn("此方法已废弃，不推荐使用", DeprecationWarning)
    frames = FileStream("./data/new_data.txt").loadFile()[:20000]
    analyser = RTPeakAndFrequenceAnalyser()
    for frame in frames:
        analyser.analyse(frame, 0)


def multi_person_show(path):
    warnings.warn("此方法已废弃，不推荐使用", DeprecationWarning)
    persons = split_analyser_test(6, path)
    showWithTimeMultiPerson(persons)


# Analyse single-person motion.
# Detects each channel's peaks in real time; each detection is sent to Unity3D.
def analyser_test():
    """Stream single-person frames into RTPeakAndDatasAnalyser, stopping after
    the first `limit` frames, and print the number of frames consumed."""
    # stream = SocketStream("0.0.0.0", 8080)
    stream = FileStream("./data/slow/3.txt")  # in this data, channel 7 comes first and channel 0 last
    analyser = RTPeakAndDatasAnalyser()
    frameId = 0
    limit = 20000
    for channel in stream:
        # check before counting so exactly `limit` frames are analysed
        # (the old `frameId == limit` break dropped the last frame)
        if frameId >= limit:
            break
        frameId = frameId + 1
        # channel = smooth(channel)
        analyser.analyse(channel)
    print(frameId)

# Analyse multi-person motion.
# Detects each channel's peaks in real time; each detection is sent to Unity3D.
def multi_person_analyser_test():
    """Load six recordings, analyse them frame-by-frame as six people, and
    stream the per-frame consistency value (with its timestamp) to Unity."""
    # stream = SocketStream("0.0.0.0", 8080)
    # persons = split_analyser_test(personNum, filePath)
    frameLimit = 8000
    files = ["./data/fast/1.txt", "./data/fast/2.txt", "./data/fast/3.txt", "./data/slow/1.txt", "./data/slow/2.txt", "./data/slow/3.txt"]
    personNum = len(files)
    persons = []
    for path in files:
        frames = FileStream(path).loadFile()[:frameLimit]
        persons.append(frames)
        # shrink the limit so iteration never runs past the shortest recording
        frameLimit = min(frameLimit, len(frames))

    analyser = MultiPersonRTPeakAndDataAnalyser(personNum)
    evaluator = GraphEvaluator()
    for frameIdx in range(frameLimit):
        # channel = smooth(channel)
        frames = [person[frameIdx] for person in persons]
        analyser.analyse(frames)
        consistency = evaluator.evaluate(frames)

        # send the consistency datapoint (time in seconds, value) to Unity
        sendConsistencyToUnity([frameIdx / FREQUENCE, consistency])


# Analyse data consistency: walk all frames and compute the per-frame average. (lgg)
def multi_person_graph_consistency_test():
    """Load one recording per file, evaluate graph consistency on every frame,
    plot the series, and return it.

    Returns a list of length `frameLimit` (the shortest recording, capped
    at 8000 frames), one consistency value per frame.
    """
    # stream = SocketStream("0.0.0.0", 8080)
    # persons = split_analyser_test(personNum, filePath)
    frameLimit = 8000
    files = ["./data/fast/1.txt", "./data/fast/2.txt", "./data/fast/3.txt", "./data/slow/1.txt", "./data/slow/2.txt", "./data/slow/3.txt"]
    # persons holds personNum people, frameLimit frames each, 8 channels per frame:
    # dimension 1 = person, dimension 2 = frame, dimension 3 = channel
    persons = []
    # derive the person count from the file list instead of hard-coding 6,
    # matching multi_person_analyser_test
    for path in files:
        data = FileStream(path).loadFile()[:frameLimit]
        persons.append(data)
        frameLimit = min(frameLimit, len(data))

    evaluator = Evaluator()
    # one consistency value per frame, evaluated across all people
    res = [evaluator.evaluate2([person[i] for person in persons])
           for i in range(frameLimit)]
    # print(len(res))
    # print(res)
    showArray(res)
    return res


if __name__ == "__main__":
    filePath = "./data/new_data.txt"

    # Alternative entry points, kept for manual experiments:
    # analyser_and_evaluator_test()
    # multi_person_consitency(4)
    # generateMultiPersonUnityData(n= 4, during= 2, T= 2.5, repeat= 10)
    # multi_person_rtpeak_analyser_test(personNum=6, filePath=filePath)
    # rtpeak_and_frequence_analyser()
    # multi_person_rtpeak_and_frequence_analyser_test(personNum=6, filePath=filePath)
    # multi_person_show(filePath)
    # multi_person_graph_consistency_test()
    
    # analyser_test()
    # Default run: six-person real-time peak analysis with Unity streaming.
    multi_person_analyser_test()