//
// Created by Administrator on 2023/4/11.
//

#ifndef WEBRTC_MODEL_FRAMEANALYZE_H
#define WEBRTC_MODEL_FRAMEANALYZE_H

#include <bits/stdc++.h>

using namespace std;

// One decoded-frame record parsed from log/frameRender.log.
struct FrameData {
    long long id;             // frame id from the log
    long long renderTime;     // render timestamp, ms
    long long receiveTime;    // receive timestamp, ms
    int isKeyFrame;           // non-zero when this frame is a key frame
    long long uuid;
    long long saveFrameTime;  // timestamp the record was written (units as logged)

    FrameData(long long id, long long renderTime, long long receiveTime,
              int isKeyFrame, long long uuid, long long saveFrameTime)
            : id{id},
              renderTime{renderTime},
              receiveTime{receiveTime},
              isKeyFrame{isKeyFrame},
              uuid{uuid},
              saveFrameTime{saveFrameTime} {}
};

class FrameAnalyze {
private:
    //帧数据
    vector<FrameData> frameList;
public:
    const int IntervalThreshold = 1000 / 20;

    FrameAnalyze() {
        //输入数据
        inputFrameList();
    }

    /**
    * 获取平均帧率
    * @return
    */
    double getAvgFrameRate() {
        if (frameList.size() < 2) { throw exception(); }
        return static_cast<double>((frameList[0].receiveTime - frameList.back().receiveTime)) /
               static_cast<double>(frameList.size());
    }

    /**
    * 分析帧间隔
    */
    void analyzeFrameRenderInterval() {
        //处理帧渲染间隔分布
        handleFrameInterval();
        //统计关键帧的间隔
        handleKeyFrameInterval();
        //统计卡尔曼滤波器的优化效果，等距统计
        handleReceiveRenderInterval(IntervalThreshold);
        //统计卡尔曼滤波器的优化效果，累计统计
        handleReceiveRenderAccumulationInterval(IntervalThreshold);
    }

    void inputFrameList() {
        ifstream frameRenderTimeInput;
        frameRenderTimeInput.open("log/frameRender.log", ios::in);
        long long id_;
        //单位 ms
        long long renderTime_;
        long long receiveTime_;
        int isKeyFrame_;
        long long uuid_;
        long long saveFrameTime_;
        while (frameRenderTimeInput >> id_ >> renderTime_ >> receiveTime_ >> isKeyFrame_ >> uuid_ >> saveFrameTime_) {
            frameList.emplace_back(id_, renderTime_, receiveTime_, isKeyFrame_, uuid_, saveFrameTime_);
        }
        frameRenderTimeInput.close();
    }

    const vector<FrameData> &getFrameList() const {
        return frameList;
    }

    //将多个细粒度的map整合到一起统计数据
    void handleReceiveRenderInterval(const int intervalThreshold) {
        map<long long, vector<long long>> mapReceiveRenderInterval;
        for (int minutesCount = 5; minutesCount <= 180; minutesCount += 5) {

            handleReceiveRenderIntervalInternal(intervalThreshold, minutesCount * 60 * 1000,
                                                mapReceiveRenderInterval);
        }
        //输出
        outputReceiveRenderMap(mapReceiveRenderInterval, "log/mapReceiveRenderInterval-" + to_string(0) + ".csv");
    }

    void handleReceiveRenderAccumulationInterval(int intervalThreshold) {
        map<long long, vector<long long>> accumulationMapReceiveRenderInterval;
        //对于累计时间而言，细粒度越小越好，这里取5分钟
        handleReceiveRenderAccumulationIntervalInternal(intervalThreshold, 5 * 60 * 1000,
                                                        accumulationMapReceiveRenderInterval);
        outputReceiveRenderMap(accumulationMapReceiveRenderInterval, "log/accumulationMapReceiveRenderInterval.csv");
    }

    //累计统计映射关系
    void handleReceiveRenderAccumulationIntervalInternal(int intervalThreshold,
                                                    int handlePerMs,
                                                    map<long long, vector<long long>> &accumulationMapReceiveRenderInterval) {
        if (frameList.empty()) { return; }
        auto firstFrame = frameList[0];
        //这两个变量统计控制操作时机
        long long intervalBeginTime = firstFrame.receiveTime;
        long long intervalMaxTime = firstFrame.receiveTime;
        //这两个变量用于统计实际的数据
        long long excessiveReceiveIntervalTotal = 0;
        long long excessiveRenderIntervalTotal = 0;
        for (int i = 1; i - 1 < frameList.size(); i++) {
            //统计卡顿时长
            if (i < frameList.size()) {
                intervalMaxTime = max(intervalMaxTime, frameList[i].receiveTime);
                auto receiveInterval = frameList[i].receiveTime - frameList[i - 1].receiveTime;
                auto renderInterval = frameList[i].renderTime - frameList[i - 1].renderTime;
                if (renderInterval > intervalThreshold) {
                    excessiveRenderIntervalTotal += renderInterval;
                }
                if (receiveInterval > intervalThreshold) {
                    excessiveReceiveIntervalTotal += receiveInterval;
                }
            }
            //到达了指定时间，或者到达了帧列表的末尾(末尾不统计，这个点和其他数据偏差很大)
            if (/*i == frameList.size() || */intervalMaxTime - intervalBeginTime > handlePerMs) {
                accumulationMapReceiveRenderInterval[excessiveReceiveIntervalTotal].push_back(
                        excessiveRenderIntervalTotal);
                intervalBeginTime = frameList[i].receiveTime;
                intervalMaxTime = 0;
            }
        }
    }

    //统计关键帧的间隔
    void handleKeyFrameInterval() {
        map<int, int> keyFrameIntervalDistribution;
        long long index = 0;
        while (index < frameList.size() && frameList[index].isKeyFrame == false) {
            index++;
        }
        int intervalCount = 0;
        double totalCount = 0;
        for (; index < frameList.size(); index++) {
            if (frameList[index].isKeyFrame) {
                if (intervalCount != 0) {
                    keyFrameIntervalDistribution[intervalCount]++;
                    totalCount++;
                    intervalCount = 0;
                }
            }
            intervalCount++;
        }
        ofstream keyFrameIntervalDistributionOutput;
        keyFrameIntervalDistributionOutput.open("log/keyFrameIntervalDistribution.csv", ios::out | ios::trunc);
        for (const auto &item: keyFrameIntervalDistribution) {
            keyFrameIntervalDistributionOutput << item.first << "," << item.second / totalCount << endl;
        }
    }

    /**
     * 统计卡尔曼滤波器的效果
     * @param frameList 帧数据
     * @param intervalThreshold 卡顿时间阈值，间隔超过这个阈值就视为卡顿
     * @param handlePerMs 统计细粒度，每隔这么多时间统计一次
     * @param mapReceiveRenderInterval 映射关系保存在这个map中
     */
    void handleReceiveRenderIntervalInternal(int intervalThreshold, int handlePerMs,
                                             map<long long, vector<long long>> &mapReceiveRenderInterval) {
        if (frameList.empty()) { return; }
        auto firstFrame = frameList[0];
        long long intervalBeginTime = firstFrame.receiveTime;
        long long intervalMaxTime = firstFrame.receiveTime;
        long long excessiveReceiveIntervalTotal = 0;
        long long excessiveRenderIntervalTotal = 0;
        for (int i = 1; i - 1 < frameList.size(); i++) {
            //统计卡顿时长
            if (i < frameList.size()) {
                intervalMaxTime = max(intervalMaxTime, frameList[i].receiveTime);
                auto receiveInterval = frameList[i].receiveTime - frameList[i - 1].receiveTime;
                auto renderInterval = frameList[i].renderTime - frameList[i - 1].renderTime;
                if (renderInterval > intervalThreshold) {
                    excessiveRenderIntervalTotal += renderInterval;
                }
                if (receiveInterval > intervalThreshold) {
                    excessiveReceiveIntervalTotal += receiveInterval;
                }
            }
            //到达了指定时间，或者到达了帧列表的末尾(末尾不统计，这个点和其他数据偏差很大)
            if (/*i == frameList.size() || */intervalMaxTime - intervalBeginTime > handlePerMs) {
                mapReceiveRenderInterval[excessiveReceiveIntervalTotal].push_back(excessiveRenderIntervalTotal);
                excessiveReceiveIntervalTotal = 0;
                excessiveRenderIntervalTotal = 0;
                intervalBeginTime = frameList[i].receiveTime;
                intervalMaxTime = 0;
            }
        }
    }

    /**
     * 将map以csv格式的方式输出
     * @param mapReceiveRenderInterval
     */
    static void outputReceiveRenderMap(map<long long int, vector<long long int>> &mapReceiveRenderInterval,
                                       const string &outputFileName) {
        ofstream receiveRenderIntervalOutput;
        //trunc和app不能同时设置
        receiveRenderIntervalOutput.open(outputFileName, ios::out | ios::trunc);
        for (const auto &item: mapReceiveRenderInterval) {
            auto receiveTimeInterval = item.first;
            auto renderTimeInterval = item.second;
            double renderTimeIntervalSum = static_cast<double>(accumulate(renderTimeInterval.begin(),
                                                                          renderTimeInterval.end(), 0.0));
            receiveRenderIntervalOutput << receiveTimeInterval << ","
                                        << static_cast<long long>(renderTimeIntervalSum /
                                                                  static_cast<double>(renderTimeInterval.size()))
                                        << endl;
        }
    }

    //处理帧渲染间隔分布
    void handleFrameInterval() {
        map<long long, double> frameIntervalDistribution;
        for (int i = 1; i < frameList.size(); i++) {
            frameIntervalDistribution[frameList[i].renderTime - frameList[i - 1].renderTime]++;
        }

        ofstream frameIntervalDistributionOutput;
        frameIntervalDistributionOutput.open("log/frameIntervalDistribution.csv", ios::out | ios::trunc);
        for (auto &item: frameIntervalDistribution) {
            frameIntervalDistributionOutput << item.first << ","
                                            << item.second / static_cast<double>(frameList.size() - 1) << endl;
        }
    }


};

#endif //WEBRTC_MODEL_FRAMEANALYZE_H
