package com.zyf.chapter01.practice.v3;

import com.zyf.chapter01.practice.v2.Aggregator;
import com.zyf.chapter01.practice.v2.StatViewer;
import com.zyf.chapter01.principle.practice.MetricsStorage;
import com.zyf.chapter01.principle.practice.RequestInfo;
import com.zyf.chapter01.principle.practice.RequestStat;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * 将组装类的调用逻辑抽象出来
 * @since v3
 * @author yifeng
 */
public abstract class ScheduledReqorter {
    // NOTE(review): class name looks like a typo for "ScheduledReporter"; kept as-is
    // because renaming would break existing callers/subclasses.

    /**
     * Maximum size of a single statistics segment: 10 minutes in milliseconds.
     * Large time ranges are split into segments of at most this length so each
     * storage query and aggregation stays small.
     */
    private static final long MAX_STAT_DURATION_IN_MILLIS = 10 * 60 * 1000;

    private final MetricsStorage metricsStorage;
    private final Aggregator aggregator;
    private final StatViewer viewer;

    public ScheduledReqorter(MetricsStorage metricsStorage, Aggregator aggregator, StatViewer viewer) {
        this.metricsStorage = metricsStorage;
        this.aggregator = aggregator;
        this.viewer = viewer;
    }

    /**
     * Computes statistics for the given time range and hands them to the viewer.
     *
     * @param startTimeInMillis start of the statistics window (inclusive), epoch millis
     * @param endTimeInMillis   end of the statistics window (exclusive), epoch millis
     */
    protected void doStartAndReport(long startTimeInMillis, long endTimeInMillis) {
        Map<String, RequestStat> stats = doStat(startTimeInMillis, endTimeInMillis);
        viewer.output(stats, startTimeInMillis, endTimeInMillis);
    }

    /**
     * Performance-oriented refactoring: instead of loading the whole time range
     * at once, the range is split into small segments (at most
     * {@link #MAX_STAT_DURATION_IN_MILLIS} each), each segment is aggregated
     * independently, and the per-segment results are merged into the final stats.
     *
     * @param startTimeInMillis start of the statistics window, epoch millis
     * @param endTimeInMillis   end of the statistics window, epoch millis
     * @return aggregated statistics per API name
     * @since v3
     */
    private Map<String, RequestStat> doStat(long startTimeInMillis, long endTimeInMillis) {
        Map<String, List<RequestStat>> segmentStats = new HashMap<>();
        long segmentStartTimeMillis = startTimeInMillis;
        while (segmentStartTimeMillis < endTimeInMillis) {
            // The segment ends one unit after its start, clamped to the overall end.
            long segmentEndTimeMillis =
                    Math.min(segmentStartTimeMillis + MAX_STAT_DURATION_IN_MILLIS, endTimeInMillis);
            // Fetch raw request data for this segment only.
            Map<String, List<RequestInfo>> requestInfos =
                    metricsStorage.getRequestInfos(segmentStartTimeMillis, segmentEndTimeMillis);
            // BUGFIX: the original `continue`d on an empty segment WITHOUT advancing
            // segmentStartTimeMillis, spinning forever. Aggregate only non-empty
            // segments and always advance the cursor at the end of the loop body.
            if (requestInfos != null && !requestInfos.isEmpty()) {
                Map<String, RequestStat> segmentStat = aggregator.aggregate(
                        requestInfos, segmentEndTimeMillis - segmentStartTimeMillis);
                addStat(segmentStats, segmentStat);
            }
            segmentStartTimeMillis += MAX_STAT_DURATION_IN_MILLIS;
        }
        long durationInMillis = endTimeInMillis - startTimeInMillis;
        return aggregateStats(segmentStats, durationInMillis);
    }

    /**
     * Appends each API's segment statistic to its accumulated list.
     *
     * @param segmentStats accumulator: API name -> stats of all segments seen so far
     * @param segmentStat  one segment's result: API name -> stat
     */
    private void addStat(Map<String, List<RequestStat>> segmentStats,
                         Map<String, RequestStat> segmentStat) {
        for (Map.Entry<String, RequestStat> entry : segmentStat.entrySet()) {
            // BUGFIX: putIfAbsent returns the PREVIOUS mapping, which is null on the
            // first insert, so the original `statList.add(stat)` threw an NPE.
            // computeIfAbsent returns the (possibly new) list in both cases.
            segmentStats.computeIfAbsent(entry.getKey(), k -> new ArrayList<>())
                    .add(entry.getValue());
        }
    }

    /**
     * Merges per-segment statistics into one statistic per API covering the
     * whole time range. Max/min are taken across segments; the average is
     * recomputed from count-weighted sums; tps is derived from the total count
     * and the full window duration.
     *
     * @param segmentStats     per-segment statistics grouped by API name
     * @param durationInMillis length of the whole window (end minus start), used for tps
     * @return aggregated statistics per API name
     * @since v3
     */
    private Map<String, RequestStat> aggregateStats(Map<String, List<RequestStat>> segmentStats,
                                                    long durationInMillis) {
        Map<String, RequestStat> aggregatedStats = new HashMap<>();
        for (Map.Entry<String, List<RequestStat>> entry : segmentStats.entrySet()) {
            String apiName = entry.getKey();
            List<RequestStat> apiStats = entry.getValue();
            double maxRespTime = Double.MIN_VALUE;
            double minRespTime = Double.MAX_VALUE;
            long count = 0;
            double sumRespTime = 0;
            for (RequestStat stat : apiStats) {
                if (stat.getMaxResponseTime() > maxRespTime) {
                    maxRespTime = stat.getMaxResponseTime();
                }
                // BUGFIX: comparison was inverted (`>`); starting from MAX_VALUE the
                // minimum was never updated.
                if (stat.getMinResponseTime() < minRespTime) {
                    minRespTime = stat.getMinResponseTime();
                }
                count += stat.getCount();
                // Reconstruct the total response time from each segment's
                // count-weighted average so the overall average is exact.
                sumRespTime += (stat.getCount() * stat.getAvgResponseTime());
            }
            RequestStat aggregatedStat = new RequestStat();
            aggregatedStat.setMaxResponseTime(maxRespTime);
            aggregatedStat.setMinResponseTime(minRespTime);
            // Guard against a pathological zero total count to avoid NaN.
            aggregatedStat.setAvgResponseTime(count == 0 ? 0 : sumRespTime / count);
            aggregatedStat.setCount(count);
            // BUGFIX: `count / durationInMillis * 1000` was long integer division and
            // truncated to 0 whenever duration exceeded count. Compute in double.
            // NOTE(review): assumes RequestStat.setTps accepts a double — confirm.
            aggregatedStat.setTps(durationInMillis == 0 ? 0 : count * 1000.0 / durationInMillis);
            aggregatedStats.put(apiName, aggregatedStat);
        }
        return aggregatedStats;
    }

}
