package cn.hexcloud.dga.governance.assessor.calc;

import cn.hexcloud.dga.common.constant.MetaConst;
import cn.hexcloud.dga.common.util.HttpUtil;
import cn.hexcloud.dga.common.util.StringUtil;
import cn.hexcloud.dga.ds.bean.TDsTaskInstance;
import cn.hexcloud.dga.ds.service.TDsTaskInstanceService;
import cn.hexcloud.dga.governance.assessor.Assessor;
import cn.hexcloud.dga.governance.bean.AssessParam;
import cn.hexcloud.dga.governance.bean.GovernanceAssessDetail;
import cn.hexcloud.dga.governance.bean.GovernanceMetric;
import cn.hexcloud.dga.meta.bean.TableMetaInfo;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import lombok.Data;
import org.apache.commons.lang3.time.DateUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import java.math.BigDecimal;
import java.math.RoundingMode;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

/**
 * @author jiangdan7
 */
@Component("DATA_SKEW")
public class DataSkewAssessor extends Assessor {

    /**
     * Timestamp format used by the Spark history server REST API
     * (e.g. {@code 2023-05-23T16:00:14.412GMT}). The literal 'GMT' suffix is not parsed
     * as a zone, so both timestamps are read in the JVM default zone; the offset cancels
     * out because we only ever subtract one from the other.
     */
    private static final String HIST_SERVER_DATE_PATTERN = "yyyy-MM-dd'T'HH:mm:ss.SSS'GMT'";

    /** 100% — a stage whose slowest task is its only task; excluded from skew detection. */
    private static final BigDecimal ONE_HUNDRED = BigDecimal.valueOf(100);

    @Autowired
    TDsTaskInstanceService tDsTaskInstanceService;

    @Value("${history-server.host}")
    String histServerHost;

    @Value("${history-server.port}")
    String histServerPort;

    /**
     * Assesses whether the Spark job that produced the given table suffered from data skew.
     * <p>
     * Looks up yesterday's successful DolphinScheduler task instance for the table to get its
     * YARN application id, pulls per-stage/per-task metrics from the Spark history server, and
     * flags every COMPLETE stage whose slowest task dominates the stage's total task time by
     * more than the configured {@code percent} while the stage ran longer than
     * {@code stage_dur_seconds}. On a hit, the score is zeroed and the problem text filled in.
     *
     * @param governanceAssessDetail result holder; mutated only when skew is detected
     * @param assessParam            carries the table meta info, assess date, and the metric
     *                               params JSON ({@code percent}, {@code stage_dur_seconds})
     * @throws Exception if the metric params are incomplete, or the history server has no
     *                   completed attempt / stages for the application
     */
    @Override
    public void checkProblem(GovernanceAssessDetail governanceAssessDetail, AssessParam assessParam) throws Exception {
        TableMetaInfo tableMetaInfo = assessParam.getTableMetaInfo();
        GovernanceMetric governanceMetric = assessParam.getGovernanceMetric();
        JSONObject metricParams = JSON.parseObject(governanceMetric.getMetricParamsJson());
        BigDecimal percent = metricParams.getBigDecimal("percent");
        Integer stageDurSeconds = metricParams.getInteger("stage_dur_seconds");
        if (percent == null || stageDurSeconds == null) {
            // Fail fast with a clear message instead of an NPE on the unboxing multiply.
            throw new Exception("metricParamsJson缺少percent或stage_dur_seconds参数");
        }
        long stageDurMilliSeconds = stageDurSeconds * 1000L;

        // Find yesterday's successful DS task instance for this table to obtain the yarn id.
        TDsTaskInstance taskInstance = tDsTaskInstanceService.getOne(new QueryWrapper<TDsTaskInstance>().eq("name", tableMetaInfo.getSchemaName() + "." + tableMetaInfo.getTableName()).eq("state", MetaConst.DS_TASK_SUCCESS).eq("date_format(start_time,'%Y-%m-%d')", assessParam.getAssessDate()).last("limit 1"));
        if (taskInstance == null) {
            // No run for the assess date — nothing to evaluate.
            return;
        }
        // The DS "app link" column carries the YARN application id.
        String yarnId = taskInstance.getAppLink();
        if (StringUtil.isEmpty(yarnId)) {
            return;
        }
        List<Stage> stageList = getStageListByYarnId(yarnId, histServerHost + ":" + histServerPort);
        StringBuilder problems = new StringBuilder();
        for (Stage stage : stageList) {
            // compareTo, not equals: BigDecimal.equals is scale-sensitive (100 != 100.00).
            // A ratio of exactly 100% means a single-task stage, which cannot be skewed.
            boolean skewed = stage.getDiffMaxAvgPercent().compareTo(ONE_HUNDRED) != 0
                    && stage.getDiffMaxAvgPercent().compareTo(percent) > 0
                    && stage.getStageDuration() > stageDurMilliSeconds;
            if (skewed) {
                problems.append("stage:").append(stage.getStageId())
                        .append("存在数据倾斜,(").append(stage.getDiffMaxAvgPercent())
                        .append("%,").append(stage.getStageDuration()).append(")");
            }
        }
        if (problems.length() > 0) {
            governanceAssessDetail.setAssessScore(BigDecimal.ZERO);
            governanceAssessDetail.setAssessProblem(problems.toString());
        }
    }

    /**
     * Resolves the completed attempt of the given YARN application and returns the task-level
     * skew statistics for every COMPLETE stage.
     *
     * @param yarnId        YARN application id, e.g. {@code application_1684083580862_0012}
     * @param histServerUrl history server address as {@code host:port}
     * @throws Exception if no completed attempt or no completed stage is found
     */
    private List<Stage> getStageListByYarnId(String yarnId, String histServerUrl) throws Exception {
        Integer attemptId = getAttemptId(yarnId, histServerUrl);
        if (attemptId == null) {
            throw new Exception("未找到对应 attemptId");
        }
        List<Integer> stageIdList = getStageIdList(yarnId, histServerUrl, attemptId);
        if (stageIdList.isEmpty()) {
            throw new Exception("未找到对应stageList");
        }
        return getStageListByStageId(yarnId, histServerUrl, attemptId, stageIdList);
    }

    /**
     * Fetches each stage's detail (including its tasks) and builds a {@link Stage} per
     * COMPLETE stage attempt.
     * <p>
     * Sample response element:
     * <pre>
     * [ {
     *   "status" : "COMPLETE",
     *   "stageId" : 1,
     *   "attemptId" : 0,
     *   "firstTaskLaunchedTime" : "2023-05-23T16:00:14.412GMT",
     *   "completionTime" : "2023-05-23T16:00:16.975GMT",
     *   "tasks" : {
     *     "2" : { "taskId" : 2, "duration" : 2560, ... },
     *     "1" : { "taskId" : 1, "duration" : 2565, ... }
     *   }
     * } ]
     * </pre>
     *
     * @throws ParseException if the history server returns timestamps in an unexpected format
     */
    private List<Stage> getStageListByStageId(String yarnId, String histServerUrl, Integer attemptId, List<Integer> stageIdList) throws ParseException {
        List<Stage> stageList = new ArrayList<>();
        for (Integer stageId : stageIdList) {
            //http://hadoop102:8020/api/v1/applications/application_1684083580862_0012/1/stages/1
            String url = "http://" + histServerUrl + "/api/v1/applications/" + yarnId + "/" + attemptId + "/stages/" + stageId;
            String stageJson = HttpUtil.get(url);
            List<JSONObject> stageJsonObjList = JSON.parseArray(stageJson, JSONObject.class);
            for (JSONObject stageJsonObj : stageJsonObjList) {
                if ("COMPLETE".equals(stageJsonObj.getString("status"))) {
                    Stage stage = new Stage();
                    stage.stageId = stageJsonObj.getInteger("stageId");
                    Date firstTaskLaunchedTime = DateUtils.parseDate(stageJsonObj.getString("firstTaskLaunchedTime"), HIST_SERVER_DATE_PATTERN);
                    Date completionTime = DateUtils.parseDate(stageJsonObj.getString("completionTime"), HIST_SERVER_DATE_PATTERN);
                    // Wall-clock span of the stage, in milliseconds.
                    stage.stageDuration = completionTime.getTime() - firstTaskLaunchedTime.getTime();
                    JSONObject tasks = stageJsonObj.getJSONObject("tasks");
                    for (Object taskObj : tasks.values()) {
                        JSONObject taskJsonObject = (JSONObject) taskObj;
                        stage.taskDurationList.add(taskJsonObject.getLong("duration"));
                    }
                    stage.setDiffMaxAvgPercent(calcDiffMaxAvgPercent(stage.getTaskDurationList()));
                    stageList.add(stage);
                }
            }
        }
        return stageList;
    }

    /**
     * Computes the slowest task's share of the stage's total task time, as a percentage
     * rounded HALF_UP to an integer scale (e.g. two equal tasks yield 50).
     * <p>
     * NOTE(review): despite the name, this is max/sum, not max/avg — the configured
     * {@code percent} threshold is interpreted against this ratio.
     *
     * @param taskDurationList task durations in milliseconds; null entries are skipped
     * @return the percentage, or {@link BigDecimal#ZERO} when there is no positive duration
     *         (previously this case threw an {@link ArithmeticException} on divide-by-zero)
     */
    private BigDecimal calcDiffMaxAvgPercent(List<Long> taskDurationList) {
        long maxTaskDur = 0L;
        long sumTaskDur = 0L;
        for (Long taskDur : taskDurationList) {
            if (taskDur == null) {
                // Defensive: an in-flight task may not report a duration yet.
                continue;
            }
            sumTaskDur += taskDur;
            maxTaskDur = Math.max(maxTaskDur, taskDur);
        }
        if (sumTaskDur == 0L) {
            return BigDecimal.ZERO;
        }
        return BigDecimal.valueOf(maxTaskDur).divide(BigDecimal.valueOf(sumTaskDur), 2, RoundingMode.HALF_UP).movePointRight(2);
    }

    /**
     * Lists the ids of all COMPLETE stages of the given application attempt.
     * <p>
     * Sample response:
     * <pre>
     * [ { "status" : "COMPLETE", "stageId" : 2, ... },
     *   { "status" : "COMPLETE", "stageId" : 1, ... }, ... ]
     * </pre>
     */
    private List<Integer> getStageIdList(String yarnId, String histServerUrl, Integer attemptId) {
        //http://hadoop102:8020/api/v1/applications/application_1684083580862_0012/1/stages
        String stageUrl = "http://" + histServerUrl + "/api/v1/applications/" + yarnId + "/" + attemptId + "/stages";
        String stageJson = HttpUtil.get(stageUrl);
        List<JSONObject> stageJsonArr = JSON.parseArray(stageJson, JSONObject.class);
        List<Integer> stageIdList = new ArrayList<>();
        for (JSONObject stageJsonObj : stageJsonArr) {
            if ("COMPLETE".equals(stageJsonObj.getString("status"))) {
                stageIdList.add(stageJsonObj.getInteger("stageId"));
            }
        }
        return stageIdList;
    }

    /**
     * Returns the id of the first completed attempt of the application, or {@code null}
     * when no attempt has completed.
     * <p>
     * Sample response:
     * <pre>
     * {
     *   "id" : "application_1684083580862_0012",
     *   "name" : "Hive on Spark (sessionId = ...)",
     *   "attempts" : [ {
     *     "attemptId" : "1",
     *     "startTime" : "2023-05-23T15:59:50.648GMT",
     *     "completed" : true,
     *     ...
     *   } ]
     * }
     * </pre>
     */
    private Integer getAttemptId(String yarnId, String histServerUrl) {
        //http://hadoop102:18080/api/v1/applications/application_1684083580862_0012
        String appUrl = "http://" + histServerUrl + "/api/v1/applications/" + yarnId;
        String appJson = HttpUtil.get(appUrl);
        JSONArray attempts = JSON.parseObject(appJson).getJSONArray("attempts");
        for (Object attempt : attempts) {
            JSONObject attemptJsonObj = (JSONObject) attempt;
            if (attemptJsonObj.getBoolean("completed")) {
                return attemptJsonObj.getInteger("attemptId");
            }
        }
        return null;
    }


    /**
     * Per-stage skew statistics. Declared {@code static} so instances do not retain a
     * hidden reference to the enclosing Spring bean.
     */
    @Data
    static class Stage {
        // Spark stage id.
        Integer stageId;
        // Wall-clock duration of the stage in milliseconds.
        Long stageDuration;
        // Slowest task's share of total task time, as a percentage (see calcDiffMaxAvgPercent).
        BigDecimal diffMaxAvgPercent;
        // Durations (ms) of all tasks in the stage.
        List<Long> taskDurationList = new ArrayList<>();
    }
}
