package com.atguigu.dga.governance.assessor.calc;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.dga.constant.DgaConstant;
import com.atguigu.dga.governance.assessor.Assessor;
import com.atguigu.dga.governance.bean.AssessParam;
import com.atguigu.dga.governance.bean.GovernanceAssessDetail;
import com.atguigu.dga.util.HttpClientUtil;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;

/**
 * @Description: 计算中是否存在数据倾斜指标
 * 检查是否有数据倾斜，如果某个stage的最大任务耗时超过平均任务耗时的{percent}%, 只检查耗时超过{stage_dur_seconds}秒的stage。
 * 存在倾斜给0分，不存在给10分。
 * @Author: lay
 * @Date: 2024/7/1 20:19
 */
@Component("TABLE_DATA_SKEW")
public class TableDataSkewAssessor extends Assessor {

    // Spark History Server REST base url, e.g. http://hadoop102:18080/api/v1/applications
    @Value("${spark.historyServer.url}")
    private String sparkHistoryServerUrl;

    /**
     * Checks whether the task's Spark job shows data skew: for every stage whose
     * slowest task ran longer than {stage_dur_seconds}, the stage is skewed when
     * the slowest task exceeds the average of the remaining tasks by more than
     * {percent}%. Skew scores 0; ODS tables are exempt.
     *
     * @param governanceAssessDetail result holder: score / problem / comment are written here
     * @param assessParam            carries table metadata, metric params JSON and the task instance
     * @throws Exception propagated from the History Server HTTP calls / JSON parsing
     */
    @Override
    public void checkProblem(GovernanceAssessDetail governanceAssessDetail, AssessParam assessParam) throws Exception {
        // ODS-layer tables are excluded from this metric.
        if (DgaConstant.DW_LEVEL_ODS.equals(assessParam.getTableMetaInfo().getTableMetaInfoExtra().getDwLevel())) {
            return;
        }
        // Thresholds come from the metric's configured params JSON.
        String paramsJson = assessParam.getGovernanceMetric().getMetricParamsJson();
        JSONObject jsonObject = JSONObject.parseObject(paramsJson);
        Integer paramPercent = jsonObject.getInteger("percent");
        Integer paramStageDurSeconds = jsonObject.getInteger("stage_dur_seconds");

        String yarnId = assessParam.getTDsTaskInstance().getAppLink();

        String completedAttemptId = getAttemptId(yarnId);
        // BUG FIX: getAttemptId may return null (no completed attempt); previously the
        // null was concatenated into every subsequent URL ("/null/stages").
        if (completedAttemptId == null) {
            governanceAssessDetail.setAssessComment("未找到已完成的attempt, 无法评估数据倾斜");
            return;
        }

        List<Integer> stageIdList = getStageId(yarnId, completedAttemptId);

        List<Stage> stageList = getStageList(yarnId, completedAttemptId, stageIdList);

        // BUG FIX: stage_dur_seconds is configured in SECONDS, but the Spark REST API
        // reports task durations in MILLISECONDS — convert before comparing, otherwise
        // the "long enough to matter" filter is effectively always true.
        long minDurationMillis = paramStageDurSeconds * 1000L;

        List<Stage> dataSkewStageList = new ArrayList<>();
        for (Stage stage : stageList) {
            if (stage.getMaxTaskDuration() > minDurationMillis
                    && stage.getRealPercent() > paramPercent) {
                dataSkewStageList.add(stage);
            }
        }
        if (!dataSkewStageList.isEmpty()) {
            // Skew detected: zero score plus the problem label.
            governanceAssessDetail.setAssessScore(BigDecimal.ZERO);
            governanceAssessDetail.setAssessProblem("计算中存在数据倾斜");
        }
        // Always record what was inspected, skewed or not.
        governanceAssessDetail.setAssessComment("所有的阶段: " + stageList + " , 存在倾斜的阶段: " + dataSkewStageList);
    }

    /**
     * Fetches per-task statistics for each stage and condenses them into Stage objects
     * (slowest task duration, average of the other tasks, and the skew percentage).
     *
     * @param yarnId             YARN application id
     * @param completedAttemptId attempt id of the completed run
     * @param stageIdList        ids of the COMPLETE stages to inspect
     * @return one Stage per inspected stage that had at least one successful task
     */
    private List<Stage> getStageList(String yarnId, String completedAttemptId, List<Integer> stageIdList) {
        // http://hadoop102:18080/api/v1/applications/{yarnId}/{attemptId}/stages/{stageId}
        List<Stage> stageList = new ArrayList<>();
        for (Integer stageId : stageIdList) {
            String url = sparkHistoryServerUrl + "/" + yarnId + "/" + completedAttemptId + "/stages/" + stageId;
            String result = HttpClientUtil.get(url);
            List<JSONObject> stageJSONArray = JSON.parseArray(result, JSONObject.class);
            // Slowest task duration (ms) across all COMPLETE attempts of this stage.
            int maxTaskDuration = Integer.MIN_VALUE;
            // BUG FIX: long accumulator — many tasks * millisecond durations can overflow int.
            long sumTaskDuration = 0L;
            int taskCount = 0;
            for (JSONObject stageAttempt : stageJSONArray) {
                if (!"COMPLETE".equals(stageAttempt.getString("status"))) {
                    continue;
                }
                // "tasks" is a map keyed by task id — iterate its values.
                JSONObject tasksJsonObj = stageAttempt.getJSONObject("tasks");
                for (String taskKey : tasksJsonObj.keySet()) {
                    JSONObject taskJsonObject = tasksJsonObj.getJSONObject(taskKey);
                    if ("SUCCESS".equals(taskJsonObject.getString("status"))) {
                        int duration = taskJsonObject.getIntValue("duration");
                        maxTaskDuration = Math.max(maxTaskDuration, duration);
                        sumTaskDuration += duration;
                        taskCount++;
                    }
                }
            }
            // BUG FIX: a stage with no successful tasks previously fell into the
            // else-branch below and computed (0 - Integer.MIN_VALUE) / (0 - 1),
            // an int-overflowing garbage average — skip such stages entirely.
            if (taskCount == 0) {
                continue;
            }
            Stage stage = new Stage();
            stage.setId(stageId);
            stage.setMaxTaskDuration(maxTaskDuration);
            if (taskCount == 1) {
                // A single task has nothing to be skewed against.
                stage.setAvgTaskDuration(maxTaskDuration);
                stage.setRealPercent(0);
            } else {
                // Average over every task EXCEPT the slowest one, so the outlier
                // does not inflate its own baseline.
                int avgTaskDuration = (int) ((sumTaskDuration - maxTaskDuration) / (taskCount - 1));
                stage.setAvgTaskDuration(avgTaskDuration);
                if (avgTaskDuration <= 0) {
                    // BUG FIX: sub-millisecond tasks can average to 0 — avoid division by zero.
                    stage.setRealPercent(0);
                } else {
                    // How far (in %) the slowest task exceeds the average of the rest.
                    stage.setRealPercent((int) ((maxTaskDuration - avgTaskDuration) * 100L / avgTaskDuration));
                }
            }
            stageList.add(stage);
        }
        return stageList;
    }

    /**
     * Lists the ids of all COMPLETE stages of the given application attempt.
     *
     * @param appLink   YARN application id
     * @param attemptId attempt id of the completed run
     * @return stage ids with status COMPLETE (possibly empty)
     */
    private List<Integer> getStageId(String appLink, String attemptId) {
        // http://hadoop102:18080/api/v1/applications/{yarnId}/{attemptId}/stages
        String url = sparkHistoryServerUrl + "/" + appLink + "/" + attemptId + "/stages";
        String result = HttpClientUtil.get(url);
        List<JSONObject> jsonObjectList = JSON.parseArray(result, JSONObject.class);

        List<Integer> stageIdList = new ArrayList<>();
        for (JSONObject jsonObject : jsonObjectList) {
            if ("COMPLETE".equals(jsonObject.getString("status"))) {
                stageIdList.add(jsonObject.getInteger("stageId"));
            }
        }
        return stageIdList;
    }

    /**
     * Returns the attemptId of the first completed attempt of the application,
     * or null when no attempt has completed yet.
     *
     * @param appLink YARN application id
     * @return completed attemptId, or null if none
     */
    private String getAttemptId(String appLink) {
        // http://hadoop102:18080/api/v1/applications/{yarnId}
        String url = sparkHistoryServerUrl + "/" + appLink;
        String result = HttpClientUtil.get(url);
        JSONObject jsonObject = JSONObject.parseObject(result);
        String attempts = jsonObject.getString("attempts");
        List<JSONObject> jsonObjectList = JSON.parseArray(attempts, JSONObject.class);

        for (JSONObject object : jsonObjectList) {
            // BUG FIX: Boolean.TRUE.equals avoids an unboxing NPE when "completed" is absent.
            if (Boolean.TRUE.equals(object.getBoolean("completed"))) {
                return object.getString("attemptId");
            }
        }
        return null;
    }

    /**
     * Condensed per-stage task statistics. All durations are in milliseconds
     * (as reported by the Spark History Server REST API).
     */
    @Data
    @AllArgsConstructor
    @NoArgsConstructor
    // BUG FIX: declared static — a non-static inner class would pin a useless
    // reference to the enclosing Spring component in every Stage instance.
    public static class Stage {

        // Spark stage id.
        private Integer id;

        // Duration of the slowest successful task (ms).
        private Integer maxTaskDuration;

        // Average duration of the remaining tasks (ms); equals max when only one task ran.
        private Integer avgTaskDuration;

        // Percentage by which the slowest task exceeds the average of the rest.
        private Integer realPercent;
    }
}
