package com.atguigu.dga.governance.assessor.calc;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.dga.governance.assessor.Assessor;
import com.atguigu.dga.governance.bean.AssessParam;
import com.atguigu.dga.governance.bean.GovernanceAssessDetail;
import com.atguigu.dga.meta.bean.TableMetaInfo;
import com.atguigu.dga.util.SqlUtil;
import com.google.common.collect.Sets;
import lombok.Getter;
import org.apache.commons.collections.CollectionUtils;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import java.math.BigDecimal;
import java.util.*;
import java.util.stream.Collectors;

/**
 * @Description: Assessor for whether a task SQL is "simple processing".
 * A SQL counts as simple processing when it contains no complex computation
 * (join / group by / union / distinct / function calls) AND every column
 * filtered in the WHERE clause is a partition column of the queried tables.
 * Simple processing scores 0; everything else keeps the default score of 10.
 * @Author: lay
 * @Date: 2024/7/1 16:06
 */
@Component("TABLE_SQL_SIMPLE_PROCESS")
public class TableSqlSimpleProcessAssessor extends Assessor {

    /**
     * Default Hive database assumed for unqualified table references.
     * Injected here (on the Spring-managed bean) and handed to the dispatcher
     * via its constructor: the dispatcher is created with {@code new}, so a
     * field-level {@code @Value} on it would never be populated and the
     * database would silently stay {@code null}.
     */
    @Value("${default.database}")
    private String defaultDatabase;

    @Override
    public void checkProblem(GovernanceAssessDetail governanceAssessDetail, AssessParam assessParam) throws Exception {
        String taskSql = assessParam.getTDsTaskDefinition().getTaskSql();

        // One pass over the Hive AST collects complex tokens, source tables and WHERE columns.
        SimpleSqlDispatcher simpleSqlDispatcher = new SimpleSqlDispatcher(defaultDatabase);
        SqlUtil.parseSql(taskSql, simpleSqlDispatcher);

        // Any complex computation disqualifies "simple processing": keep the default score.
        if (!simpleSqlDispatcher.getSqlHasComplicateTokSet().isEmpty()) {
            governanceAssessDetail.setAssessComment("Sql中包含的复杂计算: " + simpleSqlDispatcher.getSqlHasComplicateTokSet());
            return;
        }

        // Index all known tables by "schema.table" for O(1) lookup; the "." separator
        // matches the key format the dispatcher builds and avoids collisions such as
        // ("ab" + "c") == ("a" + "bc") that plain concatenation would allow.
        List<TableMetaInfo> tableMetaInfoList = assessParam.getTableMetaInfoList();
        Map<String, TableMetaInfo> tableMetaInfoMap = new HashMap<>();
        for (TableMetaInfo tableMetaInfo : tableMetaInfoList) {
            tableMetaInfoMap.put(tableMetaInfo.getSchemaName() + "." + tableMetaInfo.getTableName(), tableMetaInfo);
        }

        // Tables referenced by the task SQL.
        Set<String> sqlTableNameSet = simpleSqlDispatcher.getSqlTableNameSet();
        // Union of partition column names of every queried table.
        Set<String> allPartitionColNameSet = new HashSet<>();
        for (String tableName : sqlTableNameSet) {
            TableMetaInfo tableMetaInfo = tableMetaInfoMap.get(tableName);
            if (tableMetaInfo != null) {
                // partitionColNameJson is a JSON array of objects carrying a "name"
                // attribute per partition column; it may be null/empty for
                // unpartitioned tables, in which case parseArray returns null.
                String partitionColNameJson = tableMetaInfo.getPartitionColNameJson();
                List<JSONObject> jsonObjectList = JSON.parseArray(partitionColNameJson, JSONObject.class);
                if (jsonObjectList != null) {
                    List<String> partitionColNameList = jsonObjectList.stream()
                            .map(jsonObj -> jsonObj.getString("name"))
                            .collect(Collectors.toList());
                    allPartitionColNameSet.addAll(partitionColNameList);
                }
            }
        }

        Set<String> sqlWhereColSet = simpleSqlDispatcher.getSqlWhereColSet();
        // CollectionUtils.subtract(a, b) returns a NEW collection holding the elements
        // of a minus those of b; neither input is modified. Here: WHERE-filter columns
        // that are NOT partition columns of any queried table.
        Collection<String> nonPartitionFilterCols = CollectionUtils.subtract(sqlWhereColSet, allPartitionColNameSet);
        if (!nonPartitionFilterCols.isEmpty()) {
            // Filtering on a non-partition column means real computation: not simple.
            governanceAssessDetail.setAssessComment("Sql中非分区字段的过滤: " + nonPartitionFilterCols);
            return;
        }

        // No complex computation and every filter is on a partition column: simple processing.
        governanceAssessDetail.setAssessScore(BigDecimal.ZERO);
        // Problem description.
        governanceAssessDetail.setAssessProblem("Sql为简单加工");
        // Assessment remark.
        governanceAssessDetail.setAssessComment("Sql中的复杂计算:" + simpleSqlDispatcher.getSqlHasComplicateTokSet()
                + " , Sql中的过滤字段: " + sqlWhereColSet + ", 被查询表的分区字段: " + allPartitionColNameSet);

    }

    /**
     * Hive AST visitor that records, in one traversal of the parsed SQL:
     * complex-computation tokens, every source table referenced under a FROM
     * clause (normalized to "db.table"), and every column filtered in a WHERE
     * clause.
     */
    private static class SimpleSqlDispatcher implements Dispatcher {

        // Token types regarded as complex computation.
        Set<Integer> complicateTokSet = Sets.newHashSet(
                HiveParser.TOK_JOIN,             // join (including joins expressed via WHERE)
                HiveParser.TOK_GROUPBY,          // group by
                HiveParser.TOK_LEFTOUTERJOIN,    // left join
                HiveParser.TOK_RIGHTOUTERJOIN,   // right join
                HiveParser.TOK_FULLOUTERJOIN,    // full join
                HiveParser.TOK_FUNCTION,         // e.g. count(1)
                HiveParser.TOK_FUNCTIONDI,       // e.g. count(distinct xx)
                HiveParser.TOK_FUNCTIONSTAR,     // e.g. count(*)
                HiveParser.TOK_SELECTDI,         // select distinct
                HiveParser.TOK_UNIONALL          // union / union all
        );

        // Complex-computation tokens actually found in the SQL.
        @Getter
        Set<String> sqlHasComplicateTokSet = new HashSet<>();

        // Comparison operators whose left operand is treated as a WHERE filter column.
        Set<Integer> operatorSet = Sets.newHashSet(
                HiveParser.EQUAL,                // =
                HiveParser.GREATERTHAN,          // >
                HiveParser.LESSTHAN,             // <
                HiveParser.GREATERTHANOREQUALTO, // >=
                HiveParser.LESSTHANOREQUALTO,    // <=
                HiveParser.NOTEQUAL,             // <>
                HiveParser.KW_LIKE               // like
        );

        // Source tables, each normalized to "db.table".
        @Getter
        Set<String> sqlTableNameSet = new HashSet<>();
        // Column names filtered in WHERE clauses.
        @Getter
        Set<String> sqlWhereColSet = new HashSet<>();

        // Database assumed for unqualified table names. Supplied by the enclosing
        // Spring bean: @Value cannot be injected here because this object is
        // instantiated with `new`, outside the Spring container.
        private final String defaultDatabase;

        SimpleSqlDispatcher(String defaultDatabase) {
            this.defaultDatabase = defaultDatabase;
        }

        @Override
        public Object dispatch(Node node, Stack<Node> stack, Object... objects) throws SemanticException {
            ASTNode astNode = (ASTNode) node;
            // Record any complex-computation token encountered.
            if (complicateTokSet.contains(astNode.getType())) {
                sqlHasComplicateTokSet.add(astNode.getText());
            }
            // Capture the column on the left side of a comparison inside a WHERE clause.
            if (operatorSet.contains(astNode.getType()) && astNode.getAncestor(HiveParser.TOK_WHERE) != null) {
                if (astNode.getChild(0).getType() == HiveParser.DOT) {
                    // qualified column, e.g. a.id = xx -> keep "id"
                    sqlWhereColSet.add(astNode.getChild(0).getChild(1).getText());
                } else if (astNode.getChild(0).getType() == HiveParser.TOK_TABLE_OR_COL) {
                    // bare column, e.g. id = xx
                    sqlWhereColSet.add(astNode.getChild(0).getChild(0).getText());
                }
            }
            // Collect every source table under a FROM clause, normalized to "db.table".
            if (astNode.getType() == HiveParser.TOK_TABNAME && astNode.getAncestor(HiveParser.TOK_FROM) != null) {
                if (astNode.getChildren().size() == 2) {
                    // explicitly qualified: db.table
                    sqlTableNameSet.add(astNode.getChild(0).getText() + "." + astNode.getChild(1).getText());
                } else if (astNode.getChildren().size() == 1) {
                    // unqualified: assume the default database
                    sqlTableNameSet.add(defaultDatabase + "." + astNode.getChild(0).getText());
                }
            }
            return null;
        }
    }
}
