package com.atguigu.dga.governance.assessor.calc;


import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.dga.common.SqlParser;
import com.atguigu.dga.governance.assessor.Assessor;
import com.atguigu.dga.governance.bean.AssessParam;
import com.atguigu.dga.governance.bean.GovernanceAssessDetail;
import com.atguigu.dga.meta.bean.TableMetaInfo;
import com.google.common.collect.Sets;
import lombok.Getter;
import lombok.Setter;
import org.antlr.runtime.tree.Tree;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.springframework.stereotype.Component;

import java.math.BigDecimal;
import java.util.*;

@Component("IS_SIMPLE_PROCESS")
public class IsSimpleProcessAssessor extends Assessor {

    // Assessment flow:
    // 1. Fetch the SQL from the scheduler task definition.
    // 2. Build an AST node dispatcher that collects, in one traversal:
    //      2.1 complex operations: join / group by / union / function / distinct
    //      2.2 every column referenced inside a WHERE clause
    //      2.3 every queried table, qualified as "schema.table"
    // 3. Run the SQL through the parser with that dispatcher.
    // 4. Classify:
    //      4.1 any complex operation -> complex SQL
    //      4.2 otherwise compare the WHERE columns against the queried tables'
    //          partition columns; any non-partition filter column -> complex SQL
    // 5. A simple SQL is the governance problem: it scores 0.

    /**
     * Flags tasks whose SQL is "simple processing" (no joins / aggregations /
     * functions / distinct, and filtering only on partition columns) by scoring
     * them 0; complex SQL keeps its score and gets the operators recorded in the
     * assess comment.
     *
     * @param governanceAssessDetail result holder; score/problem/comment are set here
     * @param assessParam            task definition plus table metadata lookups
     * @throws Exception if SQL parsing fails
     */
    @Override
    protected void assessProblem(GovernanceAssessDetail governanceAssessDetail, AssessParam assessParam) throws Exception {
        // 1. A task without a definition (or without SQL text) cannot be assessed.
        if (assessParam.getTDsTaskDefinition() == null) {
            return;
        }
        String sql = assessParam.getTDsTaskDefinition().getSql();
        if (StringUtils.isBlank(sql)) {
            // Guard: SqlParser.parse would NPE on a null/empty statement.
            return;
        }

        // 2 + 3. Collect operators / WHERE fields / source tables in a single parse.
        // (The SQL used to be parsed twice with the same dispatcher; once is enough —
        // the collected sets are identical.)
        SimpleDispatcher simpleDispatcher = new SimpleDispatcher();
        simpleDispatcher.setDefaultSchemaName(assessParam.getTableMetaInfo().getSchemaName());
        SqlParser.parse(sql, simpleDispatcher);

        Set<String> fromTableSet = simpleDispatcher.getFromTableSet();
        Set<String> whereFieldSet = simpleDispatcher.getWhereFieldSet();
        Set<String> complicateOperatorSet = simpleDispatcher.getComplicateOperatorSet();

        // 4. Classify the SQL.
        boolean isSimple;
        if (!complicateOperatorSet.isEmpty()) {
            // 4.1 Any complex operation immediately marks the SQL as complex.
            isSimple = false;
        } else {
            // 4.2 No complex operation: simple only if every WHERE column is a
            //     partition column of the queried tables.
            isSimple = filtersOnPartitionFieldsOnly(assessParam, fromTableSet, whereFieldSet);
        }

        // 5. "Simple processing" is the governance problem => score 0.
        if (isSimple) {
            governanceAssessDetail.setAssessScore(BigDecimal.ZERO);
            governanceAssessDetail.setAssessProblem("简单加工");
        } else {
            governanceAssessDetail.setAssessComment("复杂处理：" + StringUtils.join(complicateOperatorSet, ","));
        }
    }

    /**
     * Returns {@code true} when, for every queried table that has partition
     * columns in the metadata, all collected WHERE columns are partition columns
     * of that table. Tables missing from the metadata map, or without partition
     * columns, impose no constraint (same semantics as the original inline loop).
     *
     * @param assessParam   source of the table metadata map keyed by "schema.table"
     * @param fromTableSet  tables referenced by the SQL, qualified as "schema.table"
     * @param whereFieldSet column names collected from WHERE clauses
     */
    private boolean filtersOnPartitionFieldsOnly(AssessParam assessParam,
                                                 Set<String> fromTableSet,
                                                 Set<String> whereFieldSet) {
        for (String tableNameWithSchema : fromTableSet) {
            // Look up the table's metadata; unknown tables are skipped.
            TableMetaInfo tableMetaInfo = assessParam.getTableMetaInfoAllMap().get(tableNameWithSchema);
            if (tableMetaInfo == null) {
                continue;
            }
            List<JSONObject> partitionJsonObjList =
                    JSON.parseArray(tableMetaInfo.getPartitionColNameJson(), JSONObject.class);
            if (partitionJsonObjList == null || partitionJsonObjList.isEmpty()) {
                continue;
            }
            // Count how many WHERE columns match this table's partition columns.
            int partitionFieldCount = 0;
            for (JSONObject partitionJsonObj : partitionJsonObjList) {
                if (whereFieldSet.contains(partitionJsonObj.getString("name"))) {
                    partitionFieldCount++;
                }
            }
            if (partitionFieldCount != whereFieldSet.size()) {
                // At least one WHERE column is not a partition column => complex.
                return false;
            }
        }
        return true;
    }

    /**
     * AST visitor that collects, in one traversal over the Hive parse tree:
     * complex operators (2.1), WHERE-clause columns (2.2), and queried tables
     * qualified as "schema.table" (2.3).
     */
    static class SimpleDispatcher implements Dispatcher {

        // Text of every complex operation node encountered (join / group by / ...).
        @Getter
        private Set<String> complicateOperatorSet = new HashSet<>();

        // Column names referenced anywhere under a WHERE clause.
        @Getter
        private Set<String> whereFieldSet = new HashSet<>();

        // Queried tables, always qualified as "schema.table".
        @Getter
        private Set<String> fromTableSet = new HashSet<>();

        // Schema used to qualify table references that carry no schema of their own.
        @Setter
        private String defaultSchemaName;

        // AST token types that mark the SQL as "complex processing".
        private final Set<Integer> complicateOperatorPatternSet = Sets.newHashSet(
                HiveParser.TOK_JOIN,            // join (also covers joins expressed via WHERE)
                HiveParser.TOK_GROUPBY,         // group by
                HiveParser.TOK_LEFTOUTERJOIN,   // left join
                HiveParser.TOK_RIGHTOUTERJOIN,  // right join
                HiveParser.TOK_FULLOUTERJOIN,   // full join
                HiveParser.TOK_FUNCTION,        // e.g. count(1)
                HiveParser.TOK_FUNCTIONDI,      // e.g. count(distinct x)
                HiveParser.TOK_FUNCTIONSTAR,    // e.g. count(*)
                HiveParser.TOK_SELECTDI,        // select distinct
                HiveParser.TOK_UNIONALL         // union
        );

        /**
         * Called once per AST node during the walk; inspects the node type and
         * feeds the three collection sets. Always returns {@code null} (no
         * per-node output is propagated).
         */
        @Override
        public Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs) throws SemanticException {
            ASTNode astNode = (ASTNode) nd;

            // 2.1 Record complex operators.
            if (complicateOperatorPatternSet.contains(astNode.getType())) {
                complicateOperatorSet.add(astNode.getText());
            }

            // 2.2 Collect WHERE-clause columns, anchored on TOK_TABLE_OR_COL nodes
            //     that sit somewhere below a TOK_WHERE:
            //     parent is DOT -> "t.col": the column is the DOT's second child;
            //     otherwise     -> bare "col": the column is this node's first child.
            if (astNode.getType() == HiveParser.TOK_TABLE_OR_COL
                    && astNode.getAncestor(HiveParser.TOK_WHERE) != null) {
                if (astNode.getParent().getType() == HiveParser.DOT) {
                    Tree fieldNode = astNode.getParent().getChild(1);
                    whereFieldSet.add(fieldNode.getText());
                } else {
                    Tree fieldNode = astNode.getChild(0);
                    whereFieldSet.add(fieldNode.getText());
                }
            }

            // 2.3 Collect queried tables as "schema.table".
            if (astNode.getType() == HiveParser.TOK_TABREF) {
                ASTNode tableNameAllNode = (ASTNode) astNode.getChild(0);
                if (tableNameAllNode.getChildren().size() == 1) {
                    // Unqualified table name: prepend the default schema.
                    Tree tableNameNode = tableNameAllNode.getChild(0);
                    fromTableSet.add(defaultSchemaName + "." + tableNameNode.getText());
                } else if (tableNameAllNode.getChildren().size() == 2) {
                    // Already schema-qualified.
                    Tree schemaNameNode = tableNameAllNode.getChild(0);
                    Tree tableNameNode = tableNameAllNode.getChild(1);
                    fromTableSet.add(schemaNameNode.getText() + "." + tableNameNode.getText());
                }
            }

            return null;
        }
    }

    /** Ad-hoc manual harness: parse a realistic warehouse SQL and print what was collected. */
    public static void main(String[] args) throws Exception {
        String sql1="insert overwrite table gmall.dwd_user_login_inc partition (dt = '${do_date}')\\nselect user_id,\\n       date_format(from_utc_timestamp(ts, 'GMT+8'), 'yyyy-MM-dd')          date_id,\\n       date_format(from_utc_timestamp(ts, 'GMT+8'), 'yyyy-MM-dd HH:mm:ss') login_time,\\n       channel,\\n       area_code                                                           province_id,\\n       version_code,\\n       mid_id,\\n       brand,\\n       model,\\n       operate_system\\nfrom (\\n         select user_id,\\n                channel,\\n                area_code,\\n                version_code,\\n                mid_id,\\n                brand,\\n                model,\\n                operate_system,\\n                ts\\n         from (\\n                  select user_id,\\n                         channel,\\n                         area_code,\\n                         version_code,\\n                         mid_id,\\n                         brand,\\n                         model,\\n                         operate_system,\\n                         ts,\\n                         row_number() over (partition by session_id order by ts) rn\\n                  from (select common.uid user_id,\\n                               common.ch  channel,\\n                               common.ar  area_code,\\n                               common.vc  version_code,\\n                               common.mid mid_id,\\n                               common.ba  brand,\\n                               common.md  model,\\n                               common.os  operate_system,\\n                               common.sid session_id,\\n                               ts\\n                        from gmall.ods_log_inc\\n                        where dt = '${do_date}'\\n                          and page is not null\\n                       ) t1\\n                  where a.user_id is not null\\n              ) t2\\n         where rn = 1\\n     ) t3";
        // The literal carries escaped "\n" sequences; flatten them to spaces first.
        sql1 = sql1.replace("\\n", " ");
        SimpleDispatcher simpleDispatcher = new SimpleDispatcher();
        simpleDispatcher.setDefaultSchemaName("gmall");

        SqlParser.parse(sql1, simpleDispatcher);

        // Print the collected sets instead of the previous meaningless debug literal.
        System.out.println("fromTableSet = " + simpleDispatcher.getFromTableSet());
        System.out.println("whereFieldSet = " + simpleDispatcher.getWhereFieldSet());
        System.out.println("complicateOperatorSet = " + simpleDispatcher.getComplicateOperatorSet());
    }
}
