package com.central.dataManage.common.datasourcePlugin;

import com.central.common.model.ColumnInfo;
import lombok.extern.slf4j.Slf4j;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.parse.*;

import java.util.ArrayList;
import java.util.List;

/**
 * Parses Hive CREATE TABLE DDL statements to extract the table comment and
 * column metadata (name, type, comment, partition flag).
 *
 * @author Tindy
 * @date 2021/7/6
 */
@Slf4j
public class HiveDdlParse {
    private static ParseDriver pd = new ParseDriver();
    public static String getTableComment(String ddl){
        try {
            ASTNode ast = pd.parse(ddl);
            //获取建表 node
            ArrayList<Node> children1 = ast.getChildren();
            ASTNode createNode=null;
            for (Node child : children1) {
                ASTNode child2= (ASTNode) child;
                if(child2.token.getType()==HiveParser.TOK_CREATETABLE){
                    createNode=child2;
                    break;
                }
            }
            //获取表注释node
            if(createNode!=null){
                ArrayList<Node> children2 = createNode.getChildren();
                for (Node child : children2) {
                    ASTNode child2= (ASTNode) child;
                    if(child2.token.getType()==HiveParser.TOK_TABLECOMMENT){
                        if(child2.getChildren().size()>0){
                            return BaseSemanticAnalyzer.getUnescapedName((ASTNode) child2.getChild(0));
                        }
                    }
                }
            }
            return null;
        } catch (ParseException e) {
            log.error("解析hive建表语句获取 TableComment 出错",e);
            return null;
        }
    }

    public static List<ColumnInfo> getColumnInfo(String ddl){
        try {
            List<ColumnInfo> columnInfos=new ArrayList<>();
            ASTNode ast = pd.parse(ddl);
            //获取建表 node
            ASTNode createNode=null;
            for (Node child1 : ast.getChildren()) {
                ASTNode child2= (ASTNode) child1;
                if(child2.token.getType()==HiveParser.TOK_CREATETABLE){
                    createNode=child2;
                    break;
                }
            }
            //获取表字段node
            if(createNode!=null){
                for (Node child1 : createNode.getChildren()) {
                    ASTNode child2= (ASTNode) child1;
                    switch (child2.token.getType()){
                        case HiveParser.TOK_TABCOLLIST://普通字段
                            for (Node child3 : child2.getChildren()) {
                            ASTNode child4= (ASTNode) child3;
                            if(child4.token.getType()==HiveParser.TOK_TABCOL){
                                ColumnInfo columnInfo = new ColumnInfo();
                                setColumnInfo(child4, columnInfo);
                                columnInfos.add(columnInfo);
                            }
                        }
                            break;
                        case HiveParser.TOK_TABLEPARTCOLS://分区字段
                            ASTNode child3= (ASTNode)child2.getChild(0);
                            for (Node child4 : child3.getChildren()) {
                                ASTNode child5= (ASTNode) child4;
                                if(child5.token.getType()==HiveParser.TOK_TABCOL){
                                    ColumnInfo columnInfo = new ColumnInfo();
                                    columnInfo.setIsPartition(true);
                                    setColumnInfo(child5, columnInfo);
                                    columnInfos.add(columnInfo);
                                }
                            }
                            break;
                    }
                }
            }
            return columnInfos;
        } catch (ParseException e) {
            log.error("解析hive建表语句获取 ColumnInfo 出错",e);
            return null;
        }
    }

    private static void setColumnInfo(ASTNode child1, ColumnInfo columnInfo) {
        for (Node child2 : child1.getChildren()) {
            ASTNode child3 = (ASTNode) child2;
            String args = "";
            switch (child3.getToken().getType()) {
                case HiveParser.Identifier://字段名
                    columnInfo.setColumnName(BaseSemanticAnalyzer.getUnescapedName(child3));
                    break;
                case HiveParser.StringLiteral://字段注释
                    columnInfo.setColumnComment(BaseSemanticAnalyzer.getUnescapedName(child3));
                    break;
                //字段类型
                case HiveParser.TOK_STRING://string 类型
                    columnInfo.setDataType("string");
                    break;
                case HiveParser.TOK_TINYINT://tinyint 类型
                    columnInfo.setDataType("tinyint");
                    break;
                case HiveParser.TOK_SMALLINT://smallint 类型
                    columnInfo.setDataType("smallint");
                    break;
                case HiveParser.TOK_INT://int 类型
                    columnInfo.setDataType("int");
                    break;
                case HiveParser.TOK_BIGINT://bigint 类型
                    columnInfo.setDataType("bigint");
                    break;
                case HiveParser.TOK_BOOLEAN://boolean 类型
                    columnInfo.setDataType("boolean");
                    break;
                case HiveParser.TOK_FLOAT://float 类型
                    columnInfo.setDataType("float");
                    break;
                case HiveParser.TOK_DOUBLE://double 类型
                    columnInfo.setDataType("double");
                    break;
                case HiveParser.TOK_DECIMAL://decimal 类型
                    columnInfo.setDataType("decimal");
                    args = "(" + BaseSemanticAnalyzer.getUnescapedName((ASTNode) child3.getChild(0))
                            + "," + BaseSemanticAnalyzer.getUnescapedName((ASTNode) child3.getChild(1)) + ")";
                    columnInfo.setArgs(args);
                    break;
                case HiveParser.TOK_TIMESTAMP://timestamp 类型
                    columnInfo.setDataType("timestamp");
                    break;
                case HiveParser.TOK_DATE://date 类型
                    columnInfo.setDataType("date");
                    break;
                case HiveParser.TOK_CHAR://char 类型
                    columnInfo.setDataType("char");
                    args = "(" + BaseSemanticAnalyzer.getUnescapedName((ASTNode) child3.getChild(0)) + ")";
                    columnInfo.setArgs(args);
                    break;
                case HiveParser.TOK_VARCHAR://varchar 类型
                    columnInfo.setDataType("varchar");
                    args = "(" + BaseSemanticAnalyzer.getUnescapedName((ASTNode) child3.getChild(0)) + ")";
                    columnInfo.setArgs(args);
                    break;
                case HiveParser.TOK_LIST://array 类型
                    columnInfo.setDataType("array");
                    break;
                case HiveParser.TOK_MAP://map 类型
                    columnInfo.setDataType("map");
                    break;
                case HiveParser.TOK_STRUCT://struct 类型
                    columnInfo.setDataType("struct");
                    break;
                default:
                    break;
            }
        }
    }
}
