package com.atguigu.dga230301.governance.util;

import org.antlr.runtime.tree.Tree;
import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.parse.*;

import java.util.Collections;
import java.util.Stack;

public class SqlParser {

    // Utility class: all members are static, so prevent instantiation.
    private SqlParser() {
    }

    /**
     * Parses a HiveQL statement into an AST, descends to the first
     * {@code TOK_QUERY} node, and walks that subtree with the supplied
     * dispatcher.
     *
     * @param sql        the HiveQL statement to parse
     * @param dispatcher callback invoked by the walker for every visited node
     * @throws ParseException        if Hive's grammar rejects the statement
     * @throws SemanticException     if the dispatcher reports an error while walking
     * @throws IllegalStateException if the statement contains no TOK_QUERY node
     *                               (e.g. a DDL statement such as {@code USE db})
     */
    public static void sqlParse(String sql, Dispatcher dispatcher) throws ParseException, SemanticException {

        // 1. Obtain Hive's SQL parse driver.
        ParseDriver parseDriver = new ParseDriver();

        // 2. Parse the statement into an abstract syntax tree.
        ASTNode astNode = parseDriver.parse(sql);

        // 3. Descend to the first TOK_QUERY node. Guard each step: statements
        //    without a query block (e.g. DDL) have no such node, and blindly
        //    calling getChild(0) would raise an uninformative NullPointerException.
        while (astNode.getToken() == null || astNode.getToken().getType() != HiveParser.TOK_QUERY) {
            ASTNode child = (ASTNode) astNode.getChild(0);
            if (child == null) {
                throw new IllegalStateException("No TOK_QUERY node found in statement: " + sql);
            }
            astNode = child;
        }

        // 4. Walk the query subtree, dispatching every visited node.
        DefaultGraphWalker graphWalker = new DefaultGraphWalker(dispatcher);
        graphWalker.startWalking(Collections.singleton(astNode), null);
    }

    /**
     * Smoke test: prints every AST node of a simple SELECT statement.
     */
    public static void main(String[] args) throws ParseException, SemanticException {
        String sql = "select a,b,c from order_info where id = 1";
        sqlParse(sql, new MyDis());
    }

    /**
     * Debug dispatcher that prints each visited AST node's type and text,
     * alongside the same information from its underlying token.
     */
    private static class MyDis implements Dispatcher {

        @Override
        public Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs) throws SemanticException {
            ASTNode astNode = (ASTNode) nd;
            System.out.println(astNode.getType() + ":" + astNode.getText()
                    + "-----------" + astNode.getToken().getType() + ":" + astNode.getToken().getText());
            return null;
        }
    }
}
