package com.atguigu.dga.common.utils;

import org.antlr.runtime.tree.Tree;
import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.ParseDriver;
import org.apache.hadoop.hive.ql.parse.ParseException;
import org.apache.hadoop.hive.ql.parse.SemanticException;

import java.util.Collections;
import java.util.Stack;

public class SqlParser {

    /**
     * Parses a HiveQL statement into an abstract syntax tree and walks every
     * node of that tree with the supplied {@link Dispatcher}.
     *
     * <p>Steps:
     * <ol>
     *   <li>Build the parse tool ({@link ParseDriver}).</li>
     *   <li>Parse the SQL text into an AST.</li>
     *   <li>Drop the synthetic root node so the walk starts at the real
     *       query node (child 0).</li>
     *   <li>Walk the whole tree with a {@link DefaultGraphWalker}, invoking
     *       the dispatcher on each node; after the walk the caller extracts
     *       whatever the dispatcher collected.</li>
     * </ol>
     *
     * @param sql        the HiveQL text to parse
     * @param dispatcher node handler invoked for every AST node during the walk
     * @throws ParseException    if the SQL text cannot be parsed
     * @throws SemanticException if the graph walk fails
     */
    public static void parseSQL(String sql, Dispatcher dispatcher)
            throws ParseException, SemanticException {

        // 1. Build the parse tool.
        ParseDriver parseDriver = new ParseDriver();

        // 2. Parse the SQL into an AST.
        ASTNode astNode = parseDriver.parse(sql);

        // 2.1 Skip the wrapper root node; child 0 is the actual query node.
        ASTNode queryNode = (ASTNode) astNode.getChild(0);

        // 3. Walk the entire tree, dispatching the handler on each node.
        DefaultGraphWalker walker = new DefaultGraphWalker(dispatcher);
        walker.startWalking(Collections.singletonList(queryNode), null);
    }

    /** Sample node handler that prints the text of every visited AST node. */
    static class MyDispatcher implements Dispatcher {

        @Override
        public Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs)
                throws SemanticException {
            // Handle the current node: just print its token text.
            ASTNode astNode = (ASTNode) nd;
            // Log label fixed: the value printed is astNode.getText(), not getName().
            System.out.println("astNode.getText() = " + astNode.getText());
            return null;
        }
    }

    /** Quick manual demo: parse a simple SELECT and print every AST node. */
    public static void main(String[] args) throws Exception {
        String sql = "select  a,b,c from t1 where b='abc'";
        SqlParser.parseSQL(sql, new MyDispatcher());
    }

}
