package com.atguigu.dga.governance.util;

import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.parse.*;

import java.util.Collection;
import java.util.Collections;
import java.util.Stack;

/**
 * Utility for parsing a HiveQL statement into an AST and traversing it with a
 * caller-supplied {@link Dispatcher}.
 */
public class SqlParser {

    private SqlParser() {
        // Utility class — static methods only, no instances.
    }

    /**
     * Parses {@code sql} with Hive's {@link ParseDriver}, descends the leftmost spine of the
     * AST until the {@code TOK_QUERY} node is found, then walks that subtree depth-first,
     * invoking {@code dispatcher} for every visited node.
     *
     * @param dispatcher callback invoked for each AST node visited by the walker
     * @param sql        the HiveQL statement to parse
     * @throws RuntimeException if the statement cannot be parsed, contains no
     *                          {@code TOK_QUERY} node, or the walk fails; the original
     *                          exception is preserved as the cause
     */
    public static void sqlParse(Dispatcher dispatcher, String sql) {

        // Driver that turns the SQL text into a Hive AST.
        ParseDriver parseDriver = new ParseDriver();

        try {
            ASTNode astNode = parseDriver.parse(sql);

            // Skip synthetic wrapper nodes until the TOK_QUERY node is reached.
            while (astNode.getToken() == null || astNode.getToken().getType() != HiveParser.TOK_QUERY) {
                ASTNode child = (ASTNode) astNode.getChild(0);
                if (child == null) {
                    // Statements without a query block (e.g. DDL) have no TOK_QUERY on the
                    // leftmost spine — fail with a clear message instead of a bare NPE.
                    throw new IllegalStateException("No TOK_QUERY node found in AST for sql: " + sql);
                }
                astNode = child;
            }

            // Walker that performs the depth-first traversal, dispatching per node.
            GraphWalker graphWalker = new DefaultGraphWalker(dispatcher);

            // Start walking from the TOK_QUERY subtree.
            graphWalker.startWalking(Collections.singletonList(astNode), null);
        } catch (ParseException | SemanticException e) {
            // Propagate with the cause preserved instead of swallowing via printStackTrace(),
            // so callers actually observe parse/walk failures.
            throw new RuntimeException("Failed to parse sql: " + sql, e);
        }
    }

    /** Sample dispatcher that prints each visited node's type, token type, and text. */
    private static class MyDispatcher implements Dispatcher {
        @Override
        public Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs) throws SemanticException {
            ASTNode astNode = (ASTNode) nd;
            System.out.println(astNode.getType() + ":" + astNode.getToken().getType() + ":" + astNode.getText());
            return null;
        }
    }

    // Manual smoke test.
    public static void main(String[] args) {
        String sql = "select a,b,c from oredr_info where id=1";
        sqlParse(new MyDispatcher(), sql);
    }
}
