package com.atguigu.dga.common;


import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.parse.*;
import org.datanucleus.store.types.wrappers.backed.Collection;

import java.util.Collections;
import java.util.Stack;



public class SqlParser {

    /**
     * Parses a HiveQL statement into an abstract syntax tree and walks every
     * node of the tree, invoking the supplied {@link Dispatcher} once per node.
     *
     * @param sql        the HiveQL statement to parse; must not be {@code null}
     * @param dispatcher callback invoked for each AST node during the walk;
     *                   must not be {@code null}
     * @throws IllegalArgumentException if {@code sql} or {@code dispatcher} is null
     * @throws Exception                if Hive fails to parse the SQL or the walk fails
     */
    public static void parse(String sql, Dispatcher dispatcher) throws Exception {
        // Fail fast with a clear message instead of an obscure NPE inside Hive.
        if (sql == null || dispatcher == null) {
            throw new IllegalArgumentException("sql and dispatcher must not be null");
        }
        // 1. Create the Hive parser utility.
        ParseDriver parseDriver = new ParseDriver();
        // 2. Parse the SQL text into an abstract syntax tree.
        ASTNode astNode = parseDriver.parse(sql);
        // 3. The root returned by ParseDriver wraps the real statement;
        //    descend to the first child (the actual query node).
        astNode = (ASTNode) astNode.getChild(0);
        // 4. Walk the whole tree, dispatching on each node. The second argument
        //    (node-output map) is unused here, so null is acceptable.
        DefaultGraphWalker graphWalker = new DefaultGraphWalker(dispatcher);
        graphWalker.startWalking(Collections.singletonList(astNode), null);
    }

    /** Manual smoke test: checks whether a sample query contains a JOIN. */
    public static void main(String[] args) throws Exception {
        String sql = "select a,b,c from gmall.order_info oi join order_detail od on oi.id=od.id    where a=102";
        MyDispatcher myDispatcher = new MyDispatcher();
        parse(sql, myDispatcher);
        // Extract the result collected during the walk.
        System.out.println("myDispatcher.hasJoin = " + myDispatcher.hasJoin);
    }

    /**
     * Dispatcher that records whether any {@code TOK_JOIN} token appears
     * anywhere in the walked AST.
     */
    static class MyDispatcher implements Dispatcher {

        // Set to true as soon as a JOIN token is seen; read by callers after the walk.
        boolean hasJoin = false;

        @Override
        public Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs) throws SemanticException {
            ASTNode astNode = (ASTNode) nd;
            System.out.println("nd = " + astNode.getText());
            if (astNode.getType() == HiveParser.TOK_JOIN) {
                hasJoin = true;
            }
            return null;
        }
    }
}
