package com.atguigu.dwm.common.util;

import lombok.Getter;
import lombok.Setter;
import org.antlr.runtime.Token;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.ql.parse.SemanticException;

import java.util.HashSet;
import java.util.Set;
import java.util.Stack;

/**
 * Hive AST walker callback that collects the tables referenced by a query.
 *
 * <p>While the semantic analyzer walks the parse tree, {@link #dispatch} is invoked
 * for every node; this class records:
 * <ul>
 *   <li>{@code sourceTableSet}   — every physical table referenced via {@code TOK_TABREF},
 *       qualified as {@code schema.table} (falling back to {@code defaultSchemaName}
 *       when the reference has no explicit schema);</li>
 *   <li>{@code subqueryTableSet} — the alias of every {@code TOK_SUBQUERY} node,
 *       qualified the same way, so aliases can be distinguished from real tables.</li>
 * </ul>
 *
 * <p>NOTE(review): not thread-safe — the backing {@link HashSet}s are unsynchronized;
 * use one instance per walk.
 */
public class SourceTableDispatcher implements Dispatcher {

    /** Schema assumed for table references that carry no explicit schema qualifier. */
    @Setter
    private String defaultSchemaName;

    /** Fully-qualified ("schema.table") names of all tables read by the query. */
    @Getter
    private final Set<String> sourceTableSet = new HashSet<>();

    /** Fully-qualified subquery alias names (so they can be excluded from real sources). */
    @Getter
    private final Set<String> subqueryTableSet = new HashSet<>();

    /**
     * Inspects a single AST node and records table references and subquery aliases.
     *
     * @param nd          current AST node (always an {@link ASTNode} in a Hive walk)
     * @param stack       ancestors of {@code nd} on the walk path (unused)
     * @param nodeOutputs outputs of already-visited children (unused)
     * @return always {@code null}; results accumulate in the two sets
     * @throws SemanticException declared by the {@link Dispatcher} contract; never thrown here
     */
    @Override
    public Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs) throws SemanticException {
        ASTNode astNode = (ASTNode) nd;
        int tokenType = astNode.getToken().getType();

        // Collect every queried table: TOK_TABREF's first child is TOK_TABNAME,
        // whose children are either [schema, table] or just [table].
        if (tokenType == HiveParser.TOK_TABREF) {
            ASTNode tokTableNameNode = (ASTNode) astNode.getChild(0);
            if (tokTableNameNode.getChildCount() == 2) {
                String schemaName = tokTableNameNode.getChild(0).getText();
                String tableName = tokTableNameNode.getChild(1).getText();
                sourceTableSet.add(schemaName + "." + tableName);
            } else {
                // Unqualified reference: assume the configured default schema.
                String tableName = tokTableNameNode.getChild(0).getText();
                sourceTableSet.add(this.defaultSchemaName + "." + tableName);
            }
        }

        // Collect subquery aliases: TOK_SUBQUERY has [query, alias] when aliased.
        if (tokenType == HiveParser.TOK_SUBQUERY && astNode.getChildCount() == 2) {
            String aliasName = astNode.getChild(1).getText();
            subqueryTableSet.add(this.defaultSchemaName + "." + aliasName);
        }

        return null;
    }
}
