package Blood.SqlParse;


import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.hive.ql.lib.*;
import org.apache.hadoop.hive.ql.parse.*;

import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

/**
 * Extracts table-level lineage (source and target tables) from Hive SQL
 * scripts by walking the Hive parser's AST with a {@link NodeProcessor}.
 *
 * <p>Results are published as {@code TableNode} records onto a
 * {@link BlockingQueue} for a downstream consumer. This class keeps mutable
 * per-query state ({@code inputTableList} etc.) and is therefore NOT
 * thread-safe; use one instance per worker thread.
 */
public class LineageUtils implements NodeProcessor {

    // Source tables read by the current query (FROM / JOIN references).
    TreeSet<String> inputTableList = new TreeSet<String>();

    // Target tables written by the current query (CREATE / INSERT targets).
    TreeSet<String> outputTableList = new TreeSet<String>();

    // Aliases declared in WITH clauses; the effective inputs are
    // inputTableList minus withTableList.
    TreeSet<String> withTableList = new TreeSet<String>();

    // Parsed lineage results, handed off to a downstream consumer.
    BlockingQueue<TableNode> blockingQueue = new LinkedBlockingQueue<>();

    public BlockingQueue<TableNode> getBlockingQueue() {
        return blockingQueue;
    }

    public TreeSet<String> getInputTableList() {
        return inputTableList;
    }

    public TreeSet<String> getOutputTableList() {
        return outputTableList;
    }

    public TreeSet<String> getWithTableList() {
        return withTableList;
    }

    /**
     * AST visitor callback: records table names depending on the token type
     * of the visited node. Always returns {@code null} (no per-node output).
     *
     * @param nd the AST node being visited
     * @throws SemanticException required by the {@link NodeProcessor} contract
     */
    public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException {
        ASTNode pt = (ASTNode) nd;
        switch (pt.getToken().getType()) {
            // CREATE TABLE statement: child 0 is the table being created.
            case HiveParser.TOK_CREATETABLE: {
                String createName = BaseSemanticAnalyzer.getUnescapedName((ASTNode) pt.getChild(0));
                outputTableList.add(createName);
                break;
            }

            // INSERT target table.
            case HiveParser.TOK_TAB: {
                String insertName = BaseSemanticAnalyzer.getUnescapedName((ASTNode) pt.getChild(0));
                outputTableList.add(insertName);
                break;
            }

            // FROM / JOIN table reference; handles both "table" and "db.table".
            case HiveParser.TOK_TABREF: {
                ASTNode tabTree = (ASTNode) pt.getChild(0);
                String fromName = (tabTree.getChildCount() == 1)
                        ? BaseSemanticAnalyzer.getUnescapedName((ASTNode) tabTree.getChild(0))
                        : BaseSemanticAnalyzer.getUnescapedName((ASTNode) tabTree.getChild(0)) + "." + tabTree.getChild(1);
                inputTableList.add(fromName);
                break;
            }

            // WITH ... clause: record each CTE alias so it can be subtracted
            // from the input set later (a CTE is not a real source table).
            case HiveParser.TOK_CTE: {
                for (int i = 0; i < pt.getChildCount(); i++) {
                    ASTNode subQuery = (ASTNode) pt.getChild(i);
                    // NOTE(review): assumes child(1) of each CTE subquery node
                    // is the alias identifier — matches Hive's TOK_CTE layout.
                    String cteName = BaseSemanticAnalyzer.getUnescapedName((ASTNode) subQuery.getChild(1));
                    withTableList.add(cteName);
                }
                break;
            }

        }
        return null;
    }

    /**
     * Parses a single query, walks its AST to collect lineage, and enqueues
     * the result as a {@code TableNode}.
     *
     * @param query    one complete Hive SQL statement
     * @param filePath originating script path, recorded on the TableNode
     * @throws ParseException    if the query cannot be parsed
     * @throws SemanticException propagated from AST walking
     */
    public void getLineageInfo(String query, String filePath) throws ParseException, SemanticException {
        ParseDriver pd = new ParseDriver();
        // Let ParseException propagate: the previous version swallowed it and
        // then dereferenced a null tree, causing a NullPointerException.
        ASTNode tree = pd.parse(query);

        // Skip synthetic wrapper nodes until we reach a real token.
        while (tree.getToken() == null && tree.getChildCount() > 0) {
            tree = (ASTNode) tree.getChild(0);
        }

        // Reset per-query state before walking.
        inputTableList.clear();
        outputTableList.clear();
        withTableList.clear();

        Map<Rule, NodeProcessor> rules = new LinkedHashMap<Rule, NodeProcessor>();
        // No specific rules: this instance is the default processor for
        // every node, so process() sees the whole tree.
        Dispatcher disp = new DefaultRuleDispatcher(this, rules, null);
        GraphWalker ogw = new DefaultGraphWalker(disp);

        List<Node> topNodes = new ArrayList<>();
        topNodes.add(tree);
        ogw.startWalking(topNodes, null);

        // CTE aliases are not real source tables — remove them.
        inputTableList.removeAll(withTableList);

        TableNode tn = new TableNode();
        tn.setExecute_platform("hive");
        tn.setPid(1);
        tn.setScript_path(filePath);
        tn.setTable_name(StringUtils.join(outputTableList, ","));
        tn.setSource_table(StringUtils.join(inputTableList, ","));
        try {
            // Bounded wait so a stalled consumer cannot block us forever.
            if (!blockingQueue.offer(tn, 100, TimeUnit.SECONDS)) {
                System.err.println("Timed out enqueueing lineage for " + filePath);
            }
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers can observe it.
            Thread.currentThread().interrupt();
        }
    }

    // Regex -> replacement pairs applied before parsing, to strip constructs
    // the Hive parser chokes on (e.g. ${var} placeholders) and to backtick
    // reserved words used as identifiers. LinkedHashMap keeps the
    // substitution order deterministic.
    private static final Map<String, String> keywordsRegexpEpress = new LinkedHashMap<>();
    static {
        keywordsRegexpEpress.put("\\$\\{.*?\\}", "test");
        // Capture the adjacent characters and put them back ($1/$2): the old
        // patterns consumed and silently deleted them, corrupting the SQL.
        keywordsRegexpEpress.put("([^`])\\btime\\b", "$1`time`");
        keywordsRegexpEpress.put("([^`])\\bdate\\b([^(])", "$1`date`$2");
    }

    /**
     * Lower-cases the SQL and applies all keyword substitutions.
     */
    private String deleteKeyword(String sql) {
        String result = sql.toLowerCase();
        for (Map.Entry<String, String> entry : keywordsRegexpEpress.entrySet()) {
            result = result.replaceAll(entry.getKey(), entry.getValue());
        }
        return result;
    }

    /**
     * Reads a SQL script file, splits it into individual statements on ';',
     * and extracts lineage for every statement containing a SELECT.
     *
     * <p>Lines starting with SET/USE/-- and blank lines are skipped.
     *
     * @param filePath path of the script to parse
     * @throws Exception on I/O failure
     */
    public void parseSql(String filePath) throws Exception {
        List<String> queries = new ArrayList<>();
        StringBuilder sql = new StringBuilder();
        // try-with-resources: the previous version leaked the streams.
        try (BufferedReader br = new BufferedReader(
                new InputStreamReader(new FileInputStream(filePath), StandardCharsets.UTF_8))) {
            String line;
            while ((line = br.readLine()) != null) {
                String lower = line.toLowerCase().trim();
                if (!lower.startsWith("set")
                        && !lower.startsWith("use")
                        && !line.startsWith("--")
                        && !"".equals(line.replaceAll("(?m)^\\s*$(\\n|\\r\\n)", ""))) {
                    sql.append(line.replace(";", "")).append("\n");
                }
                // A ';' ends the current statement; flush it if non-blank.
                if (line.contains(";") && !"".equals(sql.toString().replaceAll("(?m)^\\s*$(\\n|\\r\\n)", ""))) {
                    queries.add(sql.toString());
                    sql.setLength(0);
                }
            }
        }
        // Script without a trailing ';' — keep the remaining buffer.
        if (queries.isEmpty() && sql.length() > 0) {
            queries.add(sql.toString());
        }
        for (String query : queries) {
            if (query.isEmpty() || !query.contains("select")) {
                continue;
            }
            try {
                this.getLineageInfo(deleteKeyword(query), filePath);
            } catch (ParseException | SemanticException e) {
                // Best-effort: a bad statement must not abort the whole file.
                e.printStackTrace();
            }
        }
    }

    // Ad-hoc smoke test; the SQL in the referenced file is arbitrary but
    // syntactically valid.
    public static void main(String[] args) throws Exception {
        LineageUtils l = new LineageUtils();
        l.parseSql("F:\\yh\\yc_data\\workflow\\biz\\dwb\\dwb_fct_order_detail_fin\\mid_fw_fct_order_channel_ordertype\\biz.sql");

        System.out.println(l.deleteKeyword("`time`"));
    }
}


