package com.archgeek.bigdata;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.ql.parse.ParseDriver;
import org.apache.hadoop.hive.ql.parse.ParseException;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.Stack;

/**
 * Walks the Hive SQL abstract syntax tree to extract every referenced table
 * together with the operation (SELECT/INSERT/DROP/ALTER/...) applied to it,
 * and to pull the old/new names out of ALTER TABLE ... RENAME statements.
 */
public class HiveSqlParse {
    private static final Logger LOG = LoggerFactory.getLogger(HiveSqlParse.class);

    /** Result key of {@link #getAlterTablesMap}: the renamed-to ("new") db.table name. */
    public static final String NEW_DB_TB_NAME = "new_db_tb_name";
    /** Result key of {@link #getAlterTablesMap}: the renamed-from ("source") db.table name. */
    public static final String SRC_DB_TB_NAME = "src_db_tb_name";

    // Save/restore state while descending into nested queries. java.util.Stack is
    // kept deliberately (rather than ArrayDeque): 'oper' is null until the first
    // operation token is seen, and ArrayDeque rejects null elements.
    private final Stack<String> tableNameStack = new Stack<String>();
    private final Stack<Oper> operStack = new Stack<Oper>();
    // Table the walker is currently inside of; "" when none.
    private String nowQueryTable = "";
    // Operation in effect for the AST subtree currently being visited.
    private Oper oper;

    /** Operation kinds recognised while walking the AST. */
    private enum Oper {
        SELECT, INSERT, DROP, TRUNCATE, LOAD, CREATETABLE, ALTER, DROPTABLE, DELETE, LIMIT
    }

    /**
     * Recursively walks the AST rooted at {@code ast} and collects
     * {@code "table|OPERATION"} strings (plus {@code "LIMIT n"} entries).
     *
     * @param ast root of the (sub)tree to walk
     * @return the set of table/operation strings found in this subtree
     */
    public Set<String> parseIteral(ASTNode ast) {
        Set<String> set = new HashSet<String>();
        prepareToParseCurrentNodeAndChilds(ast);
        set.addAll(parseChildNodes(ast));
        set.addAll(parseCurrentNode(ast, set));
        endParseCurrentNode(ast);
        if (!set.isEmpty()) {
            // Parameterized logging instead of string concatenation.
            LOG.info("set values: {}", set);
        }
        return set;
    }

    /**
     * Restores the saved table/operation state when leaving a node whose
     * {@link #prepareToParseCurrentNodeAndChilds} pushed state on entry.
     *
     * NOTE(review): TOK_QUERY / TOK_DROPTABLE / TOK_ALTERTABLE / TOK_DELETE_FROM
     * push state on entry but are intentionally not popped here (the original
     * behaved the same way); those entries accumulate at the bottom of the
     * stacks and are never read, so this is harmless — confirm before "fixing".
     */
    private void endParseCurrentNode(ASTNode ast) {
        if (ast.getToken() == null) {
            return;
        }
        switch (ast.getToken().getType()) {
            case HiveParser.TOK_INSERT:
            case HiveParser.TOK_SELECT:
            case HiveParser.TOK_CREATETABLE:
                // These tokens pushed both stacks on entry; restore here.
                nowQueryTable = tableNameStack.pop();
                oper = operStack.pop();
                break;
            default:
                break;
        }
    }

    /**
     * Emits a {@code "table|OPERATION"} entry for the node itself, depending on
     * its token type. Mutates and returns the supplied set.
     */
    private Set<String> parseCurrentNode(ASTNode ast, Set<String> set) {
        if (ast.getToken() == null) {
            return set;
        }
        switch (ast.getToken().getType()) {
            case HiveParser.TOK_TABLE_PARTITION:
                // NOTE(review): only records the table when the partition node does
                // NOT have exactly two children — presumably to skip some
                // partition-spec shapes; confirm against the Hive grammar.
                if (ast.getChildCount() != 2) {
                    // Renamed from "table": the same name was declared again in the
                    // TOK_CREATETABLE case below, and switch cases share one scope
                    // in Java, which made the original fail to compile.
                    String partitionTable = BaseSemanticAnalyzer
                            .getUnescapedName((ASTNode) ast.getChild(0));
                    if (oper == Oper.SELECT) {
                        nowQueryTable = partitionTable;
                    }
                    set.add(partitionTable + "|" + oper);
                }
                break;

            case HiveParser.TOK_TAB: // output table of an INSERT
                String tableTab = BaseSemanticAnalyzer
                        .getUnescapedName((ASTNode) ast.getChild(0));
                if (oper == Oper.SELECT) {
                    nowQueryTable = tableTab;
                }
                set.add(tableTab + "|" + oper);
                break;
            case HiveParser.TOK_TABREF: // input table reference
                ASTNode tabTree = (ASTNode) ast.getChild(0);
                // One child: bare table name; two children: "db" + "table".
                String tableName = (tabTree.getChildCount() == 1) ? BaseSemanticAnalyzer
                        .getUnescapedName((ASTNode) tabTree.getChild(0))
                        : BaseSemanticAnalyzer.getUnescapedName((ASTNode) tabTree.getChild(0))
                        + "." + tabTree.getChild(1);
                set.add(tableName + "|" + oper);
                break;
            case HiveParser.TOK_TABLE_OR_COL:
            case HiveParser.TOK_ALLCOLREF:
            case HiveParser.TOK_SUBQUERY:
            case HiveParser.TOK_SELEXPR:
            case HiveParser.DOT:
                // Column-level tokens: nothing table-related to record.
                break;
            case HiveParser.TOK_ALTERTABLE_ADDPARTS:
            case HiveParser.TOK_ALTERTABLE:
                String alterTable = BaseSemanticAnalyzer
                        .getUnescapedName((ASTNode) ast.getChild(0));
                if (oper == Oper.ALTER) {
                    nowQueryTable = alterTable;
                }
                set.add(alterTable + "|" + "ALTER");
                break;
            case HiveParser.TOK_ALTERTABLE_ADDCOLS:
                ASTNode alterTableName = (ASTNode) ast.getChild(0);
                set.add(alterTableName.getText() + "|" + oper);
                break;
            case HiveParser.TOK_DROPTABLE:
                String dropTable = BaseSemanticAnalyzer
                        .getUnescapedName((ASTNode) ast.getChild(0));
                if (oper == Oper.DROPTABLE) {
                    nowQueryTable = dropTable;
                }
                set.add(dropTable + "|" + "DROP");
                break;
            case HiveParser.TOK_CREATETABLE:
                String createTable = BaseSemanticAnalyzer
                        .getUnescapedName((ASTNode) ast.getChild(0));
                if (oper == Oper.CREATETABLE) {
                    nowQueryTable = createTable;
                }
                set.add(createTable + "|" + "CREATETABLE");
                break;
            case HiveParser.TOK_DELETE_FROM:
                String deleteTable = BaseSemanticAnalyzer
                        .getUnescapedName((ASTNode) ast.getChild(0));
                if (oper == Oper.DELETE) {
                    nowQueryTable = deleteTable;
                }
                set.add(deleteTable + "|" + "DELETE");
                break;
            case HiveParser.TOK_LIMIT:
                String limitNum = BaseSemanticAnalyzer
                        .getUnescapedName((ASTNode) ast.getChild(0));
                // parseInt both validates the literal and normalizes e.g. "007".
                set.add("LIMIT " + Integer.parseInt(limitNum));
                break;
            default:
                break;
        }
        return set;
    }

    /**
     * Walks all children of {@code ast}, accumulating their table/operation
     * entries into one set.
     */
    private Set<String> parseChildNodes(ASTNode ast) {
        Set<String> set = new HashSet<String>();
        int numCh = ast.getChildCount();
        for (int num = 0; num < numCh; num++) {
            ASTNode child = (ASTNode) ast.getChild(num);
            set.addAll(parseIteral(child));
        }
        return set;
    }

    /**
     * Sets (and where needed saves) the current operation before the node and
     * its children are visited.
     */
    private void prepareToParseCurrentNodeAndChilds(ASTNode ast) {
        if (ast.getToken() == null) {
            return;
        }
        switch (ast.getToken().getType()) {
            case HiveParser.TOK_RIGHTOUTERJOIN:
            case HiveParser.TOK_LEFTOUTERJOIN:
            case HiveParser.TOK_JOIN:
                // Joins inherit the enclosing query's state unchanged.
                break;
            case HiveParser.TOK_QUERY:
                tableNameStack.push(nowQueryTable);
                operStack.push(oper);
                nowQueryTable = "";
                oper = Oper.SELECT;
                break;
            case HiveParser.TOK_INSERT:
                tableNameStack.push(nowQueryTable);
                operStack.push(oper);
                oper = Oper.INSERT;
                break;
            case HiveParser.TOK_SELECT:
                tableNameStack.push(nowQueryTable);
                operStack.push(oper);
                oper = Oper.SELECT;
                break;
            case HiveParser.TOK_DROPTABLE:
                tableNameStack.push(nowQueryTable);
                operStack.push(oper);
                nowQueryTable = "";
                oper = Oper.DROP;
                break;
            case HiveParser.TOK_TRUNCATETABLE:
                oper = Oper.TRUNCATE;
                break;
            case HiveParser.TOK_LOAD:
                oper = Oper.LOAD;
                break;
            case HiveParser.TOK_CREATETABLE:
                tableNameStack.push(nowQueryTable);
                operStack.push(oper);
                nowQueryTable = "";
                oper = Oper.CREATETABLE;
                break;
            case HiveParser.TOK_ALTERTABLE:
                tableNameStack.push(nowQueryTable);
                operStack.push(oper);
                nowQueryTable = "";
                oper = Oper.ALTER;
                break;
            case HiveParser.TOK_DELETE_FROM:
                tableNameStack.push(nowQueryTable);
                operStack.push(oper);
                nowQueryTable = "";
                oper = Oper.DELETE;
                break;
            case HiveParser.TOK_LIMIT:
                // Bug fix: the original pushed operStack here with no matching
                // pop anywhere, so the pop for the enclosing TOK_INSERT restored
                // LIMIT instead of the saved operation. Set the mode only, like
                // TRUNCATE/LOAD do. (Also dropped a stray debug println.)
                oper = Oper.LIMIT;
                break;
            default:
                break;
        }
        // NOTE(review): treats the whole contiguous ALTER* token-id range as an
        // ALTER operation — this relies on HiveParser's generated token ordering;
        // re-verify after any Hive upgrade.
        if (ast.getToken().getType() >= HiveParser.TOK_ALTERDATABASE_PROPERTIES
                && ast.getToken().getType() <= HiveParser.TOK_ALTERVIEW_RENAME) {
            oper = Oper.ALTER;
        }
    }

    /**
     * Strips a single pair of surrounding backticks from an identifier.
     *
     * @param val possibly backquoted identifier; may be null
     * @return the identifier without surrounding backticks, or null if val is null
     */
    public static String unescapeIdentifier(String val) {
        if (val == null) {
            return null;
        }
        // Length guard fixes StringIndexOutOfBoundsException on "" (charAt(0))
        // and on the single character "`" (substring(1, 0)).
        if (val.length() >= 2 && val.charAt(0) == '`' && val.charAt(val.length() - 1) == '`') {
            val = val.substring(1, val.length() - 1);
        }
        return val;
    }

    /**
     * Parses a Hive SQL statement and returns the referenced tables as
     * {@code "db.table|OPERATION"} strings. DESC / DESCRIBE [FORMATTED],
     * SHOW CREATE TABLE and SHOW TABLES IN statements are first rewritten into
     * equivalent SELECTs so the query walker can handle them.
     *
     * @param sql the statement to parse (case-insensitive)
     * @throws Exception if the statement cannot be parsed
     */
    public Set<String> parse(String sql) throws Exception {
        sql = sql.toLowerCase();
        // Whitespace-free copy used to recognise multi-word statement prefixes.
        String compact = sql.replace(" ", "");

        if (compact.startsWith("descformatted") || compact.startsWith("describeformatted")) {
            compact = compact.replaceFirst("describeformatted", "select * from ");
            compact = compact.replaceFirst("descformatted", "select * from ");
            sql = compact;
        } else if (sql.startsWith("desc")) { // also covers "describe"
            // Bug fix: anchored replaceFirst instead of global replace(), which
            // used to mangle any table name containing "desc"/"describe".
            sql = sql.replaceFirst("^describe", "select * from ");
            sql = sql.replaceFirst("^desc", "select * from ");
        } else if (compact.startsWith("showcreatetable")) {
            sql = compact.replaceFirst("showcreatetable", "select * from ");
        } else if (compact.startsWith("showtablesin")) {
            // NOTE(review): the "---------" -> ".test ----" rewrite turns a trailing
            // dash-comment into a dummy table suffix; confirm it is still required.
            sql = compact.replaceFirst("showtablesin", "select * from ")
                    .replace("---------", ".test ----");
        }
        ParseDriver pd = new ParseDriver();
        Configuration hadoopConf = new Configuration();
        // Context creation requires the Hive session paths to be present.
        hadoopConf.set("_hive.hdfs.session.path", "/tmp");
        hadoopConf.set("_hive.local.session.path", "/tmp");
        Context ctx = new Context(hadoopConf);
        ASTNode ast = pd.parse(sql, ctx);
        return parseIteral(ast);
    }

    /**
     * Descends through token-less wrapper nodes (e.g. the synthetic root) to the
     * first node that carries a token.
     *
     * @return the first token-bearing node, or null if {@code tree} is null
     */
    private ASTNode findRootNonNullToken(ASTNode tree) {
        if (tree == null) {
            return null;
        }
        while ((tree.getToken() == null) && (tree.getChildCount() > 0)) {
            tree = (ASTNode) tree.getChild(0);
        }
        return tree;
    }

    /**
     * For an {@code ALTER TABLE ... RENAME TO ...} statement, returns a map with
     * {@link #SRC_DB_TB_NAME} -&gt; old name and {@link #NEW_DB_TB_NAME} -&gt;
     * new name (each either "db.table" or just "table"). Any other statement
     * yields an empty map.
     *
     * @throws ParseException if the statement cannot be parsed
     */
    public Map<String, String> getAlterTablesMap(String sql) throws ParseException {
        ParseDriver pd = new ParseDriver();
        ASTNode node = findRootNonNullToken(pd.parse(sql));
        // Demoted from System.out.println to debug-level logging.
        LOG.debug("dump: {}", node.dump());

        Map<String, String> map = new HashMap<String, String>();
        if (node.getToken().getType() != HiveParser.TOK_ALTERTABLE
                || node.getChildCount() != 2) {
            return map;
        }
        ASTNode nameNode = (ASTNode) node.getChild(0);
        if ("TOK_TABNAME".equals(nameNode.getToken().getText())) {
            map.put(SRC_DB_TB_NAME, joinDbTable(nameNode));
        }
        ASTNode opNode = (ASTNode) node.getChild(1);
        if ("TOK_ALTERTABLE_RENAME".equals(opNode.getToken().getText())
                && opNode.getChildCount() == 1) {
            map.put(NEW_DB_TB_NAME, joinDbTable((ASTNode) opNode.getChild(0)));
        }
        return map;
    }

    /** Joins a TOK_TABNAME node's children into "db.table" (or just "table"). */
    private String joinDbTable(ASTNode tabName) {
        if (tabName.getChildCount() == 2) {
            return tabName.getChild(0).getText() + "." + tabName.getChild(1).getText();
        }
        return tabName.getChild(0).getText();
    }

    /** Ad-hoc demo: parse one sample statement and print the referenced tables. */
    public static void main(String[] args) throws Exception {
        String parsesql = "select t1.*,t2.* from\n" +
                "(\n" +
                "  select distinct factivity_channel,fuid\n" +
                "  from lx_dwd.dwd_maiya_invite_activity_user_channel_detail \n" +
                "  where cast(fbind_time as date)>='2022-04-14'\n" +
                "  and fuid in \n" +
                " (\n" +
                "   select distinct fuid  from dp_xxhf_mart.t_maiya_order_detail\n" +
                "   where  date(fcreate_time) between '2022-04-23' and '2022-04-23'\n" +
                "   and forder_state>=360\n" +
                "   and (fuid < 3000000 or fuid > 5000000) \n" +
                "   and fmaiya_order_plat_source=2\n" +
                " )\n" +
                ") t1\n" +
                "left join dp_fksx_mart.north_maiya_rcorder_id_total_out t2\n" +
                "on t2.fuid=t1.fuid\n" +
                "limit 10";

        HiveSqlParse hp = new HiveSqlParse();
        System.out.println(parsesql);
        Set<String> tables = hp.parse(parsesql);
        System.out.println("====================");
        System.out.print(tables);
    }
}
