package com.epic.merge;

import java.text.SimpleDateFormat;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Date;
import java.util.Map;

public class Main {

    /**
     * Legacy shared formatter, kept only for backward compatibility with any
     * external callers. {@link SimpleDateFormat} is NOT thread-safe, so this
     * class no longer uses it internally — see {@link #DATE_FORMAT}.
     *
     * @deprecated use a thread-safe {@link DateTimeFormatter} instead
     */
    @Deprecated
    public static SimpleDateFormat fromatter = new SimpleDateFormat("yyyyMMdd");

    /** Thread-safe replacement for {@link #fromatter}; same {@code yyyyMMdd} pattern. */
    private static final DateTimeFormatter DATE_FORMAT = DateTimeFormatter.ofPattern("yyyyMMdd");

    /**
     * Head of the metastore query listing one row per (tmp_* table, ns_date)
     * partition whose first partition value is older than a date literal that the
     * caller appends (see {@link #getMetastoreTmpTable()}). Each result row has a
     * single column {@code part} shaped {@code "<table_name>%<ns_date>"}.
     */
    public static String metaHead = "select distinct(part) from (select concat(T.\"TBL_NAME\",'%',split_part(split_part(P.\"PART_NAME\",'/',1),'=',2)) as part FROM \"metastore\".\"PARTITIONS\" P, \"metastore\".\"TBLS\" T where T.\"TBL_ID\" = P.\"TBL_ID\" and T.\"TBL_NAME\" LIKE 'tmp_%'  and split_part(split_part(P.\"PART_NAME\",'/',1),'=',2) < ";

    /**
     * Queries the Hive metastore for partitions of {@code tmp_*} tables dated
     * before today, and resolves each table's column list from the
     * {@code safelog_field} registry (falling back to a fixed default list when
     * the registry has no entry for the table).
     *
     * @return one {@link TableInfoShortCut} per (table, ns_date) partition found;
     *         empty list when there is nothing to migrate
     */
    public static ArrayList<TableInfoShortCut> getMetastoreTmpTable() {
        // LocalDate + DateTimeFormatter are thread-safe, unlike the deprecated
        // SimpleDateFormat field above.
        String lastDay = LocalDate.now().format(DATE_FORMAT);
        ArrayList<TableInfoShortCut> ret = new ArrayList<>();
        // NOTE(review): SQL is assembled by string concatenation. lastDay is
        // server-generated (digits only) and the table names come from our own
        // metastore, so injection risk is low — but a PreparedStatement would be
        // safer if DataBaseUtils ever supports bind parameters.
        String metaTail = String.format(" '%s' order by P.\"CREATE_TIME\") A", lastDay);
        String getTmpTableSql = metaHead + metaTail;
        ArrayList<Map<String, Object>> dbData = DataBaseUtils.query(DataBaseUtils.getConnection(), getTmpTableSql);
        String tHead = "select string_agg(name,',') as fields from (select name from safelog_field where safelog_name = '%s' order by id) A";
        for (Map<String, Object> tmp : dbData) {
            TableInfoShortCut tInfo = new TableInfoShortCut();
            // Row shape is "<tmp_table>%<ns_date>" (built by concat(...,'%',...) in metaHead).
            String[] tName = tmp.get("part").toString().split("%");
            tInfo.setTable(tName[0].substring(4)); // strip the "tmp_" prefix
            tInfo.setNs_date(tName[1]);
            String tFiledSql = String.format(tHead, tName[0]);
            // BUG FIX: the original tested tFiledSql.isEmpty() — the SQL *string*,
            // which String.format can never leave empty — so the registry lookup
            // was dead code and every table got the default column list. Query the
            // registry first and fall back to the default only when it is empty.
            String fields = getTableFileds(tFiledSql);
            if (fields.isEmpty()) {
                tInfo.setTableField(" log_id,raw,dev_ip,nstimestamp ");
            } else {
                tInfo.setTableField(fields);
            }
            ret.add(tInfo);
        }
        return ret;
    }

    /**
     * Executes {@code sql} and returns the {@code fields} column of the last row,
     * or the empty string when the query returns no rows. Callers pass a query
     * that yields at most one row (see {@link #getMetastoreTmpTable()}).
     *
     * @param sql a SELECT producing a {@code fields} column
     * @return the {@code fields} value of the last row, or {@code ""} if none
     */
    public static String getTableFileds(String sql) {
        String ret = "";
        ArrayList<Map<String, Object>> dbData = DataBaseUtils.query(DataBaseUtils.getConnection(), sql);
        for (Map<String, Object> tmp : dbData) {
            ret = tmp.get("fields").toString();
        }
        return ret;
    }

    /*
    public static void tmpTransformToHis(SparkSession spark, String table, String part, String fields) {
        String tHead = "insert into table %s partition(ns_date='%s',ns_hour='60') select %s from %s where ns_date='%s'";
        String exeSql = String.format(tHead, new Object[] { "epic.his_" + table, part, fields, "epic.tmp_" + table, part });
        spark.sql(exeSql);
    }
     */

    public static void main(String[] args) {
        // Driver loop intentionally disabled (requires a Spark runtime); kept for
        // reference until the Spark dependency is restored.
        /*
        SparkSession spark = SparkSession.builder().config("hive.exec.dynamic.partition", true).config("spark.sql.sources.partitionColumnTypeInference.enabled", false).config("hive.exec.dynamic.partition.mode", "nonstrict").enableHiveSupport().getOrCreate();
        while (true) {
            String deleteSql = "";
            String t = "";
            String nsDate = "";
            String fields = "";
            ArrayList<TableInfoShortCut> tmpTables = getMetastoreTmpTable();
            for (TableInfoShortCut table : tmpTables) {
                t = table.getTable();
                System.out.println(t);
                nsDate = table.getNs_date();
                fields = table.getTableField();
                tmpTransformToHis(spark, t, nsDate, fields);
                deleteSql = String.format("alter table epic.tmp_%s drop partition(ns_date='%s')", new Object[] { t, nsDate });
                spark.sql(deleteSql);
            }
            if (tmpTables.size() == 0)
                break;
            try {
                Thread.sleep(1000L);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
         */
    }
}
