package com.lzwk.app.dwd;

import com.lzwk.config.JobConfig;
import com.lzwk.tableSql.hive.HiveSink;
import com.lzwk.tableSql.kafka.source.EarthLog;
import com.lzwk.utils.HiveUtil;

import java.util.concurrent.ExecutionException;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.StatementSet;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author: CC
 * @Date: 2022/2/18 11:54
 */
public class BaseLogSink {
    /** Kafka consumer-group id shared by every Kafka source table that feeds the Hive sink. */
    public static final String hiveSinkGroupId = "flink_2_cos";

    /**
     * Builds the INSERT statement that copies all non-heartbeat rows
     * ({@code evt <> '_beat'}) from a Kafka-backed source table into a Hive table.
     *
     * @param sourceTableName source table registered in the default catalog
     * @param sinkTableName   target table inside {@code hive.warehouse}
     * @return the {@code INSERT INTO ... SELECT ...} SQL string
     */
    public static String insertSql(String sourceTableName, String sinkTableName) {
        return "insert into hive.warehouse." + sinkTableName + " select * from " + sourceTableName + " where evt <> '_beat' ";
    }

    /**
     * Builds the INSERT statement for dirty (unparseable) log records: maps
     * {@code @timestamp} to {@code ts}, {@code @filepath} to {@code filepath},
     * keeps the raw {@code log} column, and derives {@code dt}/{@code hr}
     * partition columns from the current processing time.
     *
     * @param sourceTableName source table holding dirty records
     * @param sinkTableName   target table inside {@code hive.warehouse}
     * @return the {@code INSERT INTO ... SELECT ...} SQL string
     */
    public static String insertSqlDirty(String sourceTableName, String sinkTableName) {
        return "insert into hive.warehouse." + sinkTableName + " select `@timestamp` as ts, `@filepath` as filepath, log,DATE_FORMAT(CURRENT_TIMESTAMP,'yyyy-MM-dd') as dt,DATE_FORMAT(CURRENT_TIMESTAMP,'HH') as hr from " + sourceTableName;
    }

    /**
     * Registers every Kafka source table (one ODS clean-log table, seven DWD
     * per-product tables, and one dirty-log table) in the default catalog,
     * all sharing {@link #hiveSinkGroupId} as the consumer group.
     */
    public static void createSourceKafkaTables(StreamTableEnvironment tableEnv) {
        EarthLog.odsEarthLog(tableEnv, "ods_earth_log_clean", BaseLogSink.hiveSinkGroupId);
        EarthLog.dwdEarthLog(tableEnv, "dwd_wk_log", BaseLogSink.hiveSinkGroupId);
        EarthLog.dwdEarthLog(tableEnv, "dwd_wks_log", BaseLogSink.hiveSinkGroupId);
        EarthLog.dwdEarthLog(tableEnv, "dwd_wka_log", BaseLogSink.hiveSinkGroupId);
        EarthLog.dwdEarthLog(tableEnv, "dwd_lha_log", BaseLogSink.hiveSinkGroupId);
        EarthLog.dwdEarthLog(tableEnv, "dwd_oc_log", BaseLogSink.hiveSinkGroupId);
        EarthLog.dwdEarthLog(tableEnv, "dwd_lx_log", BaseLogSink.hiveSinkGroupId);
        EarthLog.dwdEarthLog(tableEnv, "dwd_others_log", BaseLogSink.hiveSinkGroupId);
        EarthLog.dirtyEarthLog(tableEnv, "dirty_earth_log", BaseLogSink.hiveSinkGroupId);
    }

    /**
     * Registers the Hive sink tables. Temporarily switches the environment to
     * the "hive" catalog with the HIVE dialect while creating them, then
     * restores the default catalog and DEFAULT dialect so subsequent
     * statements parse as Flink SQL.
     *
     * <p>NOTE(review): despite its name this method creates Hive tables, not
     * Kafka tables; the name is kept for caller compatibility.
     */
    public static void createSinkKafkaTables(StreamTableEnvironment tableEnv) {
        tableEnv.useCatalog("hive");
        tableEnv.getConfig().setSqlDialect(SqlDialect.HIVE);
        HiveSink.odsEarthLog(tableEnv, "ods_earth_event_n", "ods/ods_earth_event_n");
        HiveSink.dwdEarthLog(tableEnv, "dwd_earth_wk_h5_event_n_tr_1d", "dwd/earth_wk_h5_event_n_tr_1d");
        HiveSink.dwdEarthLog(tableEnv, "dwd_earth_wk_server_event_n_tr_1d", "dwd/earth_wk_server_event_n_tr_1d"); /* no data yet */
        HiveSink.dwdEarthLog(tableEnv, "dwd_earth_wk_app_event_n_tr_1d", "dwd/earth_wk_app_event_n_tr_1d");
        HiveSink.dwdEarthLog(tableEnv, "dwd_earth_lh_app_event_n_tr_1d", "dwd/earth_lh_app_event_n_tr_1d");
        HiveSink.dwdEarthLog(tableEnv, "dwd_earth_ocpx_event_n_tr_1d", "dwd/earth_ocpx_event_n_tr_1d");
        HiveSink.dwdEarthLog(tableEnv, "dwd_earth_lx_h5_event_n_tr_1d", "dwd/earth_lx_h5_event_n_tr_1d");
        HiveSink.dwdEarthLog(tableEnv, "dwd_earth_others_event_n_tr_1d", "dwd/earth_others_event_n_tr_1d");
        // FIX: table name must match insertTable()'s target "ods_earth_dirty_event_n_tr_1d";
        // the previous name "ods_dirty_earth_dirty_event_n_tr_1d" left the dirty-log INSERT
        // pointing at a table that was never created. The Hive path below confirms the name.
        HiveSink.dirtyEarthLog(tableEnv, "ods_earth_dirty_event_n_tr_1d", "ods/earth_dirty_event_n_tr_1d");
        tableEnv.useCatalog("default_catalog");
        // FIX: restore the DEFAULT dialect (was erroneously re-set to HIVE after
        // leaving the hive catalog, mirroring L43 instead of undoing it).
        tableEnv.getConfig().setSqlDialect(SqlDialect.DEFAULT);
    }

    /**
     * Queues one INSERT per source/sink pair on the statement set, submits the
     * job, and blocks until it terminates.
     *
     * @param statementSet the statement set to populate and execute
     * @throws InterruptedException if the blocking wait is interrupted
     * @throws RuntimeException     if the Flink job itself fails
     */
    public static void insertTable(StatementSet statementSet) throws InterruptedException {
        statementSet.addInsertSql(BaseLogSink.insertSql("ods_earth_log_clean", "ods_earth_event_n"));
        statementSet.addInsertSql(BaseLogSink.insertSql("dwd_wk_log", "dwd_earth_wk_h5_event_n_tr_1d"));
        statementSet.addInsertSql(BaseLogSink.insertSql("dwd_wks_log", "dwd_earth_wk_server_event_n_tr_1d"));
        statementSet.addInsertSql(BaseLogSink.insertSql("dwd_wka_log", "dwd_earth_wk_app_event_n_tr_1d"));
        statementSet.addInsertSql(BaseLogSink.insertSql("dwd_lha_log", "dwd_earth_lh_app_event_n_tr_1d"));
        statementSet.addInsertSql(BaseLogSink.insertSql("dwd_oc_log", "dwd_earth_ocpx_event_n_tr_1d"));
        statementSet.addInsertSql(BaseLogSink.insertSql("dwd_lx_log", "dwd_earth_lx_h5_event_n_tr_1d"));
        statementSet.addInsertSql(BaseLogSink.insertSql("dwd_others_log", "dwd_earth_others_event_n_tr_1d"));
        statementSet.addInsertSql(BaseLogSink.insertSqlDirty("dirty_earth_log", "ods_earth_dirty_event_n_tr_1d"));
        // FIX: Object.wait() was called without holding the monitor, which always throws
        // IllegalMonitorStateException at runtime; TableResult.await() is the intended
        // blocking call for waiting on job completion.
        try {
            statementSet.execute().await();
        } catch (ExecutionException e) {
            // Preserve the original "throws InterruptedException" signature;
            // surface job failures unchecked, keeping the cause attached.
            throw new RuntimeException("earth_log_sink job failed", e);
        }
    }

    /**
     * Entry point: configures the streaming/table environments, registers the
     * Hive catalog plus all source and sink tables, and submits the job.
     */
    public static void main(String[] args) throws InterruptedException {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Constructed for its side effects on env (prod profile) — presumably
        // sets checkpointing/parallelism; confirm against JobConfig.
        JobConfig prod = new JobConfig("prod", env);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        Configuration configuration = tableEnv.getConfig().getConfiguration();

        // Mark idle Kafka partitions after 5 minutes so watermarks keep advancing.
        configuration.setString("table.exec.source.idle-timeout", "5min");
        configuration.setString("pipeline.name", "earth_log_sink");
        // Expire idle operator state after 24 hours.
        configuration.setString("table.exec.state.ttl", "24h");

        StatementSet statementSet = tableEnv.createStatementSet();

        HiveUtil.initHiveCatalog(tableEnv);

        BaseLogSink.createSourceKafkaTables(tableEnv);
        BaseLogSink.createSinkKafkaTables(tableEnv);
        BaseLogSink.insertTable(statementSet);
    }
}
