package com.atguigu.gmall.realtime.test;

import com.atguigu.gmall.realtime.common.base.BaseSqlApp;
import com.atguigu.gmall.realtime.common.constant.Constant;
import com.atguigu.gmall.realtime.common.util.FlinkSqlUtil;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.RestOptions;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import static org.apache.flink.streaming.api.environment.CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION;

/**
 * Test app demonstrating the Flink SQL coding steps:
 * read from the ods topic_db connector table, run a pass-through query,
 * and write the result to a downstream Kafka topic via an upsert-kafka sink.
 */
public class Realtime03_TestFlinkSqlCodeStep extends BaseSqlApp {
    public static void main(String[] args) {
        // port 5678, parallelism 4, checkpoint/consumer-group id "test_flink_sql_code_step"
        new Realtime03_TestFlinkSqlCodeStep().start(5678, 4, "test_flink_sql_code_step");
    }

    @Override
    public void handle(StreamTableEnvironment tableEnv, StreamExecutionEnvironment env) {
        // Step 1: register the source connector table and read the ods data.
        readOdsTopicDb(tableEnv, "test_flink_sql_code_step");

        // Step 2: process the data — a pass-through query whose result is written downstream.
        Table resultTable = tableEnv.sqlQuery("select * from topic_db");

        // Step 3: register the sink connector table.
        // upsert-kafka requires a PRIMARY KEY (declared NOT ENFORCED, since Flink
        // cannot enforce it on an external Kafka topic); here keyed on `table`.
        // NOTE: a second DDL using the plain kafka connector (getKafkaSinkDDL) was
        // previously built here but never executed; it was dead code and has been
        // removed — both declared the same table name `topic_a` and only one can exist.
        String sinkTable = "CREATE TABLE topic_a (" +
                "  `database` STRING," +
                "  `table` STRING," +
                "  `type` STRING," +
                "  `ts` BIGINT, " +
                "  `data` MAP<STRING,STRING>, " +
                "  `old` MAP<STRING,STRING>, " +
                "  PRIMARY KEY (`table`) NOT ENFORCED " +
                ") " + FlinkSqlUtil.getUpsertKafkaSinkDDl("TopicA");

        tableEnv.executeSql(sinkTable);

        // Step 4: write the query result into the sink table (topic "TopicA").
        resultTable.executeInsert("topic_a");
    }
}
