package app.dwd.db;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;
import utils.MyKafkaUtil;

import java.time.Duration;
//Data flow: web/app -> MySQL -> Maxwell -> Kafka(ODS) -> FlinkApp -> Kafka(DWD)
//Programs : Mock -> MySQL -> Maxwell -> Kafka(ZK) -> Dwd02_trade_cart_add -> Kafka(ZK)

/**
 * DWD job: extracts cart-add events for the 'edu' database from the ODS
 * topic_db Kafka topic and writes them to the dwd_cart_info Kafka topic.
 */
public class Dwd02_trade_cart_add {
    public static void main(String[] args) throws Exception {
        // 1. Build the streaming environment; parallelism 1 keeps local console output ordered.
        StreamExecutionEnvironment streamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        streamEnv.setParallelism(1);

        StreamTableEnvironment tblEnv = StreamTableEnvironment.create(streamEnv);
        // Optional idle-state TTL (left disabled for local testing):
        //tblEnv.getConfig().setIdleStateRetention(Duration.ofSeconds(5));
        // Production checkpoint / state-backend settings (disabled while developing):
//        streamEnv.enableCheckpointing(3000L, CheckpointingMode.EXACTLY_ONCE);
//        streamEnv.getCheckpointConfig().setCheckpointTimeout(60 * 1000L);
//        streamEnv.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
//        streamEnv.getCheckpointConfig().enableExternalizedCheckpoints(
//                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION
//        );
//        streamEnv.setRestartStrategy(RestartStrategies.failureRateRestart(
//                10, Time.of(1L, TimeUnit.DAYS), Time.of(3L, TimeUnit.MINUTES)
//        ));
//        streamEnv.setStateBackend(new HashMapStateBackend());
//        streamEnv.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
//        System.setProperty("HADOOP_USER_NAME", "atguigu");

        // 2. Register the ODS topic_db Kafka source table through Flink SQL DDL
        //    (consumer group derived from "trade_cart_add").
        tblEnv.executeSql(MyKafkaUtil.getTopicDbDDL("trade_cart_add"));
        // Debug helper — dump the raw ODS rows:
        //Table dbTable = tblEnv.sqlQuery("select * from topic_db");
        //tblEnv.toAppendStream(dbTable, Row.class).print("dbTable>>>>>>");

        // 3. Keep only cart_info change records coming from the 'edu' database.
        //    NOTE(review): only `type`='update' rows pass this filter — confirm
        //    that cart-add events are not emitted by Maxwell as 'insert' rows,
        //    otherwise first-time additions would be dropped.
        Table cartAddTable = tblEnv.sqlQuery("select \n" +
                "`data`['id'] id,\n" +
                "`data`['user_id'] user_id,\n" +
                "`data`['course_id'] course_id,\n" +
                "`data`['course_name'] course_name,\n" +
                "`data`['create_time'] create_time,\n" +
                "`data`['session_id'] session_id,\n" +
                "`data`['cart_price'] cart_price\n" +
                "from topic_db\n" +
                "where `database`='edu'\n" +
                "and `table`='cart_info'\n" +
                "and `type`='update'");
        tblEnv.createTemporaryView("cart_info", cartAddTable);
        tblEnv.toAppendStream(cartAddTable, Row.class).print("cartInfoTable>>>>>");

        // 4. Declare the DWD Kafka sink table (connector options supplied by MyKafkaUtil).
        tblEnv.executeSql("create table dwd_cart_info(\n" +
                "id string,\n" +
                "user_id string,\n" +
                "course_id string,\n" +
                "course_name string,\n" +
                "create_time string,\n" +
                "session_id string,\n" +
                "cart_price string\n" +
                ")" + MyKafkaUtil.getKafkaSinkConnOption("dwd_cart_info"));

        // Debug helper — read back what lands in the sink topic:
        Table sinkPreview = tblEnv.sqlQuery("select * from dwd_cart_info");
        tblEnv.toAppendStream(sinkPreview, Row.class).print("resultTable>>>>>>");

        // 5. Submit the INSERT job that writes filtered rows to the DWD topic,
        //    then start the DataStream job backing the debug prints above.
        tblEnv.executeSql("insert into dwd_cart_info select * from cart_info");
        streamEnv.execute();
    }
}
