package com.atliuzu.app.dwd.db;

import com.atliuzu.utils.MyKafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD layer, trade domain: cart-add transaction fact table.
 *
 * @author zhang
 * @since 2022-08-19
 */
public class DwdTradeCartAdd {

    /**
     * Pipeline: read CDC changelog rows from the Kafka {@code topic_db} topic,
     * filter for {@code cart_info} changes in the {@code gmall} database,
     * project the cart-add columns, and write the result to the
     * {@code dwd_trade_cart_add} Kafka topic via Flink SQL.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        // TODO 1. Create the stream execution environment and its SQL bridge.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 for local development; raise for production deployment.
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Checkpointing / state backend configuration — intentionally disabled
        // for local runs; re-enable for production fault tolerance.
//        env.enableCheckpointing(3000L, CheckpointingMode.EXACTLY_ONCE);
//        env.getCheckpointConfig().setCheckpointTimeout(60 * 1000L);
//        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
//        env.getCheckpointConfig().enableExternalizedCheckpoints(
//                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION
//        );
//        env.setRestartStrategy(RestartStrategies
//                .failureRateRestart(10,
//                        Time.of(3L, TimeUnit.DAYS),
//                        Time.of(1L, TimeUnit.MINUTES)));
//        env.setStateBackend(new HashMapStateBackend());
//        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
//        System.setProperty("HADOOP_USER_NAME", "atguigu");

        // TODO 2. Register the Kafka topic_db source table (DDL built by MyKafkaUtil).
        tableEnv.executeSql(MyKafkaUtil.getTopicDb("topic_db"));

        // Extract cart_info change records: pull each field out of the CDC
        // `data` map and keep only rows from the gmall database's cart_info table.
        Table resultTable = tableEnv.sqlQuery("select " +
                "    `data`['id'] id, " +
                "    `data`['user_id'] user_id, " +
                "    `data`['course_id'] course_id, " +
                "    `data`['course_name'] course_name, " +
                "    `data`['cart_price'] cart_price, " +
                "    `data`['session_id'] session_id, " +
                "    `data`['create_time'] create_time, " +
                "    `data`['update_time'] update_time, " +
                "    `data`['deleted'] deleted, " +
                "    `data`['sold'] sold " +
                "from topic_db " +
                "    where  " +
                "        `database`='gmall' " +
                "    and " +
                "        `table`='cart_info'");
        // Register the projection as a temporary view for the insert below.
        tableEnv.createTemporaryView("cart_info", resultTable);

        // TODO 3. Declare the DWD-layer cart-add sink table backed by Kafka
        //         (connector options appended by MyKafkaUtil.getInsertKafkaDDL).
        tableEnv.executeSql("create table dwd_trade_cart_add( " +
                "    `id` STRING, " +
                "    `user_id` STRING, " +
                "    `course_id` STRING, " +
                "    `course_name` STRING, " +
                "    `cart_price` STRING, " +
                "    `session_id` STRING, " +
                "    `create_time` STRING, " +
                "    `update_time` STRING, " +
                "    `deleted` STRING, " +
                "    `sold` STRING " +
                ")" + MyKafkaUtil.getInsertKafkaDDL("dwd_trade_cart_add"));

        // TODO 4. Write the filtered cart-add rows to the Kafka sink.
        tableEnv.executeSql("insert into dwd_trade_cart_add select * from cart_info");
    }
}
