package com.atguigu.app.dwd.db;

import com.atguigu.utils.KafkaUtil;
import com.atguigu.utils.MySqlUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * ClassName: DwdTradeCartAdd
 * Package: com.atguigu.app.dwd.db
 * Description:
 *
 * @Author Lovxy
 * @Create 2023/5/15 19:25
 * @Version 1.0
 */
//Data flow: web/app -> MySQL -> Maxwell -> Kafka(ODS) -> FlinkApp -> Kafka(DWD)
//Programs : Mock -> MySQL -> Maxwell -> Kafka(ZK) -> DwdTradeCartAdd -> Kafka(ZK)
public class DwdTradeCartAdd {
    /**
     * DWD-layer job: filters cart-add events out of the ODS topic_db stream
     * and writes them to the dwd_trade_cart_add Kafka topic via Flink SQL.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        // TODO 1. Prepare the stream and table execution environments
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // TODO 2. State backend / checkpoint settings (intentionally disabled,
        // presumably for local testing — enable before a production deployment)
//        env.enableCheckpointing(3000L, CheckpointingMode.EXACTLY_ONCE);
//        env.getCheckpointConfig().setCheckpointTimeout(60 * 1000L);
//        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
//        env.getCheckpointConfig().enableExternalizedCheckpoints(
//                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION
//        );
//        env.setRestartStrategy(RestartStrategies.failureRateRestart(
//                10, Time.of(1L, TimeUnit.DAYS), Time.of(3L, TimeUnit.MINUTES)
//        ));
//        env.setStateBackend(new HashMapStateBackend());
//        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
//        System.setProperty("HADOOP_USER_NAME", "atguigu");

        // TODO 3. Read the Kafka topic_db stream with Flink SQL
        // ("dwd_cart_add" is passed to KafkaUtil as the source identifier,
        // presumably the consumer group id — verify against KafkaUtil)
        tableEnv.executeSql(KafkaUtil.getTopicDbDDL("dwd_cart_add"));

        // TODO 4. Filter out the cart-add records.
        // Each projected expression is aliased explicitly: without aliases,
        // Flink assigns auto-generated names (EXPR$0, EXPR$1, ...) to the
        // view's columns, which makes cart_info unusable by column name and
        // leaves the trailing "select *" insert working only by position.
        Table cartInfoTable = tableEnv.sqlQuery("" +
                "select\n" +
                "    `data`['id'] id,\n" +
                "    `data`['user_id'] user_id,\n" +
                "    `data`['course_id'] course_id,\n" +
                "    `data`['course_name'] course_name,\n" +
                "    `data`['cart_price'] cart_price,\n" +
                "    `data`['session_id'] session_id,\n" +
                "    `data`['create_time'] create_time,\n" +
                "    `data`['update_time'] update_time,\n" +
                "    `data`['deleted'] deleted,\n" +
                "    `data`['sold'] sold\n" +
                " from topic_db\n" +
                " where `database`='edu'\n" +
                " and `table`='cart_info'\n" +
                // NOTE(review): only Maxwell 'insert' events pass this filter; if
                // historical data is synced with maxwell-bootstrap, the type
                // 'bootstrap-insert' may need to be included too — confirm
                // against the ingestion setup.
                " and `type`='insert'");
        tableEnv.createTemporaryView("cart_info", cartInfoTable);

        // TODO 5. Create the Kafka DWD-layer cart-add fact table (sink).
        // Column order must match the cart_info projection above, since the
        // insert below relies on positional mapping via "select *".
        tableEnv.executeSql("create table dwd_trade_cart_add(\n" +
                " `id` STRING,\n" +
                " `user_id` STRING,\n" +
                " `course_id` STRING,\n" +
                " `course_name` STRING,\n" +
                " `cart_price` STRING,\n" +
                " `session_id` STRING,\n" +
                " `create_time` STRING,\n" +
                " `update_time` STRING,\n" +
                " `deleted` STRING,\n" +
                " `sold` STRING\n" +
                ")" + KafkaUtil.getKafkaSinkConnOption("dwd_trade_cart_add"));

        // TODO 6. Write the filtered data out. executeSql submits the insert
        // job itself, so no explicit env.execute() is needed for a pure
        // Table API pipeline.
        tableEnv.executeSql("insert into dwd_trade_cart_add select * from cart_info");

    }
}
