package realtime.app.dwd.db;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import realtime.util.MyKafkaUtil;

/**
 * DWD-layer job: extracts "cart add" (加购) fact records from the raw ODS
 * Kafka topic {@code topic_db} and writes them to the DWD Kafka topic
 * {@code dwd_trade_cart_add}.
 *
 * Pipeline: read topic_db via FlinkSQL -> filter cart_info inserts ->
 * project the business columns -> sink to Kafka.
 */
public class Dwd01_TradeCartAdd {
    public static void main(String[] args) {
        //TODO 1. Set up the execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1); // single parallelism for dev; raise in production
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Checkpointing is disabled during local development; re-enable for production.
//        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
//        env.getCheckpointConfig().setCheckpointTimeout(10000L);
//        env.setStateBackend(new HashMapStateBackend());
//        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/flinkcdc/220926");

        // HDFS user for checkpoint storage (only needed when checkpointing to HDFS)
        //System.setProperty("HADOOP_USER_NAME", "atguigu");

        //TODO 2. Register the source table over the Kafka topic_db topic via FlinkSQL
        // "cart_add_edu" is the consumer group id used by the generated DDL.
        tableEnv.executeSql(MyKafkaUtil.getTopicDbDDL("cart_add_edu"));

        // Debug: print the raw source stream
//        tableEnv.sqlQuery("select * from topic_db").execute().print();

        //TODO 3. Filter out cart-add rows: inserts into the cart_info table
        // NOTE(review): database filter is 'gmall' while the group id suggests an
        // "edu" project — confirm the upstream database name is correct.
        Table cartInfoTable = tableEnv.sqlQuery("" +
                "select\n" +
                "    `data`['id'] id,\n" +
                "    `data`['user_id'] user_id,\n" +
                "    `data`['course_id'] course_id,\n" +
                "    `data`['course_name'] course_name,\n" +
                "    `data`['cart_price'] cart_price,\n" +
                "    `data`['session_id'] session_id,\n" +
                "    `data`['create_time'] create_time,\n" +
                "    `data`['update_time'] update_time,\n" +
                "    `data`['deleted'] deleted,\n" +
                "    `data`['sold'] sold\n" +
//                "    pt\n" +
                "from topic_db\n" +
                "where `database` = 'gmall'\n" +
                "and `table` = 'cart_info'\n" +
                "and `type` = 'insert'");

        tableEnv.createTemporaryView("cart_info", cartInfoTable);

        // Debug: print the filtered cart-add stream
//        tableEnv.sqlQuery("select * from cart_info").execute().print();

        //TODO 4. Create the DWD-layer Kafka cart-add fact sink table
        tableEnv.executeSql("" +
                "create table dwd_cart_info(\n" +
                "    `id` STRING,\n" +
                "    `user_id` STRING,\n" +
                "    `course_id` STRING,\n" +
                "    `course_name` STRING,\n" +
                "    `cart_price` STRING,\n" +
                "    `session_id` STRING,\n" +
                "    `create_time` STRING,\n" +
                "    `update_time` STRING,\n" +
                "    `deleted` STRING,\n" +
                "    `sold` STRING\n" +
                ")" + MyKafkaUtil.getKafkaSinkDDL("dwd_trade_cart_add"));

        //TODO 5. Write the filtered rows to the sink.
        // Columns are listed explicitly (instead of "select *") so the insert does
        // not silently break if either schema's column order changes.
        tableEnv.executeSql("insert into dwd_cart_info " +
                "select id, user_id, course_id, course_name, cart_price, " +
                "session_id, create_time, update_time, deleted, sold " +
                "from cart_info");
    }
}
