package com.atguigu.edu.app.dwd.db;

import com.atguigu.edu.util.KafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD-layer Flink SQL job: extracts cart-add (add-to-cart) events from the ODS
 * {@code topic_db} Kafka topic and writes them to the {@code dwd_trade_cart_add} topic.
 *
 * <p>Pipeline: Kafka {@code topic_db} → filter rows where {@code table = 'cart_info'}
 * and {@code type = 'insert'} → project business columns → Kafka sink.
 *
 * <p>Kafka connector options are supplied by the project-local {@code KafkaUtil}
 * DDL helpers; their contents are not visible here.
 */
public class DwdTradeCartAdd {
    public static void main(String[] args) {
        // TODO 1: prepare the streaming and table environments
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 for local development; raise for production deployments.
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // TODO 2: state backend / checkpointing — intentionally disabled for local
        // runs; uncomment for production (requires HDFS + checkpoint imports).
                 /*
                 env.enableCheckpointing(5 * 60 * 1000L, CheckpointingMode.EXACTLY_ONCE );
                 env.getCheckpointConfig().setCheckpointTimeout( 3 * 60 * 1000L );
                 env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
                 env.setStateBackend(new HashMapStateBackend());
                 env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
                 System.setProperty("HADOOP_USER_NAME", "atguigu");
                  */

        // TODO 3: read the CDC change stream from the topic_db Kafka topic.
        // Columns mirror the Maxwell/Canal-style envelope: database/table/type/ts
        // plus the row payload in `data` and prior image in `old`.
        String topicName = "topic_db";
        String groupID = "dwd_trade_cart_add";
        tableEnv.executeSql("CREATE TABLE topic_db (\n" +
                "  `database` string,\n" +
                "  `table` string,\n" +
                "  `type` string,\n" +
                "  `ts` bigint,\n" +
                "  `xid` bigint,\n" +
                "  `commit` string,\n" +
                "  `data` map<string,string>,\n" +
                "  `old` map<string,string>, \n" +
                // Processing-time attribute, available for temporal joins downstream.
                "   pt AS PROCTIME() \n" +
                ") " + KafkaUtil.getKafkaDDL(topicName, groupID));

        // TODO 4: filter cart-add events (inserts into cart_info) and project
        // the business columns out of the `data` map.
        Table cartAddTable = tableEnv.sqlQuery("select\n" +
                "`data`['id'] id,\n" +
                "`data`['user_id'] user_id,\n" +
                "`data`['course_id'] course_id,\n" +
                "`data`['course_name'] course_name,\n" +
                "`data`['cart_price'] cart_price,\n" +
                "`data`['session_id'] session_id,\n" +
                "`data`['create_time'] create_time,\n" +
                "ts\n" +
                "from topic_db\n" +
                // BUGFIX: the original concatenation produced "...'cart_info'and `type`..."
                // with no whitespace between the literal and AND; add the newline.
                "where `table` = 'cart_info'\n" +
                "and `type` = 'insert'");
        tableEnv.createTemporaryView("cartAdd", cartAddTable);

        // TODO 5: write the filtered stream out to the dwd_trade_cart_add topic.
        tableEnv.executeSql("create table kafka_sink(\n" +
                " id string,\n" +
                " user_id string,\n" +
                " course_id string,\n" +
                " course_name string,\n" +
                " cart_price string,\n" +
                " session_id string,\n" +
                " create_time string,\n" +
                " ts bigint\n" +
                ")" + KafkaUtil.getKafkaSinkDDL("dwd_trade_cart_add"));
        // Name the columns explicitly instead of `select *` so the insert does not
        // silently break if the view's column order ever drifts from the sink DDL.
        // NOTE(review): executeSql submits the insert asynchronously; whether main
        // should block on the TableResult depends on the deployment mode — confirm.
        tableEnv.executeSql("insert into kafka_sink " +
                "select id, user_id, course_id, course_name, cart_price, session_id, create_time, ts " +
                "from cartAdd");
    }
}
