package cn.dmrliu.edu.realtime.app.dwd.db;

import cn.dmrliu.edu.realtime.util.KafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD-layer Flink SQL job: extracts shopping-cart "add" events from the raw
 * CDC stream ({@code edu_db} Kafka topic) and writes them to the
 * {@code edu_dwd_trade_cart_add} topic via the upsert-kafka connector.
 *
 * <p>Pipeline: Kafka (ODS db changelog) -> filter {@code cart_info} inserts
 * -> project business columns -> upsert-kafka sink keyed by {@code id}.
 */
public class DwdTradeCartAdd {
    public static void main(String[] args) {
        // 1. Environment setup: streaming env plus its Table API bridge.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // 2. Checkpointing: required for fault tolerance and for the Kafka
        //    sink's delivery guarantees. Default mode is EXACTLY_ONCE.
        //    NOTE(review): interval chosen as 3s to match typical DWD jobs in
        //    this project — confirm against cluster-wide checkpoint policy.
        env.enableCheckpointing(3000L);

        // 3. Register the dynamic source table over the raw db-changelog topic.
        //    DDL (topic, format, watermarks) is centralized in KafkaUtil.
        String group = "edu_dwd_trade_cart_add_group";
        tableEnv.executeSql(KafkaUtil.getEduDbDDL(group));

        // 4. Filter cart-add events: only fresh inserts into cart_info count
        //    as "add to cart"; updates/deletes are excluded.
        Table cartAdd = tableEnv.sqlQuery("select \n" +
                " `data`['id'] id,\n" +
                " `data`['user_id'] user_id,\n" +
                " `data`['course_id'] course_id,\n" +
                " `data`['course_name'] course_name,\n" +
                " ts\n" +
                "from edu_db\n" +
                "where `table` = 'cart_info'\n" +
                "and `type` = 'insert'");
        tableEnv.createTemporaryView("cart_add", cartAdd);

        // 5. Sink: upsert-kafka table keyed by the cart row id, so replays or
        //    late corrections for the same id overwrite rather than duplicate.
        //    Column order here must match the projection above, since the
        //    INSERT below uses "select *".
        tableEnv.executeSql("CREATE TABLE dwd_trade_cart_add (\n" +
                "  id string,\n" +
                "  user_id string,\n" +
                "  course_id string,\n" +
                "  course_name string,\n" +
                "  ts string,\n" +
                "  PRIMARY KEY (id) NOT ENFORCED\n" +
                ") "
                + KafkaUtil.getUpsertKafkaDDL("edu_dwd_trade_cart_add"));

        tableEnv.executeSql("insert into dwd_trade_cart_add select * from cart_add");
    }
}
