package com.atguigu.edu.app.dwd.db;

import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import util.MyKafkaUtil;

/**
 * DWD-layer streaming job: extracts "cart add" records (INSERT events on the
 * {@code cart_info} table) from the ODS Kafka topic and writes them to the
 * {@code dwd_trade_cart_add} Kafka topic via an upsert-kafka sink.
 */
public class DwdTradeCartAddUu {
    public static void main(String[] args) {
        // 1. Environment setup
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // 2. Checkpointing (was an empty TODO): EXACTLY_ONCE every 3s so the
        //    upsert-kafka sink can provide end-to-end consistent output.
        env.enableCheckpointing(3000L, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(60 * 1000L);
        // NOTE(review): checkpoint storage / state backend is deployment-specific
        // (e.g. an HDFS path) — configure it in flink-conf.yaml or here before
        // running in production.

        // 3. Connector table mapped to the ODS Kafka topic (ODS_BASE_DB),
        //    consuming with the given group id.
        tableEnv.executeSql(MyKafkaUtil.getTopicDDL("user_cart_add_group"));

        // 4. Filter cart-add events (table = 'cart_info', type = 'insert')
        //    and register the result as a temporary view.
        Table cartAddTable = tableEnv.sqlQuery("select " +
                " data['id'] id," +
                " data['user_id'] user_id," +
                " data['course_id'] course_id," +
                " data['course_name'] course_name," +
                " data['create_time'] create_time," +
                " data['update_time'] update_time," +
                " ts" +
                " from ODS_BASE_DB" +
                " where `table`='cart_info' and `type`='insert'");
        tableEnv.createTemporaryView("cart_add", cartAddTable);

        // 5. Sink table mapped to the target Kafka topic. Column order must
        //    match the step-4 projection because step 6 uses `select *`.
        tableEnv.executeSql("create table dwd_trade_cart_add" +
                "(" +
                "id string," +
                "user_id string," +
                "course_id string," +
                "course_name string," +
                "create_time string," +
                "update_time string," +
                "ts string," +
                "primary key(id) not enforced" +
                ")" + MyKafkaUtil.getUpsertKafkaDDL("dwd_trade_cart_add"));

        // 6. Insert the step-4 query result into the sink table, writing it to
        //    the Kafka topic. executeSql submits the job; no env.execute() needed.
        tableEnv.executeSql("insert into dwd_trade_cart_add select * from cart_add");
    }
}
