package atguigu.com.edu.app.dwd.db;

import atguigu.com.edu.util.MyKafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD-layer Flink job: extracts video-play records from the CDC topic
 * {@code topic_db} and writes them to the Upsert-Kafka table
 * {@code dwd_video_play}, keyed by record id.
 */
public class DwdVideoPlay {
    public static void main(String[] args) {

        // TODO 1. Set up the streaming environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);

        // TODO 2. Create the table environment on top of the stream environment.
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // TODO 3. Register topic_db (CDC change log) as a Flink SQL source table.
        tableEnv.executeSql(MyKafkaUtil.getTopicDbDDL("dwd_video_play_group"));

        // TODO 4. Extract video-play fields from the change log.
        // NOTE(review): the filter `table` = 'user_info' looks like copy-paste
        // residue — the selected columns (video_name, during_sec, chapter_id,
        // course_id) belong to a video-play table, not user_info. Verify the
        // correct source table name against the topic_db schema.
        Table videoPlay = tableEnv.sqlQuery("select\n" +
                "data['id'] id,\n" +
                "data['video_name'] video_name,\n" +
                "data['during_sec'] during_sec,\n" +
                "data['chapter_id'] chapter_id,\n" +
                "data['course_id'] course_id,\n" +
                "ts\n" +
                "from topic_db\n" +
                "where `table` = 'user_info'\n" +
                "and `type` = 'insert'\n");
        // View name 'video_play' is internal to this job; renamed from the
        // misleading 'user_info' to avoid confusion with the source table.
        tableEnv.createTemporaryView("video_play", videoPlay);

        // TODO 5. Create the Upsert-Kafka sink table dwd_video_play.
        tableEnv.executeSql("create table `dwd_video_play`(\n" +
                "`id` string,\n" +
                "`video_name` string,\n" +
                "`during_sec` string,\n" +
                "`chapter_id` string,\n" +
                "`course_id` string,\n" +
                "`ts` string,\n" +
                "primary key(`id`) not enforced\n" +
                ")" + MyKafkaUtil.getUpsertKafkaDDL("dwd_video_play"));

        // TODO 6. Write the extracted rows into the Upsert-Kafka table.
        // executeSql on an INSERT submits the job; no env.execute() needed.
        tableEnv.executeSql("insert into dwd_video_play select * from video_play");
    }
}

