package com.atguigu.edu.realtime.app.dwd.db;

import com.atguigu.edu.realtime.utils.MyKafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.Duration;

/**
 * DWD-layer Flink job: filters test/exam question records out of the ODS
 * change-log topic ({@code topic_db}) and writes them to the Kafka topic
 * {@code dwd_test_question_inc} via the upsert-kafka connector.
 *
 * <p>Assumes the {@code topic_db} source table is registered elsewhere
 * (e.g. by shared bootstrap code) before the filter query runs —
 * NOTE(review): confirm, it is not created in this class.
 */
public class DwdTestQuestion {
    public static void main(String[] args) throws Exception {
        // TODO 1. Environment setup
        // 1.1 Stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // 1.2 Parallelism
        env.setParallelism(4);
        // 1.3 Table environment on top of the stream environment
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        // 1.4 Idle-state retention so per-key state does not grow unboundedly
        tableEnv.getConfig().setIdleStateRetention(Duration.ofSeconds(10));

        // Filter insert events of the `test_exam_question` table out of the
        // Maxwell/CDC change-log stream (`data` is a map of column -> value).
        Table testExamquestion = tableEnv.sqlQuery("select \n" +
                "data['id'] id,\n" +
                "data['user_id'] user_id,\n" +
                "data['question_id'] question_id,\n" +
                "data['is_correct'] is_correct,\n" +
                "ts \n" +
                "from `topic_db`\n" +
                "where `table` = 'test_exam_question'\n" +
                "and `type` = 'insert'\n");
        tableEnv.createTemporaryView("test_exam_question", testExamquestion);

        // Sink: dynamic table mapped onto the target Kafka topic.
        // Upsert-kafka requires a primary key (not enforced) for keyed writes.
        tableEnv.executeSql("create table dwd_test_question_inc(\n" +
                "id string,\n" +
                "user_id string,\n" +
                "question_id string,\n" +
                "is_correct string,\n" +
                "ts string,\n" +
                "primary key(id) not enforced\n" +
                ")" + MyKafkaUtil.getUpsertKafkaDDL("dwd_test_question_inc"));

        // Submit the continuous INSERT job.
        // FIX: the original inserted into `dwd_test_question`, which was never
        // created — the registered sink table is `dwd_test_question_inc`.
        tableEnv.executeSql("insert into dwd_test_question_inc select * from test_exam_question");

        // FIX: removed env.execute(). This pipeline is pure Table API — the
        // executeSql INSERT above already submits the job, and calling
        // env.execute() with no DataStream operators throws
        // "No operators defined in streaming topology".
    }
}
