package com.edu.realtime.app.dwd.db;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import com.edu.realtime.util.MyKafkaUtil;

/**
 * Created on 2022/10/18.
 *
 * @author Topus
 * @desc 考试域：考试操作事务事实表 dwd_test_exam_do_paper
 */
public class DwdTestExamDoPaper {
    public static void main(String[] args) {
        // Streaming environment plus its Table API wrapper; parallelism matches the
        // other DWD jobs in this project.
        StreamExecutionEnvironment streamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        streamEnv.setParallelism(4);
        StreamTableEnvironment streamTableEnv = StreamTableEnvironment.create(streamEnv);

        // Register the Kafka-backed topic_db source table under this job's consumer group.
        streamTableEnv.executeSql(MyKafkaUtil.getTopicDbDDL("dwd_test_exam_do_paper_group"));

        // Keep only insert events for test_exam_question and project the columns we emit.
        String filterSql =
            "select data['id'] id, data['exam_id'] exam_id, data['paper_id'] paper_id,\n"
                + "data['question_id'] question_id, data['user_id'] user_id,\n"
                + "data['is_correct'] is_correct, data['score'] score, ts\n"
                + "from topic_db\n"
                + "where `table`='test_exam_question' and `type`='insert'";
        Table examEvents = streamTableEnv.sqlQuery(filterSql);
        streamTableEnv.createTemporaryView("result_table", examEvents);

        // Sink: upsert-Kafka table keyed on id, one column per projected field.
        String sinkDdl =
            "create table dwd_test_exam_do_paper(\n"
                + "id string, exam_id string, paper_id string, question_id string,\n"
                + "user_id string, is_correct string, score string, ts string,\n"
                + "primary key(id) not enforced\n"
                + ")"
                + MyKafkaUtil.getUpsertKafkaDDL("dwd_test_exam_do_paper");
        streamTableEnv.executeSql(sinkDdl);

        // Submitting the insert starts the streaming job; no explicit env.execute() needed.
        streamTableEnv.executeSql("insert into dwd_test_exam_do_paper select * from result_table");
    }
}