package com.atguigu.app.dwd.db;

import com.atguigu.utils.MyKafkaUtil;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

public class Dwd10_Study_Test_Exam_Question {

    /**
     * DWD job: consumes the ODS changelog topic ({@code topic_db}), keeps the
     * {@code insert} events of the {@code test_exam_question} table, projects the
     * columns the downstream layer needs, and writes them to the Kafka topic
     * {@code dwd_study_test_exam_question}.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {

        // 1. Streaming execution environment plus its Table API bridge.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // 2. Expire idle operator state after 5 seconds.
        //    NOTE(review): this job performs no join, so the TTL currently has no
        //    effect; kept for consistency with sibling DWD jobs — confirm before removing.
        Configuration configuration = tableEnv.getConfig().getConfiguration();
        configuration.setString("table.exec.state.ttl", "5s");

        // 3. Register the Kafka-backed topic_db table (ODS changelog stream),
        //    using a consumer group named after this job.
        tableEnv.executeSql(MyKafkaUtil.getTopicDbDDL("dwd_study_test_exam_question"));

        // 4. Filter insert events of test_exam_question and project exactly the
        //    columns the sink declares, in sink order. (The original pipeline also
        //    selected proc_time and then dropped it in a second projection; both
        //    steps are folded into this single query.)
        Table resultTable = tableEnv.sqlQuery("" +
                "select " +
                "    data['id'] id, " +
                "    data['exam_id'] exam_id, " +
                "    data['paper_id'] paper_id, " +
                "    data['question_id'] question_id, " +
                "    data['user_id'] user_id, " +
                "    data['answer'] answer, " +
                "    data['is_correct'] is_correct, " +
                "    data['score'] score, " +
                "    ts " +
                "from topic_db " +
                "where `table` = 'test_exam_question' " +
                "and `type` = 'insert'");
        tableEnv.createTemporaryView("result_table", resultTable);

        // 5. Declare the Kafka-connector sink table dwd_study_test_exam_question.
        tableEnv.executeSql("" +
                "create table dwd_study_test_exam_question( " +
                "    id string, " +
                "    exam_id string, " +
                "    paper_id string, " +
                "    question_id string, " +
                "    user_id string, " +
                "    answer string, " +
                "    is_correct string, " +
                "    score string, " +
                "    ts string " +
                "    )" + MyKafkaUtil.getKafkaSinkConnOption("dwd_study_test_exam_question"));

        // 6. Write the projected rows into the sink. executeSql on an INSERT
        //    submits the job, so no explicit env.execute() call is required.
        tableEnv.executeSql("" +
                "insert into dwd_study_test_exam_question select * from result_table");
    }
}
