package com.atliuzu.app.dwd.db;

import com.atliuzu.utils.MyKafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

public class DwdTestExam {

    /**
     * DWD-layer Flink job: consumes raw business change events from the Kafka
     * topic {@code topic_db}, keeps only {@code insert} events for the
     * {@code test_exam} source table, and writes the extracted exam fields to
     * the Kafka-backed sink table {@code dwd_test_exam}.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if the Flink job fails to submit or execute
     */
    public static void main(String[] args) throws Exception {

        // TODO 1. Prepare the execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 keeps local testing simple; tune for production throughput.
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // TODO 2. Read business change-log data from Kafka as a Flink SQL table.
        // Schema mirrors a Maxwell/Canal-style envelope: db, table, op type,
        // column map, and event timestamp. Connector options come from MyKafkaUtil.
        tableEnv.executeSql("create table topic_db(" +
                "`database` string, " +
                "`table` string, " +
                "`type` string, " +
                "`data` map<string, string>, " +
                "`ts` string " +
                ")" + MyKafkaUtil.getKafkaDDL("topic_db", "test_exam_06"));

        // TODO 3. Extract exam-record inserts into a table.
        // date_id is derived from create_time so downstream layers can partition by day.
        Table testExam = tableEnv.sqlQuery("select " +
                "data['id'] id, " +
                "data['paper_id'] paper_id, " +
                "data['user_id'] user_id, " +
                "data['score'] score, " +
                "data['duration_sec'] duration_sec, " +
                "date_format(data['create_time'],'yyyy-MM-dd') date_id, " +
                "data['create_time'] create_time, " +
                "ts " +
                "from topic_db " +
                "where `table` = 'test_exam' " +
                "and `type` = 'insert' ");

        tableEnv.createTemporaryView("testExam", testExam);

        // Debug print of the filtered rows. toDataStream replaces the
        // deprecated toAppendStream(Table, Class); the query above is
        // insert-only, so the append-only conversion is equivalent.
        tableEnv.toDataStream(testExam).print(">>>>>>>>");

        // TODO 4. Create the Kafka-connector sink table dwd_test_exam.
        tableEnv.executeSql("create table dwd_test_exam( " +
                "    `id` String, " +
                "    `paper_id` String, " +
                "    `user_id` String, " +
                "    `score` String, " +
                "    `duration_sec` String, " +
                "    `date_id` String, " +
                "    `create_time` String, " +
                "    `ts` String " +
                ") " + MyKafkaUtil.getInsertKafkaDDL("dwd_test_exam"));

        // TODO 5. Write the extracted rows to the Kafka sink table.
        // NOTE(review): executeSql submits this INSERT as its own Flink job.
        tableEnv.executeSql("insert into dwd_test_exam select * from testExam");

        // Submit the DataStream part of the pipeline (the debug print above).
        env.execute("DWDTESTExam");
    }

}
