package com.atguigu.app.dwd.db;

import com.atguigu.util.KafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD-layer Flink SQL job: consumes the raw change-log topic {@code topic_db},
 * keeps newly inserted rows of the {@code test_exam} business table, and
 * streams them into the Kafka sink topic {@code dwd_test_test_exam1}.
 *
 * <p>NOTE(review): the class name says "TestPaper1" while the job actually
 * processes {@code test_exam} — presumably a copy-paste from a sibling job;
 * verify the intended naming before relying on it.
 */
public class DwdTestTestPaper1 {

    public static void main(String[] args) {
        // 1. Streaming execution environment, single parallel subtask.
        StreamExecutionEnvironment executionEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        executionEnv.setParallelism(1);

        StreamTableEnvironment tableEnvironment = StreamTableEnvironment.create(executionEnv);

        // 2. Expose the raw CDC topic as a Flink SQL source table. Columns mirror a
        //    Maxwell/Canal-style change-log envelope (database, table, type, data, old, ts);
        //    `pt` is a processing-time attribute. Connector options come from KafkaUtil
        //    (consumer group "dwd_test_test_paper").
        String sourceDdl = "create table topic_db(" +
                "`database` String,\n" +
                "`table` String,\n" +
                "`type` String,\n" +
                "`data` map<String, String>,\n" +
                "`old` map<String, String>,\n" +
                "`pt` as PROCTIME(),\n" +
                "`ts` string\n" +
                ")" + KafkaUtil.getKafkaDDL("topic_db", "dwd_test_test_paper");
        tableEnvironment.executeSql(sourceDdl);

        // 3. Keep only freshly inserted rows of the `test_exam` table, flattening
        //    the `data` map into individual columns.
        String filterSql = "select\n" +
                "  `data`['id'] id,\n" +
                "  `data`['paper_id'] paper_id,\n" +
                "  `data`['user_id'] user_id,\n" +
                "  `data`['score'] score,\n" +
                "  `data`['duration_sec'] duration_sec,\n" +
                "  `data`['create_time'] create_time,\n" +
                "  `data`['submit_time'] submit_time,\n" +
                "  `data`['update_time'] update_time,\n" +
                "  `data`['deleted'] deleted,\n" +
                "  `ts`\n" +
                "from topic_db\n" +
                "where `table`='test_exam'\n" +
                "and `type`='insert'";
        Table examInserts = tableEnvironment.sqlQuery(filterSql);
        tableEnvironment.createTemporaryView("test_exam", examInserts);

        // 4. Declare the Kafka sink table for the DWD layer; connector options
        //    come from KafkaUtil.
        String sinkDdl = "create table dwd_test_test_exam1(\n" +
                "   id string,\n" +
                "   paper_id string,\n" +
                "   user_id string,\n" +
                "   score string," +
                "   duration_sec String,\n" +
                "   create_time string,\n" +
                "   submit_time string,\n" +
                "   update_time string,\n" +
                "   deleted string,\n" +
                "   ts string \n" +
                ")" + KafkaUtil.getKafkaSinkDDL("dwd_test_test_exam1");
        tableEnvironment.executeSql(sinkDdl);

        // 5. Submit the continuous INSERT pipeline; executeSql on an INSERT
        //    statement launches the Flink job, so no env.execute() is needed.
        tableEnvironment.executeSql("insert into dwd_test_test_exam1 select * from test_exam");
    }
}
