package com.atguigu.realtime.app.dwd.db;

import com.atguigu.realtime.utils.KafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.annotation.DataTypeHint;
import org.apache.flink.table.annotation.FunctionHint;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.functions.TableFunction;
import org.apache.flink.types.Row;

/**
 * DWD-layer streaming job: reads change records of the {@code test_exam_question}
 * table from the ODS Kafka topic ({@code topic_db}), explodes the comma-separated
 * {@code answer} column into one row per selected option via a UDTF, and writes
 * the result to the {@code dwd_test_exam_question} Kafka topic.
 */
public class DwdTestExamQuestion {

    public static void main(String[] args) {
        // TODO 1: prepare the stream and table environments.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // TODO 2: checkpointing / state backend — intentionally disabled for
        // local development (enabling it requires HDFS access on hadoop102).
        /*
        env.enableCheckpointing(5 * 60 * 1000L, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(3 * 60 * 1000L);
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        System.setProperty("HADOOP_USER_NAME", "atguigu");
         */

        // TODO 3: map the raw CDC Kafka topic onto a table. `data` carries the
        // row image as a string-to-string map; `pt` is a processing-time attribute.
        String topicName = "topic_db";
        String groupId = "dwd_test_exam_question";
        tableEnv.executeSql("create table topic_db(\n" +
                " `database` string,\n" +
                " `table` string,\n" +
                " `type` string,\n" +
                " `data` map<string,string>,\n" +
                " `ts` bigint,\n" +
                " `pt` AS PROCTIME()\n" +
                ")" + KafkaUtil.getKafkaDDL(topicName, groupId));

        // TODO 4: keep only test_exam_question change records and project the
        // business columns out of the `data` map; derive date_id from create_time.
        Table testExamQuestion = tableEnv.sqlQuery("select " +
                " `data`['id'] id," +
                " `data`['exam_id'] exam_id," +
                " `data`['paper_id'] paper_id," +
                " `data`['question_id'] question_id," +
                " `data`['user_id'] user_id," +
                " `data`['answer'] answer," +
                " `data`['is_correct'] is_correct," +
                " `data`['score'] score," +
                " `data`['create_time'] create_time," +
                " date_format(`data`['create_time'],'yyyy-MM-dd') date_id ," +
                " ts," +
                " current_row_timestamp() row_op_ts" +
                " from topic_db" +
                " where `table`='test_exam_question'");

        tableEnv.createTemporaryView("t", testExamQuestion);

        // testExamQuestion.execute().print(); // debug only

        // TODO 5: register the UDTF that splits the answer string into words.
        tableEnv.createTemporaryFunction("split", AnswerSplit.class);

        // Explode each comma-separated answer into one row per option word.
        // NOTE(review): an inner lateral join drops source rows whose answer
        // yields no words (e.g. null answer) — confirm that is intended.
        Table result = tableEnv.sqlQuery(" select " +
                " id," +
                " exam_id," +
                " paper_id," +
                " question_id," +
                " user_id," +
                " answer," +
                " w word," +
                " is_correct," +
                " score," +
                " create_time," +
                " date_id ," +
                " ts," +
                " row_op_ts" +
                " from t" +
                " join lateral table( split(answer) ) as T(w) ON TRUE");

        tableEnv.createTemporaryView("result_table", result);

        // result.execute().print(); // debug only

        // TODO 6: declare the Kafka sink table matching the projection above.
        tableEnv.executeSql("create table dwd_test_exam_question(" +
                " id string," +
                " exam_id string," +
                " paper_id string," +
                " question_id string," +
                " user_id string," +
                " answer string," +
                " word string," +
                " is_correct string," +
                " score string," +
                " create_time string," +
                " date_id string," +
                " ts bigint," +
                " row_op_ts timestamp_ltz(3)" +
                ")" + KafkaUtil.getKafkaSinkDDL("dwd_test_exam_question"));

        // TODO 7: write the exploded rows to Kafka. executeSql submits the
        // insert job, so no explicit env.execute() is needed here.
        tableEnv.executeSql("insert into dwd_test_exam_question select * from result_table");
    }

    /**
     * UDTF that splits a comma-separated answer string, emitting one
     * {@code ROW<word STRING>} per element. Emits nothing for null input.
     */
    @FunctionHint(output = @DataTypeHint("ROW<word STRING>"))
    public static class AnswerSplit extends TableFunction<Row> {
        public void eval(String s) {
            if (s == null) {
                return; // no output rows for a null answer
            }
            for (String word : s.split(",")) {
                collect(Row.of(word));
            }
        }
    }
}
