package com.atguigu.app.dwd.db;

import com.atguigu.util.MyKafkaUtil_lj;
import com.atguigu.util.MySqlUtil_lj;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD layer job: builds the user test/exam detail wide table.
 *
 * <p>Reads the Maxwell-format change log from Kafka topic {@code topic_db},
 * registers four MySQL-backed source tables ({@code test_paper},
 * {@code test_paper_question}, {@code test_exam}, {@code test_exam_question})
 * via {@link MySqlUtil_lj}, joins them on {@code paper_id}, and writes the
 * flattened result to the Kafka topic behind {@code dwd_user_test_exam_info}.
 *
 * <p>NOTE(review): the original author asked "why does the job shut down after
 * running for a while?" — {@code executeSql("insert into ...")} submits the
 * INSERT asynchronously and returns immediately, so {@code main} used to exit
 * and tear down the local MiniCluster, killing the job. Fixed below by
 * blocking on the returned {@code TableResult} with {@code await()}.
 *
 * @author 晶晶 (Jingjing)
 * @since 2023-03-24
 */
public class Dwd_StudyInfo {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // NOTE(review): the four-way regular join below keeps all rows in state
        // forever; consider tableEnv.getConfig().setIdleStateRetention(...) to
        // bound state growth — confirm an acceptable TTL with the data owners.

        // Source: whole-database change log (Maxwell JSON) from Kafka.
        tableEnv.executeSql("" +
                "create table topic_db(\n" +
                "   `database` string,\n" +
                "   `table` string,\n" +
                "   `type` string,\n" +
                "   `data` map<string,string>,\n" +
                "   `old` map<string,string>,\n" +
                "   `ts` string\n" +
                ")" + MyKafkaUtil_lj.getKafkaDDL("topic_db", "dwd_paper_info"));
        // Debug probe:
        // tableEnv.sqlQuery("select * from topic_db").execute().print();

        // Dimension/source tables backed by MySQL (DDL built by MySqlUtil_lj).
        tableEnv.executeSql(MySqlUtil_lj.getPaperSink());
        // tableEnv.sqlQuery("select * from test_paper").execute().print();

        tableEnv.executeSql(MySqlUtil_lj.getPaperQuestionSink());
        // tableEnv.sqlQuery("select * from test_paper_question").execute().print();

        tableEnv.executeSql(MySqlUtil_lj.getTestExamSink());
        // tableEnv.sqlQuery("select * from test_exam").execute().print();

        tableEnv.executeSql(MySqlUtil_lj.getTestExamQuestionSink());
        // tableEnv.sqlQuery("select * from test_exam_question").execute().print();

        // Join paper, paper-question, exam and exam-question details on paper_id
        // into one wide row per answered question.
        Table resultTable = tableEnv.sqlQuery("" +
                "select\n" +
                "   paper.id,\n" +
                "   paper.paper_title,\n" +
                "   paper.course_id,\n" +
                "   paper.create_time ,\n" +
                "   paper.update_time ,\n" +
                "   paper.publisher_id,\n" +
                "   paper.deleted,\n" +
                "   pq.id paper_question_id,\n" +
                "   pq.question_id question_id,\n" +
                "   pq.score,\n" +
                "   pq.publisher_id pq_publisher_id,\n" +
                "   exam.id test_exam_id,\n" +
                "   exam.user_id,\n" +
                "   exam.score exam_score,\n" +
                "   exam.duration_sec,\n" +
                "   exam.create_time exam_create_time,\n" +
                "   exam.submit_time, \n" +
                "   exam.update_time exam_update_time,\n" +
                "   eq.id exam_question_id,\n" +
                "   eq.answer,\n" +
                "   eq.is_correct,\n" +
                "   eq.score exam_question_score\n" +
                " from test_paper paper\n" +
                " join test_paper_question  pq\n" +
                " on paper.id=pq.paper_id\n" +
                " join test_exam exam\n" +
                " on paper.id=exam.paper_id\n" +
                " join test_exam_question eq\n" +
                " on paper.id=eq.paper_id");

        tableEnv.createTemporaryView("result_table", resultTable);
        // tableEnv.sqlQuery("select * from result_table").execute().print();

        // Sink: flattened exam-detail rows to Kafka.
        tableEnv.executeSql("" +
                "create table dwd_user_test_exam_info(\n" +
                "   `id` bigint,\n" +
                "   `paper_title` string,\n" +
                "   `course_id` bigint,\n" +
                "   `create_time` date,\n" +
                "   `update_time` date,\n" +
                "   `publisher_id` bigint,\n" +
                "   `deleted` string,\n" +
                "   `paper_question_id` bigint,\n" +
                "   `question_id` bigint,\n" +
                "   `score` decimal,\n" +
                "   `pq_publisher_id` bigint,\n" +
                "   `test_exam_id` bigint,\n" +
                "   `user_id` bigint,\n" +
                "   `exam_score` decimal,\n" +
                "   `duration_sec` bigint,\n" +
                "   `exam_create_time` date,\n" +
                "   `submit_time` date,\n" +
                "   `exam_update_time` date,\n" +
                "   `exam_question_id` bigint,\n" +
                "   `answer` string,\n" +
                "   `is_correct` string,\n" +
                "   `exam_question_score` decimal\n" +
                ")" + MyKafkaUtil_lj.getKafkaSinkDDL("dwd_user_test_exam_info_aab"));

        // executeSql submits the INSERT job asynchronously. await() blocks the
        // client until the job finishes, so a local run no longer tears down
        // the MiniCluster the moment main() returns (the "shuts down after a
        // while" symptom from the original author note).
        tableEnv.executeSql("insert into dwd_user_test_exam_info select * from result_table")
                .await();
    }
}
