package com.atguigu.app.dwd.db;

import com.atguigu.utils.MyKafkaUtil;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD-layer Flink job: builds the interaction-comment fact stream.
 *
 * <p>Reads CDC-style change records from the {@code topic_db} Kafka topic,
 * filters out {@code insert} events for the {@code comment_info} (chapter
 * comment) and {@code review_info} (course review) tables, joins them on
 * {@code user_id} + {@code course_id}, and writes the joined rows to the
 * {@code dwd_interaction_comment_info} Kafka topic via a Kafka-connector table.
 *
 * <p>Note: the regular (non-interval) join keeps both inputs in state; state
 * TTL is capped at 5 seconds below so only near-simultaneous records match.
 */
public class Dwd11_Interaction_Comment_Info {
    public static void main(String[] args) {

        // 1. Set up the execution environment.
        // Parallelism is fixed at 1 for this job (single Kafka sink partition ordering).
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // 2. Limit join-state retention: rows kept for the two-table join
        // expire after 5 seconds, bounding state size for the unbounded join.
        Configuration configuration = tableEnv.getConfig().getConfiguration();
        configuration.setString("table.exec.state.ttl", "5s");

        // 3. Register the Kafka source table over topic_db
        // (consumer group id = "dwd_interaction_comment_info").
        tableEnv.executeSql(MyKafkaUtil.getTopicDbDDL("dwd_interaction_comment_info"));

        // 4. Extract chapter-comment insert events into a temporary view.
        Table commentInfo = tableEnv.sqlQuery("" +
                "select " +
                "    data['id'] id, " +
                "    data['user_id'] user_id, " +
                "    data['chapter_id'] chapter_id, " +
                "    data['course_id'] course_id, " +
                "    data['comment_txt'] comment_txt, " +
                "    data['create_time'] create_time, " +
                "    data['deleted'] deleted  " +
                "from topic_db " +
                "where `table` = 'comment_info' " +
                "and `type` = 'insert'");
        tableEnv.createTemporaryView("comment_info", commentInfo);

        // 5. Extract course-review insert events into a temporary view.
        Table reviewInfo = tableEnv.sqlQuery("" +
                "select " +
                "    data['id'] id, " +
                "    data['user_id'] user_id, " +
                "    data['course_id'] course_id, " +
                "    data['review_txt'] review_txt, " +
                "    data['review_stars'] review_stars, " +
                "    data['create_time'] create_time, " +
                "    data['deleted'] deleted, " +
                "    ts " +
                "from topic_db " +
                "where `table` = 'review_info' " +
                "and `type` = 'insert'");
        tableEnv.createTemporaryView("review_info", reviewInfo);

        // 6. Join reviews with comments on user_id + course_id; column order
        // here must match the sink DDL below because step 8 uses "select *".
        Table resultTable = tableEnv.sqlQuery("" +
                "select " +
                "    ri.id, " +
                "    ri.user_id, " +
                "    ri.course_id, " +
                "    ci.chapter_id, " +
                "    ri.review_stars, " +
                "    ci.comment_txt, " +
                "    ri.ts " +
                "from review_info ri " +
                "join comment_info ci " +
                "on ri.user_id = ci.user_id "+
                " and ri.course_id = ci.course_id");
        tableEnv.createTemporaryView("result_table", resultTable);

        // 7. Declare the Kafka sink table dwd_interaction_comment_info.
        tableEnv.executeSql("" +
                "create table dwd_interaction_comment_info( " +
                "    id string, " +
                "    user_id string, " +
                "    course_id string, " +
                "    chapter_id string, " +
                "    review_stars string, " +
                "    comment_txt string, " +
                "    ts string " +
                "    )" + MyKafkaUtil.getKafkaSinkConnOption("dwd_interaction_comment_info"));

        // 8. Write the join result to the Kafka sink; executeSql submits the
        // job, so no explicit env.execute() is required here.
        tableEnv.executeSql("" +
                "insert into dwd_interaction_comment_info select * from result_table");
    }
}
