package com.atguigu.edu.realtime.app.dwd.db;

import com.atguigu.edu.realtime.utils.MyKafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD job: course review (comment) facts.
 *
 * <p>Reads change-log records from the {@code topic_db} Kafka source, keeps
 * only {@code insert} events on the {@code review_info} table, projects the
 * review columns, and writes them to the {@code dwd_course_review_info}
 * upsert-Kafka topic keyed by review id.
 *
 * @author 杨晨昱 (Yang Chenyu)
 */
public class DwdCourseCommentInfo {
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Source: ODS change-log topic, registered with a job-specific consumer group.
        tableEnv.executeSql(MyKafkaUtil.getTopicDbDDL("dwd_course_review_info_group"));

        // Filter freshly inserted review_info rows and extract the needed fields
        // from the change-log `data` map.
        Table reviewInfo = tableEnv.sqlQuery(
                "select \n" +
                        "data['id'] id,\n" +
                        "data['user_id'] user_id,\n" +
                        "data['course_id'] course_id,\n" +
                        "data['review_stars'] review_stars,\n" +
                        "data['deleted'] deleted,\n" +
                        "ts\n" +
                        " from `topic_db`\n " +
                        " where `table` = 'review_info'\n" +
                        " and `type` = 'insert' \n");
        tableEnv.createTemporaryView("review_info", reviewInfo);

        // Sink: upsert-kafka table; PRIMARY KEY (id) lets later change-log
        // records for the same review overwrite earlier ones.
        tableEnv.executeSql("CREATE TABLE dwd_course_review_info (\n" +
                "    id string,\n" +
                "    user_id string,\n" +
                "    course_id string,\n" +
                "    review_stars string,\n" +
                "    deleted string,\n" +
                "    ts string,\n" +
                "    PRIMARY KEY (id) NOT ENFORCED\n" +
                ") " + MyKafkaUtil.getUpsertKafkaDDL("dwd_course_review_info")
        );

        // Submit the continuous insert job. Debug-only statements removed:
        // a bare `executeSql("select * from review_info")` discarded its
        // TableResult (dead code), and a trailing `.print()` on the sink table
        // launched a second competing job and blocked the main thread.
        tableEnv.executeSql("insert into dwd_course_review_info select * from review_info");
    }
}
