package com.atguigu.edu.realtime.dwd.db.app;

import com.atguigu.edu.realtime.common.base.BaseSQLApp;
import com.atguigu.edu.realtime.common.constant.Constant;
import com.atguigu.edu.realtime.common.util.SQLUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD job: builds the interaction-comment fact stream.
 *
 * <p>Reads CDC rows from the ODS topic ({@code topic_db}), extracts newly inserted
 * {@code comment_info} and {@code review_info} records, joins them on
 * (user_id, course_id) to attach the review's star rating to each comment, and
 * writes the result to the {@code Constant.DWD_INTER_COMMENT} upsert-kafka topic.
 */
public class DwdInteractionCommentInfo extends BaseSQLApp {
    public static void main(String[] args) {
        // port 10012, parallelism 4, job/consumer-group id "dwd_interaction_comment_info"
        new DwdInteractionCommentInfo().start(10012, 4, "dwd_interaction_comment_info");
    }

    @Override
    public void handle1(StreamExecutionEnvironment env, StreamTableEnvironment tableEnv) {
        // Register the ODS CDC source table (topic_db); "comment" is the group/consumer tag
        // passed to the base class — presumably the Kafka consumer group, TODO confirm in BaseSQLApp.
        readOdsDS(tableEnv, "comment");

        // Newly inserted comments, unpacked from the CDC `data` map.
        Table commentInfo = tableEnv.sqlQuery(
                "select `data`['id'] id,\n" +
                "       `data`['user_id'] user_id,\n" +
                "       `data`['chapter_id'] chapter_id,\n" +
                "       `data`['course_id'] course_id,\n" +
                "       `data`['comment_txt'] comment_txt,\n" +
                "       `data`['create_time'] create_time,\n" +
                "       `data`['deleted'] deleted,\n" +
                "       ts\n" +
                "from topic_db where `table`='comment_info' and `type`='insert'");
        tableEnv.createTemporaryView("comment_info", commentInfo);

        // Newly inserted reviews (star ratings), unpacked the same way.
        Table reviewInfo = tableEnv.sqlQuery(
                "select `data`['id'] id,\n" +
                "       `data`['user_id'] user_id,\n" +
                "       `data`['course_id'] course_id,\n" +
                "       `data`['review_stars'] review_stars,\n" +
                "       ts\n" +
                "from topic_db where `table`='review_info' and `type`='insert'");
        tableEnv.createTemporaryView("review_info", reviewInfo);

        // Attach the star rating to each comment. NOTE(review): this is an unbounded
        // regular stream-stream join — state retention (TTL) is presumably configured
        // in BaseSQLApp; verify, otherwise state grows without bound.
        Table resultTable = tableEnv.sqlQuery(
                "select ci.id,\n" +
                "       ci.user_id user_id,\n" +
                "       ci.chapter_id chapter_id,\n" +
                "       ci.course_id course_id,\n" +
                "       ci.comment_txt comment_txt,\n" +
                "       ri.review_stars review_stars,\n" +
                "       ci.create_time create_time,\n" +
                "       ci.ts ts\n" +
                "from comment_info ci\n" +
                "join review_info ri\n" +
                "on ci.user_id = ri.user_id and ci.course_id = ri.course_id");

        // Sink DDL. Connector options come from the shared SQLUtil helper instead of a
        // hand-written WITH clause with a hard-coded broker ('hadoop104:9092'), keeping
        // the broker list in one place with the rest of the project.
        tableEnv.executeSql("create table " + Constant.DWD_INTER_COMMENT + "(\n" +
                "id string,\n" +
                "user_id string,\n" +
                "chapter_id string,\n" +
                "course_id string,\n" +
                "comment_txt string,\n" +
                "review_stars string,\n" +
                "create_time string,\n" +
                "ts bigint,\n" +
                "PRIMARY KEY (id) NOT ENFORCED )" + SQLUtil.getUpsertKafkaDDL(Constant.DWD_INTER_COMMENT));

        // Write the joined stream directly to the sink; the intermediate
        // "resultTable" view / `select *` round-trip of the original was redundant.
        resultTable.executeInsert(Constant.DWD_INTER_COMMENT);
    }
}
