package com.bw.gmall.realtime.dwd.db.app;

import com.bw.gmall.realtime.common.base.BaseSqlApp;
import com.bw.gmall.realtime.common.constant.Constant;
import com.bw.gmall.realtime.common.util.SQLUtil;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD job: builds the interaction comment-info fact stream.
 *
 * <p>Pipeline: read the ODS {@code topic_db} CDC stream from Kafka, keep only
 * {@code insert} rows of the {@code comment_info} table, enrich the appraise
 * code with its dictionary name via a lookup join against {@code base_dic},
 * and write the result to the DWD Kafka topic
 * {@link Constant#TOPIC_DWD_INTERACTION_COMMENT_INFO}.
 */
public class DwdInteractionCommentInfo1 extends BaseSqlApp {
    public static void main(String[] args) throws Exception {
        // port 10012, parallelism 4, consumer-group id = target topic name
        new DwdInteractionCommentInfo1().start(10012, 4, Constant.TOPIC_DWD_INTERACTION_COMMENT_INFO);
    }

    /**
     * Assembles and submits the SQL pipeline.
     *
     * @param env      the underlying stream execution environment (unused directly here)
     * @param tableEnv table environment the SQL statements run in
     * @param groupId  Kafka consumer-group id used when reading the ODS topic
     */
    @Override
    public void handle(StreamExecutionEnvironment env, StreamTableEnvironment tableEnv, String groupId) {
        // No idle-state retention is configured on purpose: the dimension
        // association below is a lookup (temporal) join, which keeps no
        // regular-join state that could grow unbounded.
//        tableEnv.getConfig().setIdleStateRetention();

        // 1. Register the Kafka ODS table topic_db.
        readOdsDb(tableEnv, groupId);

        // 2. Filter the CDC stream down to newly inserted comment rows and
        //    flatten the `data` map into named columns.
        Table comment = tableEnv.sqlQuery("select \n" +
                "  `data`['id'] id,\n" +
                "  `data`['user_id'] user_id,\n" +
                "  `data`['nick_name'] nick_name,\n" +
                "  `data`['sku_id'] sku_id,\n" +
                "  `data`['spu_id'] spu_id,\n" +
                "  `data`['order_id'] order_id,\n" +
                "  `data`['comment_txt'] comment_txt,\n" +
                "  `data`['create_time'] create_time,\n" +
                "  `data`['appraise'] appraise,\n" +
                "   proc_time\n" +
                "from topic_db\n" +
                "where `database` = 'gmall'\n" +
                "and `table` = 'comment_info'\n" +
                "and `type` = 'insert'");
        tableEnv.createTemporaryView("comment_info", comment);

        // 3. Register the dictionary (code) table used for the lookup join.
        createBaseDic(tableEnv);

        // 4. Lookup-join each comment against base_dic as of processing time.
        //    NOTE: the column order of this SELECT must match the sink DDL in
        //    step 5 exactly — insertInto() maps columns by POSITION, not by
        //    name, and with all-STRING columns a mismatch corrupts data
        //    silently instead of failing. (Fix: appraise_code/appraise_name
        //    moved before comment_txt/create_time to line up with the sink.)
        Table table = tableEnv.sqlQuery("SELECT \n" +
                "  id,\n" +
                "  user_id,\n" +
                "  nick_name,\n" +
                "  sku_id,\n" +
                "  spu_id,\n" +
                "  order_id,\n" +
                "  c.appraise appraise_code,\n" +
                "  b.dic_name appraise_name,\n" +
                "  comment_txt,\n" +
                "  create_time\n" +
                "FROM comment_info AS c\n" +
                "JOIN base_dic FOR SYSTEM_TIME AS OF c.proc_time AS b\n" +
                "ON c.appraise = b.rowkey");

        // 5. Declare the Kafka sink table for the DWD topic.
        tableEnv.executeSql("create table " + Constant.TOPIC_DWD_INTERACTION_COMMENT_INFO + "(" +
                "  id STRING,\n" +
                "  user_id STRING,\n" +
                "  nick_name STRING,\n" +
                "  sku_id STRING,\n" +
                "  spu_id STRING,\n" +
                "  order_id STRING,\n" +
                "  appraise_code STRING,\n" +
                "  appraise_name STRING,\n" +
                "  comment_txt STRING,\n" +
                "  create_time STRING" +
                ")"
                + SQLUtil.getKafkaSinkSQL(Constant.TOPIC_DWD_INTERACTION_COMMENT_INFO));

        // 6. Submit the insert job into the Kafka sink table.
        table.insertInto(Constant.TOPIC_DWD_INTERACTION_COMMENT_INFO).execute().print();
    }
}
