package com.atguigu.gmall.realtime.dwd.app;

/*import com.atguigu.gmall.realtime.common.base.BaseSqlApp;
import com.atguigu.gmall.realtime.common.constant.Constant;
import com.atguigu.gmall.realtime.common.util.FlinkSqlUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

public class DwdInteractionCommentInfo extends BaseSqlApp {
    public static void main(String[] args) {
        new DwdInteractionCommentInfo().start(10012, 4, "dwd_interaction_comment_info");
    }

    @Override
    public void handle(StreamTableEnvironment tableEnv, StreamExecutionEnvironment env) {
        // 从kafka topic_db表中读取数据
        readOdsTopicDb(tableEnv, "dwd_interaction_comment_info");
        // 过滤出评论数据
        filterCommentInfo(tableEnv);

        // 从Hbase中读取字典表数据
        readDimBaseDic(tableEnv);

        // tableEnv.sqlQuery(" select dic_code, info.dic_name from dim_base_dic ").execute().print();

        // 使用LookupJoin的方式, 关联评论数据和字典表数据, 将字典表的信息退化到事实表中
        Table joinTable = lookupJoin(tableEnv);

        // 写出到kafka
        writeToKafka(tableEnv, joinTable);

    }

    private static void writeToKafka(StreamTableEnvironment tableEnv, Table joinTable) {
        String createSinkTable =
                " create table  " + Constant.TOPIC_DWD_INTERACTION_COMMENT_INFO + " ( " +
                        "id STRING," +
                        "user_id STRING," +
                        "nick_name STRING" +
                        "sku_id STRING," +
                        "spu_id STRING," +
                        "order_id STRING," +
                        "appraise_name STRING," +
                        "comment_txt STRING," +
                        "ts BIGINT," +
                        " PRIMARY KEY (id) NOT ENFORCED )" + FlinkSqlUtil.getUpsertKafkaSinkDDl(Constant.TOPIC_DWD_INTERACTION_COMMENT_INFO);

        tableEnv.executeSql(createSinkTable);

        joinTable.executeInsert(Constant.TOPIC_DWD_INTERACTION_COMMENT_INFO);
    }

    private static Table lookupJoin(StreamTableEnvironment tableEnv) {
        String joinSql =
                "select " +
                        " ci.id," +
                        " ci.user_id," +
                        " ci.nick_name," +
                        " ci.sku_id," +
                        " ci.spu_id," +
                        " ci.order_id," +
                        " dbd.info.dic_name as appraise_name," +
                        " ci.comment_txt," +
                        " ci.ts as ts" +
                        " from comment_info AS ci " +
                        " join dim_base_dic for SYSTEM_TIME AS OF ci.pt AS dbd " +
                        " ON ci.appraise = dbd.dic_code ";

        Table joinTable = tableEnv.sqlQuery(joinSql);
        return joinTable;
    }

    private static void readDimBaseDic(StreamTableEnvironment tableEnv) {
        String createBaseDicSql =
                "create table dim_base_dic (" +
                        " dic_code STRING, " +
                        " info ROW<dic_name STRING> , " +
                        " PRIMARY KEY (dic_code) NOT ENFORCED " +
                        " ) " + FlinkSqlUtil.getHbaseDDL(Constant.HBASE_NAMESPACE, "dim_base_dic");
        tableEnv.executeSql(createBaseDicSql);
    }

    private static void filterCommentInfo(StreamTableEnvironment tableEnv) {
        // 评论不允许修改, 只有insert
        // 条件: database = 'gmall' and table = 'comment_info' and type = 'insert'
        String filterCommentInfoSql =
                " select " +
                        "`data`['id'] as id , " +
                        "`data`['user_id'] as user_id ," +
                        "`data`['nick_name'] as nick_name," +
                        "`data`['sku_id'] as sku_id," +
                        "`data`['spu_id'] as spu_id," +
                        "`data`['order_id'] as order_id ," +
                        "`data`['appraise'] as appraise," +
                        "`data`['comment_txt'] as comment_txt," +
                        " ts ," +
                        " pt " +
                        " from topic_db " +
                        " where `database` = 'gmall' " +
                        " and `table` = 'comment_info' " +
                        " and `type` = 'insert'" ;
        // tableEnv.sqlQuery(filterCommentInfoSql).execute().print();
        Table commentInfoTable = tableEnv.sqlQuery(filterCommentInfoSql);
        tableEnv.createTemporaryView("comment_info", commentInfoTable);
    }
}*/
import com.atguigu.gmall.realtime.common.base.BaseSqlApp;
import com.atguigu.gmall.realtime.common.constant.Constant;
import com.atguigu.gmall.realtime.common.util.FlinkSqlUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD-layer fact app for interaction comment info.
 *
 * <p>Pipeline: read the CDC stream from the kafka {@code topic_db} topic,
 * keep only {@code comment_info} insert events, lookup-join the appraise code
 * against the HBase-backed {@code base_dic} dimension table, and write the
 * widened fact rows to an upsert-kafka sink.
 *
 * @author WEIYUNHUI
 * @date 2024/8/3 15:12
 */
public class DwdInteractionCommentInfo extends BaseSqlApp {

    public static void main(String[] args) {
        // port 10012, parallelism 4, app id / consumer group "dwd_interaction_comment_info"
        new DwdInteractionCommentInfo().start(10012, 4 , "dwd_interaction_comment_info");
    }

    @Override
    public void handle(StreamTableEnvironment tableEnv, StreamExecutionEnvironment env) {
        // Read the raw CDC data from the kafka topic_db topic.
        readOdsTopicDb(tableEnv , "dwd_interaction_comment_info");

        // Keep only comment rows (insert events).
        filterCommentInfo(tableEnv);

        // Register the dictionary dimension table backed by HBase.
        // FIX: readDimBaseDic was commented out while still being invoked here,
        // which made this class fail to compile; the method is restored below.
        readDimBaseDic(tableEnv);

        // Lookup-join comment facts with the dictionary so the dimension
        // attributes are denormalized into the fact table.
        Table joinTable = lookupJoin(tableEnv);

        // Emit the joined rows to kafka.
        writeToKafka(tableEnv, joinTable);
    }

    /**
     * Creates the upsert-kafka sink table (keyed by comment id) and writes the
     * joined rows into it.
     *
     * @param tableEnv  the table environment to register the sink DDL in
     * @param joinTable the joined fact table produced by {@link #lookupJoin}
     */
    private static void writeToKafka(StreamTableEnvironment tableEnv, Table joinTable) {
        String createSinkTable =
                " create table " + Constant.TOPIC_DWD_INTERACTION_COMMENT_INFO + " ( " +
                        "  id STRING," +
                        "  user_id STRING, " +
                        "  nick_name STRING, " +
                        "  sku_id STRING, " +
                        "  spu_id STRING, " +
                        "  order_id STRING, " +
                        "  appraise_name STRING ," +
                        "  comment_txt STRING , "+
                        "  ts BIGINT ,  "  +
                        "  PRIMARY KEY (id) NOT ENFORCED "   +
                        " ) " + FlinkSqlUtil.getUpsertKafkaSinkDDl(Constant.TOPIC_DWD_INTERACTION_COMMENT_INFO) ;

        tableEnv.executeSql(createSinkTable) ;

        joinTable.executeInsert( Constant.TOPIC_DWD_INTERACTION_COMMENT_INFO  );
    }

    /**
     * Lookup-joins the filtered comment stream against the dictionary dimension
     * (FOR SYSTEM_TIME AS OF the processing-time attribute {@code pt}) to
     * resolve the appraise code into its display name.
     *
     * @param tableEnv the table environment holding the registered views
     * @return the joined table with {@code appraise_name} resolved
     */
    private static Table lookupJoin(StreamTableEnvironment tableEnv) {
        String joinSql =
                "SELECT " +
                        " ci.id," +
                        " ci.user_id," +
                        " ci.nick_name," +
                        " ci.sku_id," +
                        " ci.spu_id," +
                        " ci.order_id," +
                        " dbd.info.dic_name as appraise_name," +
                        " ci.comment_txt, " +
                        " ci.ts as ts " +
                        " FROM comment_info AS ci " +
                        " JOIN dim_base_dic FOR SYSTEM_TIME AS OF ci.pt AS dbd " +
                        " ON ci.appraise = dbd.dic_code" ;

        Table joinTable = tableEnv.sqlQuery(joinSql);
        return joinTable;
    }

    /**
     * Registers the HBase-backed {@code dim_base_dic} dimension table used as
     * the lookup side of the join.
     *
     * <p>Restored from the previously commented-out definition: {@link #handle}
     * invokes this method, so without it the class does not compile.
     *
     * @param tableEnv the table environment to register the DDL in
     */
    private static void readDimBaseDic(StreamTableEnvironment tableEnv) {
        String createBaseDicSql =
                " create table dim_base_dic (" +
                        " dic_code STRING , " +
                        " info ROW<dic_name STRING> , " +
                        " PRIMARY KEY (dic_code) NOT ENFORCED " +
                        ") " + FlinkSqlUtil.getHbaseDDL(Constant.HBASE_NAMESPACE , "dim_base_dic");

        tableEnv.executeSql(createBaseDicSql);
    }

    /**
     * Filters the raw topic_db CDC stream down to comment_info insert events
     * and registers the result as the temporary view {@code comment_info}.
     *
     * <p>Comments cannot be modified, so only {@code insert} events exist:
     * database = 'gmall' AND table = 'comment_info' AND type = 'insert'.
     *
     * @param tableEnv the table environment holding the {@code topic_db} source
     */
    private static void filterCommentInfo(StreamTableEnvironment tableEnv) {
        String filterCommentInfoSql =
                " select " +
                        "`data`['id'] as id , " +
                        "`data`['user_id'] as user_id ," +
                        "`data`['nick_name'] as nick_name," +
                        "`data`['sku_id'] as sku_id," +
                        "`data`['spu_id'] as spu_id," +
                        "`data`['order_id'] as order_id ," +
                        "`data`['appraise'] as appraise," +
                        "`data`['comment_txt'] as comment_txt," +
                        " ts ," +
                        " pt " +
                        " from topic_db " +
                        " where `database` = 'gmall' " +
                        " and `table` = 'comment_info' " +
                        " and `type` = 'insert'" ;

        Table commentInfoTable = tableEnv.sqlQuery(filterCommentInfoSql);
        tableEnv.createTemporaryView("comment_info" , commentInfoTable);
    }
}