package com.wsjj.gmall;

import com.wsjj.gmall.base.BaseSQLApp;
import com.wsjj.gmall.constant.Constant;
import com.wsjj.gmall.util.SQLUtil;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 *该类是使用  flinkSQL  来实现 互动域评论事实表
 */
/**
 * DWD-layer Flink SQL job that builds the interaction-domain comment fact table.
 *
 * <p>Pipeline:
 * <ol>
 *   <li>Read the ODS changelog stream (topic_db) from Kafka and keep only
 *       {@code comment_info} insert/update rows.</li>
 *   <li>Lookup-join the HBase dictionary table to resolve the appraise code
 *       into a human-readable name (dimension degeneration).</li>
 *   <li>Write the widened fact rows back to Kafka as an upsert stream.</li>
 * </ol>
 */
public class DwdInteractionCommentInfo extends BaseSQLApp {

    public static void main(String[] args) {
        // args: web UI / job port 10012, parallelism 4, consumer-group / ckpt id "wy"
        new DwdInteractionCommentInfo().start(10012, 4, "wy");
    }

    @Override
    public void handle(StreamTableEnvironment streamTable) {

        // TODO 1: read the ODS data from Kafka (topic_db).
        // Registers the source table "KafkaTable" (defined in BaseSQLApp).
        readOdsTopicdb(streamTable);

        // Filter comment_info insert/update rows and project the needed columns
        // out of the Maxwell-style `data` map.
        // NOTE(review): "proc_tim" looks like a typo of "proc_time" — it must match
        // the column name declared in readOdsTopicdb's DDL; confirm before renaming.
        Table commentInfo = streamTable.sqlQuery("select `data`['id'] id,\n" +
                "`data`['user_id'] user_id,\n" +
                "`data`['sku_id'] sku_id,\n" +
                "`data`['spu_id'] spu_id,\n" +
                "`data`['order_id'] order_id,\n" +
                "`data`['appraise'] appraise,\n" +
                "`data`['comment_txt'] comment_txt,\n" +
                "ts,\n" +
                "proc_tim\n" +
                "from KafkaTable where `table`='comment_info' and (`type` = 'update' or `type` = 'insert')");
        streamTable.createTemporaryView("comment_info", commentInfo);

        // TODO 2: read the dictionary table from HBase (dimension degeneration).
        // HBase stores columns inside a column family, hence the nested ROW type.
        streamTable.executeSql("CREATE TABLE base_dic (\n" +
                " dic_code string,\n" +
                " info ROW<dic_name string>,\n" +
                " PRIMARY KEY (dic_code) NOT ENFORCED\n" +
                ") " + SQLUtil.getHbaseDDL(Constant.HBASE_NAMESPACE, "dim_base_dic"));

        // TODO 3: join the two streams via a processing-time lookup join, so each
        // comment row fetches the current dictionary value instead of keeping
        // unbounded state for a regular join.
        Table joined = streamTable.sqlQuery("SELECT id,user_id,sku_id,appraise,c.dic_name appraise_name,comment_txt,ts\n" +
                "FROM comment_info AS kfktb\n" +
                "JOIN base_dic FOR SYSTEM_TIME AS OF kfktb.proc_tim AS c\n" +
                "ON kfktb.appraise = c.dic_code");

        // TODO 4: write the result to Kafka. The sink is an upsert-kafka table
        // keyed by id (see SQLUtil.getUpdateKafkaDDL), so updates overwrite
        // earlier versions of the same comment.
        streamTable.executeSql("CREATE TABLE " + Constant.TOPIC_DWD_INTERACTION_COMMENT_INFO + " (\n" +
                "  id string,\n" +
                "  user_id string,\n" +
                "  sku_id string,\n" +
                "  appraise string,\n" +
                "  appraise_name string,\n" +
                "  comment_txt string,\n" +
                "  ts bigint,\n" +
                "  PRIMARY KEY (id) NOT ENFORCED\n" +
                ") " + SQLUtil.getUpdateKafkaDDL(Constant.TOPIC_DWD_INTERACTION_COMMENT_INFO));

        joined.executeInsert(Constant.TOPIC_DWD_INTERACTION_COMMENT_INFO);
    }
}
