package com.zhu.app.dwd;

import com.alibaba.druid.sql.dialect.mysql.ast.statement.MySqlShowSlaveHostsStatement;
import com.zhu.utils.MySqlUtil;
import com.zhu.utils.ZhuKafkaUtil;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD interaction-comment app.
 *
 * Builds a MySQL lookup (dictionary) table, reads comment-table change records
 * from the ODS topic, joins them against the dictionary to resolve the appraise
 * name (good / medium / bad / auto review), and writes the result to the
 * {@code dwd_interaction_comment} Kafka topic.
 *
 * Design notes (9.15.2):
 * 1) State TTL — when joining with the dictionary table, the TTL mainly covers
 *    the time needed to fetch dimension data from the external store; 5 s here.
 * 2) Filtering — a row is inserted into the comment table whenever a user
 *    submits a comment, so keeping records with operation type 'insert' is
 *    sufficient.
 */
public class DWDInteractionCommentApp
{
    public static void main(String[] args) throws Exception {
        // TODO 1. Streaming environment.
        StreamExecutionEnvironment streamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment();
        streamExecutionEnvironment.setParallelism(1); // source Kafka topic has 4 partitions

        // Checkpointing (disabled for local development).
           /*
        streamExecutionEnvironment.setStateBackend(new HashMapStateBackend());
        streamExecutionEnvironment.getCheckpointConfig().setCheckpointStorage(ClusterParametersConfig.HDFS_CHECKPOINT_FILE_DIR);  // store checkpoints on HDFS
        System.setProperty("HADOOP_USER_NAME", "zhu");
        streamExecutionEnvironment.getCheckpointConfig().setCheckpointTimeout(10 * 60000L);  // checkpoint timeout
        streamExecutionEnvironment.getCheckpointConfig().setMaxConcurrentCheckpoints(2);  // max concurrent checkpoints
        streamExecutionEnvironment.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 5 * 1000L));  // restart strategy
        */
        StreamTableEnvironment streamTableEnvironment = StreamTableEnvironment.create(streamExecutionEnvironment);
        Configuration configuration = streamTableEnvironment.getConfig().getConfiguration();
        // State time-to-live for the lookup join; see class Javadoc, note 1.
        configuration.setString("table.exec.state.ttl", "5 s");

        // TODO 2. Register the ODS topic_db source table.
        // BUGFIX: the consumer group id was "dwd_interaction_favor_add", copy-pasted
        // from the favor-add app; use this app's own id so the two jobs do not
        // interfere with each other's committed offsets.
        streamTableEnvironment.executeSql(ZhuKafkaUtil.getTopicDB("dwd_interaction_comment"));

        // TODO 3. Register the MySQL base_dic lookup (dimension) table.
        streamTableEnvironment.executeSql(MySqlUtil.getBaseDicLooKupDDL());

        // TODO 4. Filter comment_info insert records out of topic_db.
        Table commentInfo = streamTableEnvironment.sqlQuery("select\n" +
                "data['id'] id,\n" +
                "data['user_id'] user_id,\n" +
                "data['sku_id'] sku_id,\n" +
                "data['order_id'] order_id,\n" +
                "data['create_time'] create_time,\n" +
                "data['appraise'] appraise,\n" +
                "pt \n" +
                "from topic_db\n" +
                "where `table` = 'comment_info'\n" +
                "and `type` = 'insert'\n");
        streamTableEnvironment.createTemporaryView("comment_info", commentInfo);

        // TODO 5. Lookup-join the dictionary table on the processing-time attribute.
        // BUGFIX: alias was "appraise_time"; the sink column is appraise_name and the
        // value is a dictionary name, not a timestamp.
        Table resultTable = streamTableEnvironment.sqlQuery("select\n" +
                "ci.id,\n" +
                "ci.user_id,\n" +
                "ci.sku_id,\n" +
                "ci.order_id,\n" +
                "date_format(ci.create_time,'yyyy-MM-dd') date_id,\n" +
                "ci.create_time,\n" +
                "ci.appraise as appraise_code,\n" +
                "dic.dic_name as appraise_name\n" +
                "from comment_info ci\n" +
                "join\n" +
                "base_dic for system_time as of ci.pt as dic\n" +
                "on ci.appraise = dic.dic_code");
        streamTableEnvironment.createTemporaryView("result_table", resultTable);

        // TODO 6. Register the Kafka sink table.
        streamTableEnvironment.executeSql("create table dwd_interaction_comment(\n" +
                                "id string,\n" +
                                "user_id string,\n" +
                                "sku_id string,\n" +
                                "order_id string,\n" +
                                "date_id string,\n" +
                                "create_time string,\n" +
                                "appraise_code string,\n" +
                                "appraise_name string\n" +
                                ")" +
                                ZhuKafkaUtil.getKafkaSinkDDL("dwd_interaction_comment"));

        // TODO 7. Write the join result into the Kafka connector table.
        // executeSql submits the INSERT job itself; no further action is required.
        streamTableEnvironment.executeSql("" +
                        "insert into dwd_interaction_comment select * from result_table").print();

        // BUGFIX: removed streamExecutionEnvironment.execute() — the pipeline is pure
        // Table API, so executeSql above already submitted the job; calling execute()
        // with no DataStream operators throws "No operators defined in streaming topology".
    }
}
