package com.atguigu.app.dwd.db;

import com.atguigu.util.KafkaUtil;
import com.atguigu.util.MysqlUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD-layer job: builds the "trade cart add" fact stream.
 *
 * <p>Reads raw Maxwell binlog records from the Kafka topic {@code topic_db}, keeps
 * {@code cart_info} inserts plus quantity-increasing updates, enriches the source
 * type code via a lookup join against the MySQL {@code base_dic} dimension table,
 * and writes the result to the Kafka topic {@code dwd_trade_cart_add}.
 *
 * @author yhm
 * @create 2022-11-21 14:00
 */
public class CartAddInfoApp {
    public static void main(String[] args) {
        // 1. Create the stream execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // 2. Configure the environment.
        // Parallelism 1 keeps local testing deterministic; raise for production.
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        /*
        // Production checkpoint / restart settings, disabled for local runs.
        env.enableCheckpointing(3000L, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(60 * 1000L);
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
        env.getCheckpointConfig().enableExternalizedCheckpoints(
                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION
        );
        env.setRestartStrategy(RestartStrategies.failureRateRestart(
                10, Time.of(1L, TimeUnit.DAYS), Time.of(3L, TimeUnit.MINUTES)
        ));
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");

        // Set the Hadoop user name so the job may write checkpoints to HDFS.
        System.setProperty("HADOOP_USER_NAME", "atguigu");
         */

        // TODO 1 Read the raw binlog stream from the Kafka topic "topic_db".
        // proc_time is a processing-time attribute, required by the lookup join below.
        String topicName = "topic_db";
        String groupId = "CartAddInfoApp";
        tableEnv.executeSql("CREATE TABLE topicDb (\n" +
                "  `database` string,\n" +
                "  `table` string,\n" +
                "  `type` STRING,\n" +
                "  `ts` bigint,\n" +
                "  `xid` bigint,\n" +
                "  `commit` boolean,\n" +
                "  `data` map<string,string>,\n" +
                "  `old` map<string,string> ,\n" +
                "  proc_time AS PROCTIME()\n" +
                ") " + KafkaUtil.getKafkaDDL(topicName,groupId));

        // TODO 2 Register the MySQL base_dic dimension table (JDBC lookup source).
        tableEnv.executeSql(MysqlUtil.getBaseDicDDL());

        // TODO 3 Filter cart_info cart-add records and join with base_dic.
        // A cart add is either an insert, or an update whose sku_num increased
        // (sku_num then becomes the delta new - old).
        // NOTE: the table filter must wrap the insert/update disjunction in
        // parentheses — SQL binds AND tighter than OR, so without them the
        // update branch would match rows from ANY table, not just cart_info.
        Table table = tableEnv.sqlQuery("select \n" +
                "  data['id'] id,\n" +
                "  data['user_id'] user_id,\n" +
                "  data['sku_id'] sku_id,\n" +
                "  data['cart_price'] cart_price,\n" +
                "  if(`type` = 'insert' , cast(data['sku_num'] as int), cast(`data`['sku_num'] as int) - cast(`old`['sku_num'] as int )) sku_num,\n" +
                "  data['sku_name'] sku_name,\n" +
                "  data['is_checked'] is_checked,\n" +
                "  data['create_time'] create_time,\n" +
                "  data['operate_time'] operate_time,\n" +
                "  data['is_ordered'] is_ordered,\n" +
                "  data['order_time'] order_time,\n" +
                "  b.dic_name source_type,\n" +
                "  data['source_id'] source_id,\n" +
                "  ts \n" +
                "from topicDb t \n" +
                "join base_dic FOR SYSTEM_TIME AS OF t.proc_time  b\n" +
                "on t.data['source_type'] = b.dic_code\n" +
                "where t.`table`='cart_info' " +
                "and ( `type`='insert' or (\n" +
                        "`type`='update' and `old`['sku_num'] is not null and (cast(`data`['sku_num'] as int) - cast(`old`['sku_num'] as int) > 0 )\n" +
                        ") )" );
        tableEnv.createTemporaryView("cart_info",table);

        // Debug aid: uncomment to print the stream; comment it back out so the
        // insert below (which also triggers execution) can run.
//        tableEnv.executeSql("select * from cart_info").print();

        // TODO 4 Write the result back to Kafka.
        String topicSink = "dwd_trade_cart_add";
        tableEnv.executeSql("CREATE TABLE kafkaSink ( \n" +
                "id string,\n" +
                "user_id string,\n" +
                "sku_id string,\n" +
                "cart_price string,\n" +
                "sku_num int,\n" +
                "sku_name string,\n" +
                "is_checked string,\n" +
                "create_time string,\n" +
                "operate_time string,\n" +
                "is_ordered string,\n" +
                "order_time string,\n" +
                "source_type string,\n" +
                "source_id string,\n" +
                "ts bigint\n" +
                ") " + KafkaUtil.getKafkaSinkDDL(topicSink));

        // Column order of the select above matches the kafkaSink schema exactly.
        tableEnv.executeSql("insert into kafkaSink select * from cart_info");

    }
}
