package com.atguigu.gmallrealtime.app.dwd.db;

import com.atguigu.gmallrealtime.common.Constant;
import com.atguigu.gmallrealtime.util.MyKafkaUtil;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import sun.java2d.windows.GDIRenderer;

/**
 * DWD-layer job: extracts "add to cart" events from the ODS {@code topic_db}
 * stream and writes them to the dedicated DWD Kafka topic.
 *
 * <p>A row qualifies as a cart-add when it is either an {@code insert} on
 * {@code cart_info}, or an {@code update} that strictly increased
 * {@code sku_num}; for updates only the delta is emitted as {@code sku_num}.
 *
 * <p>NOTE(review): the file-level import {@code sun.java2d.windows.GDIRenderer}
 * is unused, IDE-inserted, and will not compile on non-Windows / modern JDKs —
 * it should be removed.
 *
 * @author yhm
 * @create 2023-09-26 15:16
 */
public class DwdTradeCartAdd {
    public static void main(String[] args) {
        // TODO 1 Create the Flink execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // TODO 2 Checkpointing and state backend — deliberately disabled for
        // local development; re-enable before deploying to production.
        // env.enableCheckpointing(3000L, CheckpointingMode.EXACTLY_ONCE);
        // env.getCheckpointConfig().setCheckpointTimeout(60000L);
        // env.getCheckpointConfig().setExternalizedCheckpointCleanup(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        // env.getCheckpointConfig().setMinPauseBetweenCheckpoints(2000L);
        // env.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.days(30), Time.seconds(3)));
        // env.setStateBackend(new HashMapStateBackend());
        // env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        // System.setProperty("HADOOP_USER_NAME", "atguigu");

        // TODO 3 Register the ODS source table (Kafka topic_db) via generated DDL.
        String groupId = "dwd_trade_cart_add";
        tableEnv.executeSql(MyKafkaUtil.getTopicDbDDL(groupId));

        // TODO 4 Filter cart-add rows.
        // - inserts on cart_info pass through with their raw sku_num;
        // - updates pass only when old sku_num exists and the new value is
        //   strictly larger, emitting the increment (new - old) as sku_num.
        // NOTE: no trailing ';' inside the query string — sqlQuery parses a
        // single statement and a terminator causes a SQL parse failure on
        // several Flink versions.
        Table resultTable = tableEnv.sqlQuery("SELECT \n" +
                "    data['id'] id,\n" +
                "    data['user_id'] user_id,\n" +
                "    data['sku_id'] sku_id,\n" +
                "    data['cart_price'] cart_price,\n" +
                "    if(`type`='insert',data['sku_num'],\n" +
                "     cast( (cast(data['sku_num'] as bigint) - cast(`old`['sku_num'] as bigint))  as string)) sku_num,\n" +
                "    data['sku_name'] sku_name,\n" +
                "    data['is_checked'] is_checked,\n" +
                "    data['create_time'] create_time,\n" +
                "    data['operate_time'] operate_time,\n" +
                "    data['is_ordered'] is_ordered,\n" +
                "    data['order_time'] order_time,\n" +
                "    data['source_type'] source_type,\n" +
                "    data['source_id'] source_id,\n" +
                "    ts ,\n" +
                "    proc_time \n" +
                "from topic_db\n" +
                "where `table`='cart_info'\n" +
                "and (`type`='insert' or (`type`='update' and `old`['sku_num'] is not null and \n" +
                "\tcast(`data`['sku_num'] as bigint)> cast(`old`['sku_num'] as bigint) ))\n");

        tableEnv.createTemporaryView("result_table", resultTable);

        // TODO 5 Sink the filtered rows to the DWD Kafka topic.
        // Column order in this DDL must match the SELECT above, because the
        // insert below uses "select *".
        String sinkTopic = Constant.TOPIC_DWD_TRADE_CART_ADD;
        tableEnv.executeSql("create table result_kafka(\n" +
                "    id STRING,\n" +
                "    user_id STRING,\n" +
                "    sku_id STRING,\n" +
                "    cart_price STRING,\n" +
                "    sku_num STRING,\n" +
                "    sku_name STRING,\n" +
                "    is_checked STRING,\n" +
                "    create_time STRING,\n" +
                "    operate_time STRING,\n" +
                "    is_ordered STRING,\n" +
                "    order_time STRING,\n" +
                "    source_type STRING,\n" +
                "    source_id STRING,\n" +
                "    ts BIGINT,\n" +
                "    proc_time TIMESTAMP(3)\n" +
                ")"
                + MyKafkaUtil.getKafkaDDL(sinkTopic, groupId));

        // executeSql on an INSERT submits the job itself; no env.execute()
        // is needed for a pure Table/SQL pipeline.
        tableEnv.executeSql("insert into result_kafka select * from result_table");
    }
}
