package com.atguigu.gmallrealtime.app.dwd.db;

import com.atguigu.gmallrealtime.common.Constant;
import com.atguigu.gmallrealtime.util.MyKafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.Duration;

/**
 * @author yhm
 * @create 2023-09-27 9:07
 */
/**
 * DWD-layer Flink SQL job: builds the trade order-detail wide table.
 *
 * <p>Reads the CDC stream from {@code topic_db}, filters four business tables
 * (order_detail, order_info, order_detail_activity, order_detail_coupon),
 * joins them into one wide record per order-detail row, and writes the result
 * to Kafka via the upsert-kafka connector.
 */
public class DwdTradeOrderDetail {
    public static void main(String[] args) {
        // TODO 1 Create the Flink execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // TTL for join state: as long as every record can still find its match,
        // smaller is better. The four source tables are written in the same DB
        // transaction (no business-time gap between them), so the TTL only needs
        // to cover network/processing latency — 10 seconds is ample.
        tableEnv.getConfig().setIdleStateRetention(Duration.ofSeconds(10L));

        // TODO 2 Checkpointing and state backend (disabled for local development)
        //        env.enableCheckpointing(3000L, CheckpointingMode.EXACTLY_ONCE);
        //
        //        //2.2 Checkpoint timeout
        //        env.getCheckpointConfig().setCheckpointTimeout(60000L);
        //        //2.3 Keep externalized checkpoints after job cancellation
        //        env.getCheckpointConfig().setExternalizedCheckpointCleanup(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        //        //2.4 Minimum pause between two checkpoints
        //        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(2000L);
        //        //2.5 Restart strategy
        //        // env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3,3000L));
        //        env.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.days(30), Time.seconds(3)));
        //
        //        env.setStateBackend(new HashMapStateBackend());
        //        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        //
        //        System.setProperty("HADOOP_USER_NAME","atguigu");

        // TODO 3 Register the topic_db CDC source table
        String groupId = "dwd_trade_order_detail";
        tableEnv.executeSql(MyKafkaUtil.getTopicDbDDL(groupId));

        // TODO 4 Filter order_detail rows (driving table of the wide record)
        Table odTable = tableEnv.sqlQuery("SELECT \n" +
                "  data['id'] id,\n" +
                "  data['order_id'] order_id,\n" +
                "  data['sku_id'] sku_id,\n" +
                "  data['sku_name'] sku_name,\n" +
                "  data['order_price'] order_price,\n" +
                "  data['sku_num'] sku_num,\n" +
                "  data['create_time'] create_time,\n" +
                "  data['source_type'] source_type,\n" +
                "  data['source_id'] source_id,\n" +
                "  data['split_total_amount'] split_total_amount,\n" +
                "  data['split_activity_amount'] split_activity_amount,\n" +
                "  data['split_coupon_amount'] split_coupon_amount,\n" +
                "  ts,\n" +
                "  proc_time\n" +
                "from topic_db\n" +
                "where `table`='order_detail'\n" +
                "and `type`='insert'");
        tableEnv.createTemporaryView("od",odTable);

        // TODO 5 Filter order_info rows (supplies user_id and province_id)
        Table oiTable = tableEnv.sqlQuery("SELECT \n" +
                "  data['id'] id,\n" +
                "  data['user_id'] user_id,\n" +
                "  data['province_id'] province_id\n" +
                "from topic_db\n" +
                "where `table`='order_info'\n" +
                "and `type`='insert'");
        tableEnv.createTemporaryView("oi",oiTable);

        // TODO 6 Filter order_detail_activity rows (activity participation, optional)
        Table odaTable = tableEnv.sqlQuery("SELECT \n" +
                "  data['order_detail_id'] order_detail_id,\n" +
                "  data['activity_id'] activity_id,\n" +
                "  data['activity_rule_id'] activity_rule_id\n" +
                "from topic_db\n" +
                "where `table`='order_detail_activity'\n" +
                "and `type`='insert'");

        tableEnv.createTemporaryView("oda",odaTable);

        // TODO 7 Filter order_detail_coupon rows (coupon usage, optional)
        Table odcTable = tableEnv.sqlQuery("SELECT \n" +
                "  data['order_detail_id'] order_detail_id,\n" +
                "  data['coupon_id'] coupon_id\n" +
                "from topic_db\n" +
                "where `table`='order_detail_coupon'\n" +
                "and `type`='insert'");

        tableEnv.createTemporaryView("odc",odcTable);

        // TODO 8 Join the four streams into the wide table.
        // Inner join to order_info (every detail has an order); LEFT joins to the
        // activity/coupon tables because not every detail has them. The left joins
        // emit retract (update-before/update-after) records when a late match
        // arrives — the upsert-kafka sink below absorbs them by primary key.
        Table resultTable = tableEnv.sqlQuery("SELECT \n" +
                "    od.id,\n" +
                "    order_id,\n" +
                "    sku_id,\n" +
                "    sku_name,\n" +
                "    order_price,\n" +
                "    sku_num,\n" +
                "    create_time,\n" +
                "    source_type,\n" +
                "    source_id,\n" +
                "    activity_id,\n" +
                "    activity_rule_id,\n" +
                "    coupon_id,\n" +
                "    user_id,\n" +
                "    province_id,\n" +
                "    split_total_amount,\n" +
                "    split_activity_amount,\n" +
                "    split_coupon_amount,\n" +
                "    ts,\n" +
                "    proc_time\n" +
                "from od \n" +
                "join oi\n" +
                "on od.order_id = oi.id\n" +
                "left join oda \n" +
                "on od.id = oda.order_detail_id\n" +
                "left join odc\n" +
                "on od.id = odc.order_detail_id");
        tableEnv.createTemporaryView("result_table",resultTable);

        // TODO 9 Write out via upsert-kafka. The PRIMARY KEY is required so the
        // connector can compact the retract stream produced by the left joins.
        tableEnv.executeSql("create table kafka_sink(\n" +
                "    id STRING,\n" +
                "    order_id STRING,\n" +
                "    sku_id STRING,\n" +
                "    sku_name STRING,\n" +
                "    order_price STRING,\n" +
                "    sku_num STRING,\n" +
                "    create_time STRING,\n" +
                "    source_type STRING,\n" +
                "    source_id STRING,\n" +
                "    activity_id STRING,\n" +
                "    activity_rule_id STRING,\n" +
                "    coupon_id STRING,\n" +
                "    user_id STRING,\n" +
                "    province_id STRING,\n" +
                "    split_total_amount STRING,\n" +
                "    split_activity_amount STRING,\n" +
                "    split_coupon_amount STRING,\n" +
                "    ts BIGINT,\n" +
                "    proc_time TIMESTAMP(3),\n" +
                "    PRIMARY KEY (id) NOT ENFORCED\n" +
                ")"
                + MyKafkaUtil.getUpsertKafkaDLL(Constant.TOPIC_DWD_TRADE_ORDER_DETAIL));

        tableEnv.executeSql("insert into kafka_sink select * from result_table");

    }
}
