package com.nepu.gmall.realtime.app.dwd;

import com.nepu.gmall.realtime.util.KafkaUtils;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 *
 * 本类处理的是交易域下单事务事实表：
 *  数据的处理流程是：
 *      （1）从订单预处理表中读取数据
 *      （2）过滤出type = insert 类型的数据，取出对应的字段
 *      （3）将数据写入到kafka
 *
 * 本类数据的流向：
 * web/app --> mysql --> maxwell --> kafka --> DwdTradeOrderPreProcess --> kafka ---> DwdTradeOrderDetail
 * @author chenshuaijun
 * @create 2023-02-27 15:22
 */
public class DwdTradeOrderDetail {

    public static void main(String[] args) {
        // TODO 1. Set up the streaming environment and the Table API bridge.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        // Checkpointing configuration (disabled for local development; re-enable in production):
        // set a 5-minute checkpoint interval with exactly-once semantics
        /*env.enableCheckpointing(5 * 60000L, CheckpointingMode.EXACTLY_ONCE);
        // set the checkpoint timeout to 10 minutes
        env.getCheckpointConfig().setCheckpointTimeout(10 * 60000L);
        // Externalized checkpoints: checkpoint metadata is periodically written to an external
        // system and retained on job failure, so a failed job can be restored from it.
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        // restart strategy used after checkpoint failures
        env.setRestartStrategy(RestartStrategies.failureRateRestart(10, Time.of(1L, TimeUnit.DAYS), Time.of(3L, TimeUnit.MINUTES)));
        // minimum pause between two consecutive checkpoints
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
        // state backend: in-memory (HashMap) state backend
        env.setStateBackend(new HashMapStateBackend());
        // checkpoint storage path on HDFS
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/checkpoint");
        // HDFS is only writable by the atguigu user, so run as that user
        System.setProperty("HADOOP_USER_NAME", "atguigu");*/

        // TODO 2. Register the Kafka-backed source table over the order pre-process topic.
        createOrderPreProcessSource(tableEnv);

        // TODO 3. Keep only newly inserted rows (type = 'insert') and project the
        //         columns the downstream order-detail table needs.
        Table filterTable = filterInsertedOrders(tableEnv);
        tableEnv.createTemporaryView("resultTable", filterTable);

        // TODO 4. Register the Kafka sink table and write the filtered rows to it.
        createOrderDetailSink(tableEnv);
        tableEnv.executeSql("insert into dwd_trade_order_detail select * from resultTable");
    }

    /**
     * Registers {@code dwd_trade_order_pre_process}, the Kafka source table produced by
     * the upstream DwdTradeOrderPreProcess job. Connector options come from
     * {@code KafkaUtils.getKafkaDDL} (topic {@code dwd_trade_order_pre_process},
     * consumer group {@code DwdTradeOrderDetail}).
     */
    private static void createOrderPreProcessSource(StreamTableEnvironment tableEnv) {
        tableEnv.executeSql("" +
                "CREATE TABLE dwd_trade_order_pre_process ( " +
                "    id string, " +
                "    order_id string, " +
                "    sku_id string, " +
                "    sku_name string, " +
                "    order_price string, " +
                "    sku_num string, " +
                "    create_time string, " +
                "    source_type_id string, " +
                "    source_type_name string, " +
                "    source_id string, " +
                "    split_total_amount string, " +
                "    split_activity_amount string, " +
                "    split_coupon_amount string, " +
                "    consignee string, " +
                "    consignee_tel string, " +
                "    total_amount string, " +
                "    order_status string, " +
                "    user_id string, " +
                "    payment_way string, " +
                "    delivery_address string, " +
                "    order_comment string, " +
                "    out_trade_no string, " +
                "    trade_body string, " +
                "    operate_time string, " +
                "    expire_time string, " +
                "    process_status string, " +
                "    tracking_no string, " +
                "    parent_order_id string, " +
                "    province_id string, " +
                "    activity_reduce_amount string, " +
                "    coupon_reduce_amount string, " +
                "    original_total_amount string, " +
                "    feight_fee string, " +
                "    feight_fee_reduce string, " +
                "    refundable_time string, " +
                "    order_detail_activity_id string, " +
                "    activity_id string, " +
                "    activity_rule_id string, " +
                "    order_detail_coupon_id string, " +
                "    coupon_id string, " +
                "    coupon_use_id string, " +
                "    `type` string, " +
                "    `old` map<string,string>," +
                "    `row_op_ts` TIMESTAMP_LTZ(3)" +
                //"    PRIMARY KEY (id) NOT ENFORCED " +
                ")" + KafkaUtils.getKafkaDDL("dwd_trade_order_pre_process","DwdTradeOrderDetail"));
    }

    /**
     * Returns the order-detail projection of rows whose {@code type} is {@code insert},
     * i.e. order detail records newly created in the source database.
     */
    private static Table filterInsertedOrders(StreamTableEnvironment tableEnv) {
        return tableEnv.sqlQuery("" +
                "select " +
                "    id, " +
                "    order_id, " +
                "    user_id, " +
                "    sku_id, " +
                "    sku_name, " +
                "    sku_num, " +
                "    order_price, " +
                "    province_id, " +
                "    activity_id, " +
                "    activity_rule_id, " +
                "    coupon_id, " +
                "    create_time, " +
                "    source_id, " +
                "    source_type_id, " +
                "    source_type_name, " +
                "    split_activity_amount, " +
                "    split_coupon_amount, " +
                "    split_total_amount, " +
                "    row_op_ts " +
                "from dwd_trade_order_pre_process " +
                "where `type`='insert'");
    }

    /**
     * Registers {@code dwd_trade_order_detail}, the Kafka sink table for the
     * transaction-domain order-detail fact data. Connector options come from
     * {@code KafkaUtils.getKafkaSinkDDL}.
     */
    private static void createOrderDetailSink(StreamTableEnvironment tableEnv) {
        tableEnv.executeSql("" +
                "create table dwd_trade_order_detail( " +
                "    id string, " +
                "    order_id string, " +
                "    user_id string, " +
                "    sku_id string, " +
                "    sku_name string, " +
                "    sku_num string, " +
                "    order_price string, " +
                "    province_id string, " +
                "    activity_id string, " +
                "    activity_rule_id string, " +
                "    coupon_id string, " +
                "    create_time string, " +
                "    source_id string, " +
                "    source_type_id string, " +
                "    source_type_name string, " +
                "    split_activity_amount string, " +
                "    split_coupon_amount string, " +
                "    split_total_amount string, " +
                "    row_op_ts TIMESTAMP_LTZ(3)" +
                ")" + KafkaUtils.getKafkaSinkDDL("dwd_trade_order_detail"));
    }
}
