package com.atguigu.app.dwd;

import com.atguigu.utils.MyKafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @className: DwdTradeOrderDetail
 * @author: LinCong
 * @description:
 * @date: 2023/2/3 13:35
 * @version: 1.0
 */

//Data pipeline: business DB (MySQL) -> Maxwell -> Kafka -> Flink (DwdTradeOrderPreProcess) -> Kafka -> Flink (DwdTradeOrderDetail) -> Kafka
public class DwdTradeOrderDetail {

    /**
     * Entry point of the DWD trade-order-detail job.
     *
     * <p>Reads the pre-processed order stream from the Kafka topic
     * {@code dwd_trade_order_pre_process}, keeps only rows whose Maxwell
     * {@code type} is {@code insert} (i.e. newly placed orders), and writes the
     * projected order-detail columns to the Kafka topic
     * {@code dwd_trade_order_detail}.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        // 1. Set up the streaming execution environment and its Table API bridge.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(3);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // NOTE(review): checkpointing, the restart strategy and the HashMap state
        // backend are deliberately disabled here (convenient for local debugging).
        // Re-enable them before a production deployment, e.g.:
        //   env.enableCheckpointing(5 * 60000L, CheckpointingMode.EXACTLY_ONCE);
        //   env.getCheckpointConfig().setCheckpointTimeout(10 * 60000L);   // drop checkpoints slower than 10 min
        //   env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        //   env.getCheckpointConfig().setMinPauseBetweenCheckpoints(120000L);
        //   env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 5000L));
        //   env.setStateBackend(new HashMapStateBackend());
        //   System.setProperty("HADOOP_USER_NAME", "kevin");
        //   env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop3cluster/211126/ck");

        // 2. Declare the Kafka source table over topic dwd_trade_order_pre_process.
        //    All business columns arrive as strings (Maxwell JSON); `old` carries the
        //    pre-update values and `row_op_ts` is the ingestion timestamp.
        tableEnv.executeSql("" +
                "create table dwd_order_pre( " +
                "    `id` string, " +
                "    `order_id` string, " +
                "    `sku_id` string, " +
                "    `sku_name` string, " +
                "    `order_price` string, " +
                "    `sku_num` string, " +
                "    `create_time` string, " +
                "    `source_type_id` string, " +
                "    `source_type_name` string, " +
                "    `source_id` string, " +
                "    `split_total_amount` string, " +
                "    `split_activity_amount` string, " +
                "    `split_coupon_amount` string, " +
                "    `consignee` string, " +
                "    `consignee_tel` string, " +
                "    `total_amount` string, " +
                "    `order_status` string, " +
                "    `user_id` string, " +
                "    `payment_way` string, " +
                "    `delivery_address` string, " +
                "    `order_comment` string, " +
                "    `out_trade_no` string, " +
                "    `trade_body` string, " +
                "    `operate_time` string, " +
                "    `expire_time` string, " +
                "    `process_status` string, " +
                "    `tracking_no` string, " +
                "    `parent_order_id` string, " +
                "    `province_id` string, " +
                "    `activity_reduce_amount` string, " +
                "    `coupon_reduce_amount` string, " +
                "    `original_total_amount` string, " +
                "    `feight_fee` string, " +
                "    `feight_fee_reduce` string, " +
                "    `refundable_time` string, " +
                "    `order_detail_activity_id` string, " +
                "    `activity_id` string, " +
                "    `activity_rule_id` string, " +
                "    `order_detail_coupon_id` string, " +
                "    `coupon_id` string, " +
                "    `coupon_use_id` string, " +
                "    `type` string, " +
                "    `old` map<string,string>, " +
                "    `row_op_ts` TIMESTAMP_LTZ(3) " +
                ")"  + MyKafkaUtil.getKafkaDDL("dwd_trade_order_pre_process","order_detail"));

        // 3. Keep only order-placement rows: Maxwell emits type='insert' for new
        //    orders. Project just the columns the downstream detail topic needs.
        Table filteredTable = tableEnv.sqlQuery("" +
                "select " +
                "id,  " +
                "order_id,  " +
                "user_id,  " +
                "sku_id,  " +
                "sku_name,  " +
                "sku_num,  " +
                "order_price,  " +
                "province_id,  " +
                "activity_id,  " +
                "activity_rule_id,  " +
                "coupon_id,  " +
                "create_time,  " +
                "source_id,  " +
                "source_type_id,  " +
                "source_type_name,  " +
                "split_activity_amount,  " +
                "split_coupon_amount,  " +
                "split_total_amount,  " +
                "row_op_ts  " +
                "from dwd_order_pre " +
                "where `type`='insert'");
        tableEnv.createTemporaryView("filtered_table", filteredTable);

        // 4. Declare the Kafka sink table for the DWD order-detail topic. The column
        //    list must stay in the same order as the projection above, because the
        //    insert below uses `select *`.
        tableEnv.executeSql("" +
                "create table dwd_trade_order_detail(  " +
                "id string,  " +
                "order_id string,  " +
                "user_id string,  " +
                "sku_id string,  " +
                "sku_name string,  " +
                "sku_num string,  " +
                "order_price string,  " +
                "province_id string,  " +
                "activity_id string,  " +
                "activity_rule_id string,  " +
                "coupon_id string,  " +
                "create_time string,  " +
                "source_id string,  " +
                "source_type_id string,  " +
                "source_type_name string,  " +
                "split_activity_amount string,  " +
                "split_coupon_amount string,  " +
                "split_total_amount string,  " +
                "row_op_ts timestamp_ltz(3)  " +
                ")" + MyKafkaUtil.getKafkaSinkDDL("dwd_trade_order_detail"));

        // 5. Submit the streaming INSERT job that writes the filtered rows to Kafka;
        //    print() surfaces the submission result (job id) on stdout.
        tableEnv.executeSql("insert into dwd_trade_order_detail select * from filtered_table")
                .print();
    }
}
