package com.atguigu.edu.realtime.app.dwd.db;

import com.atguigu.edu.realtime.util.MyKafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.Duration;

/**
 * DWD trade-order pre-processing job.
 *
 * <p>Reads MySQL change-log records from the {@code topic_db} Kafka topic,
 * filters out {@code order_info} (insert/update) and {@code order_detail}
 * (insert) rows, joins them on the order id, and writes the joined result to
 * the {@code dwd_trade_order_pre_process} upsert-kafka topic for downstream
 * DWD trade applications to consume.
 *
 * @author:ray
 * @time:2022/10/14 23:38
 * @description:
 **/
public class DwdTradeOrderPreProcess {
    public static void main(String[] args) {
        //TODO 1. Environment setup
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        // State TTL for the unbounded regular join below: 15 minutes (presumably
        // the order-payment timeout window — TODO confirm) plus a 5-second safety
        // margin, so stale per-key join state is eventually evicted.
        tableEnv.getConfig().setIdleStateRetention(Duration.ofSeconds(15*60+5));

        //TODO 2. Checkpoint configuration
        // NOTE(review): checkpointing is entirely disabled below — without it the
        // job has no failure recovery and no exactly-once guarantees. Re-enable
        // (and restore the matching imports) before running in production.
        //env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
//        env.getCheckpointConfig().setCheckpointTimeout(60*1000L);
//        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(2000L);
//        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
//        env.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.days(30),Time.seconds(3)));
//        env.setStateBackend(new HashMapStateBackend());
//        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop101:8020/edu/ck");
        // NOTE(review): the Hadoop impersonation property is normally
        // "HADOOP_USER_NAME", not "HADOOP_USER" — confirm before re-enabling.
//        System.setProperty("HADOOP_USER","atguigu");
        //TODO 3. Read change-log data from the topic_db Kafka topic
        String groupId = "dwd_trade_order_pre_group";
        tableEnv.executeSql(MyKafkaUtil.getTopicDbDDL(groupId));


        //TODO 4. Filter out order_info rows (inserts and updates; `old` keeps
        //        the pre-update column values so downstream can detect changes)
        Table orderInfo = tableEnv.sqlQuery(
                "select `type`,\n" +
                        " ts oi_ts,\n" +
                        " `old`,\n" +
                        " data['id'] id,\n" +
                        " data['user_id'] user_id,\n" +
                        " data['order_status'] order_status,\n" +
                        " data['session_id'] session_id,\n" +
                        " data['province_id'] province_id,\n" +
                        " data['operate_time'] operate_time\n" +
                        " from topic_db\n" +
                        " where `table` = 'order_info'\n" +
                        " and (`type` = 'insert' or `type` = 'update')"

        );
        tableEnv.createTemporaryView("order_info",orderInfo);


        //TODO 5. Filter out order_detail rows (inserts only — detail rows are
        //        written once and never updated)
        Table orderDetail = tableEnv.sqlQuery(" select data['id'] id,\n" +
                " data['course_id'] course_id,\n" +
                " data['course_name'] course_name,\n" +
                " data['order_id'] order_id,\n" +
                " data['origin_amount'] origin_amount,\n" +
                " data['coupon_reduce'] coupon_reduce,\n" +
                " data['final_amount'] final_amount,\n" +
                " data['create_time'] create_time,\n" +
                " ts od_ts\n" +
                " from topic_db\n" +
                " where `table` = 'order_detail' and `type` = 'insert'"

        );
        tableEnv.createTemporaryView("order_detail",orderDetail);

        //TODO 6. Join order_info with order_detail on the order id.
        // This is an unbounded regular (inner) join; its state is bounded by the
        // idle-state retention configured above. current_row_timestamp() stamps
        // each emitted row so downstream consumers can deduplicate by recency.
        Table dwdTradeOrderPreProcess = tableEnv.sqlQuery(
                " select `type`,\n" +
                        " oi_ts,\n" +
                        " `old`,\n" +
                        " user_id,\n" +
                        " order_status,\n" +
                        " session_id,\n" +
                        " province_id,\n" +
                        " date_format(operate_time,'yyyy-MM-dd') operate_date,\n" +
                        " operate_time,\n" +
                        " \n" +
                        " od.id,\n" +
                        " course_id,\n" +
                        " course_name,\n" +
                        " order_id,\n" +
                        " origin_amount,\n" +
                        " coupon_reduce,\n" +
                        " final_amount,\n" +
                        " date_format(create_time,'yyyy-MM-dd') create_date,\n" +
                        " create_time,\n" +
                        " od_ts,\n" +

                        " current_row_timestamp() row_op_ts \n" +
                        "from order_info oi join order_detail od\n" +
                        "on oi.id = od.order_id"

        );

        // Sink DDL: upsert-kafka is required because the regular join above
        // produces a retracting changelog; the primary key (the order_detail id,
        // unique per joined row) lets the connector compact updates per key.
        tableEnv.executeSql("create table dwd_trade_order_pre_process(" +
                " `type` string,\n" +
                " oi_ts string,\n" +
                " `old` map<string,string>,\n" +
                " user_id string,\n" +
                " order_status string,\n" +
                " session_id string,\n" +
                " province_id string,\n" +
                " operate_date string,\n" +
                " operate_time string,\n" +
                " id string,\n" +
                " course_id string,\n" +
                " course_name string,\n" +
                " order_id string,\n" +
                " origin_amount string,\n" +
                " coupon_reduce string,\n" +
                " final_amount string,\n" +
                " create_date string,\n" +
                " create_time string,\n" +
                " od_ts string,\n" +
                "row_op_ts timestamp_ltz(3) ,\n" +
                "primary key(id) not enforced\n" +
                ")" + MyKafkaUtil.getUpsertKafkaDDL("dwd_trade_order_pre_process"));

        //TODO 7. Write the joined result to the Kafka sink topic.
        // executeInsert submits the job asynchronously; no env.execute() is
        // needed for Table API pipelines.
        dwdTradeOrderPreProcess.executeInsert("dwd_trade_order_pre_process");



    }
}
