package com.atguigu.edu.realtime.app.dwd.db;


import com.atguigu.edu.realtime.util.MyKafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.Duration;

/**
 * ClassName: DwdTradeOrderDetail
 * Package: com.atguigu.edu.realtime.app.dwd.db
 * Description:
 *
 * @Author Mr.2
 * @Create 2023/9/8 14:45
 * @Version 1.0
 */
public class DwdTradeOrderDetail {
    /**
     * Entry point: builds and submits the Flink SQL job.
     *
     * <p>Pipeline: Kafka topic_db (Maxwell changelog) -> filter order_detail /
     * order_info inserts -> inner join on order id -> upsert-kafka sink topic
     * dwd_trade_order_detail.
     *
     * <p>Note: no {@code env.execute()} is needed — the final
     * {@code executeSql("insert into ...")} submits its own job.
     */
    public static void main(String[] args) {
        // TODO 1. Basic stream processing environment
        // 1.1 Create the stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // 1.2 Set parallelism
        env.setParallelism(4);
        // 1.3 Create the table execution environment
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        // 1.4 Set state TTL for the regular (non-interval) join below.
        // Sized from how stale business data may be plus transport latency.
        // NOTE(review): 10s is tight for a production order join — confirm
        // against the upstream data latency before relying on it.
        tableEnv.getConfig().setIdleStateRetention(Duration.ofSeconds(10L));

        // TODO 2. Checkpoint-related settings (left unimplemented here;
        // typically configured cluster-wide or before going to production)
        // 2.1 Enable checkpointing
        // 2.2 Set checkpoint timeout
        // 2.3 Retain checkpoints after job cancellation
        // 2.4 Set minimum pause between checkpoints
        // 2.5 Set restart strategy — e.g. failure-rate restart
        // 2.6 Set state backend — usually HashMapStateBackend
        // 2.7 Set the Hadoop user for checkpoint storage access

        // TODO 3. Read from Kafka topic_db and register the changelog as a dynamic table
        tableEnv.executeSql(MyKafkaUtil.getTopicDbDDL("dwd_trade_order_group"));

        // TODO 4. Filter the order detail table (order_detail), keeping only needed fields
        Table orderDetailTable = tableEnv.sqlQuery("select \n" +
                "  data['id'] id,\n" +
                "  data['course_id'] course_id,\n" +
                "  data['course_name'] course_name,\n" +
                "  data['order_id'] order_id,\n" +
                "  data['user_id'] user_id,\n" +
                "  data['origin_amount'] origin_amount,\n" +
                "  data['coupon_reduce'] coupon_reduce,\n" +
                "  data['final_amount'] final_amount,\n" +
                "  data['create_time'] create_time,\n" +
                "  ts\n" +
                "from `topic_db` \n" +
                "where `table` = 'order_detail' and `type` = 'insert'");
        // Register the intermediate table so SQL below can reference it
        tableEnv.createTemporaryView("order_detail", orderDetailTable);
        // For test output -> yes
//        tableEnv.executeSql("select * from order_detail").print();

        // TODO 5. Filter the order table (order_info), keeping only needed fields
        Table orderInfoTable = tableEnv.sqlQuery("select\n" +
                "  data['id'] id,\n" +
                "  data['user_id'] user_id,\n" +
                "  data['order_status'] order_status,\n" +
                "  data['session_id'] session_id,\n" +
                "  data['province_id'] province_id\n" +
                "from `topic_db` \n" +
                "where `table` = 'order_info' and `type` = 'insert'");
        // Register the intermediate table so SQL below can reference it
        tableEnv.createTemporaryView("order_info", orderInfoTable);

        // TODO 6. Join the two tables above (inner join; state retained per TTL set in 1.4)
        Table resultTable = tableEnv.sqlQuery("select\n" +
                "  od.id,\n" +
                "  od.order_id,\n" +
                "  oi.user_id,\n" +
                "  od.course_id,\n" +
                "  od.course_name,\n" +
                "  oi.province_id,\n" +
                "  oi.session_id,\n" +
                "  date_format(od.create_time, 'yyyy-MM-dd') date_id,\n" +
                "  od.create_time,\n" +
                "  od.origin_amount,\n" +
                "  od.coupon_reduce,\n" +
                "  od.final_amount,\n" +
                "  od.ts\n" +
                "from order_detail od\n" +
                "join order_info oi\n" +
                "on od.order_id = oi.id");
        // Register the join result
        tableEnv.createTemporaryView("result_table", resultTable);
        // For test output -> yes
//        tableEnv.executeSql("select * from result_table").print();

        // TODO 7. Write the joined result to a Kafka topic
        // 7.1 Create a sink table mapped to the target topic. The join can emit
        // retractions, so an upsert-kafka connector with a PRIMARY KEY is required.
        tableEnv.executeSql("CREATE TABLE dwd_trade_order_detail(\n" +
                "  id STRING,\n" +
                "  order_id STRING,\n" +
                "  user_id STRING,\n" +
                "  course_id STRING,\n" +
                "  course_name STRING,\n" +
                "  province_id STRING,\n" +
                "  session_id STRING,\n" +
                "  date_id STRING,\n" +
                "  create_time STRING,\n" +
                "  origin_amount STRING,\n" +
                "  coupon_reduce STRING,\n" +
                "  final_amount STRING,\n" +
                "  ts STRING,\n" +
                "  PRIMARY KEY (id) NOT ENFORCED\n" +
                ")" + MyKafkaUtil.getUpsertKafkaDDL("dwd_trade_order_detail"));

        // 7.2 Write into the Kafka topic (this submits the job)
        tableEnv.executeSql("insert into dwd_trade_order_detail select * from result_table");

    }
}
