package com.atguigu.gmall.realtime.app.dwd.db;

import com.atguigu.gmall.realtime.utils.MyKafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.Duration;

/**
 * @author Felix
 * @date 2023/4/18
 * Trade domain: order-detail (place-order) fact table.
 * Required running processes:
 *      zk, kafka, maxwell, DwdTradeOrderDetail
 */
public class DwdTradeOrderDetail {
    public static void main(String[] args) {
        //TODO 1. Environment setup
        //1.1 Stream execution environment
        StreamExecutionEnvironment execEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        //1.2 Parallelism
        execEnv.setParallelism(4);
        //1.3 Table environment layered on top of the stream environment
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(execEnv);
        //1.4 TTL for idle join state — detail/info rows for the same order arrive close
        //    together, so 10s is enough to pair them while bounding state size
        tEnv.getConfig().setIdleStateRetention(Duration.ofSeconds(10));

        /*//TODO 2. Checkpointing (deliberately disabled for local development)
        //2.1 Enable checkpointing
        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
        //2.2 Minimum pause between two checkpoints
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(2000L);
        //2.3 Restart strategy
        env.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.days(30),Time.seconds(3)));
        //2.4 Checkpoint timeout
        env.getCheckpointConfig().setCheckpointTimeout(60000L);
        //2.5 User for HDFS access
        System.setProperty("HADOOP_USER_NAME","atguigu");
        //2.6 State backend and checkpoint storage
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("xxx");
        //2.7 Whether checkpoints are retained after job cancellation
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.DELETE_ON_CANCELLATION);
        */

        //TODO 3. Map the Kafka topic_db topic to a dynamic source table
        tEnv.executeSql(MyKafkaUtil.getTopicDbDDL("dwd_trade_order_detail_group"));

        //TODO 4. Filter out order-detail insert rows
        String orderDetailSql =
                " select \n"
                + " data['id'] id,\n"
                + " data['order_id'] order_id,\n"
                + " data['course_id'] course_id,\n"
                + " data['course_name'] course_name,\n"
                + " data['create_time'] create_time,\n"
                + " data['user_id'] user_id,\n"
                + " data['final_amount']  final_amount,\n"
                + " ts\n"
                + "from `topic_db`\n"
                + "where `table`='order_detail' and `type` = 'insert'";
        Table orderDetailTable = tEnv.sqlQuery(orderDetailSql);
        tEnv.createTemporaryView("order_detail", orderDetailTable);
        //tEnv.executeSql("select * from order_detail").print();

        //TODO 5. Filter out order-info insert rows
        String orderInfoSql =
                " select \n"
                + " data['id'] id,\n"
                + " data['user_id'] user_id,\n"
                + " data['province_id'] province_id\n"
                + " from `topic_db`\n"
                + " where `table` = 'order_info' and `type`='insert'";
        Table orderInfoTable = tEnv.sqlQuery(orderInfoSql);
        tEnv.createTemporaryView("order_info", orderInfoTable);
        //tEnv.executeSql("select * from order_info").print();

        //TODO 6. Order-detail activity rows (no promotion activities in this dataset — kept for reference)
        /*Table orderDetailActivity = tEnv.sqlQuery("select \n" +
            "data['order_detail_id'] order_detail_id,\n" +
            "data['activity_id'] activity_id,\n" +
            "data['activity_rule_id'] activity_rule_id\n" +
            "from `topic_db`\n" +
            "where `table` = 'order_detail_activity'\n" +
            "and `type` = 'insert'\n");
        tEnv.createTemporaryView("order_detail_activity", orderDetailActivity);*/

        //TODO 7. Order-detail coupon rows (no coupons in this dataset — kept for reference)
        /*Table orderDetailCoupon = tEnv.sqlQuery("select\n" +
            "data['order_detail_id'] order_detail_id,\n" +
            "data['coupon_id'] coupon_id\n" +
            "from `topic_db`\n" +
            "where `table` = 'order_detail_coupon'\n" +
            "and `type` = 'insert'\n");
        tEnv.createTemporaryView("order_detail_coupon", orderDetailCoupon);*/

        //TODO 8. Join the filtered tables (two tables here; activity/coupon joins disabled above)
        String joinSql =
                " select\n"
                + " od.id,\n"
                + " od.order_id,\n"
                + " od.course_id,\n"
                + " od.course_name,\n"
                + " date_format(od.create_time,'yyyy-MM-dd') date_id,\n"
                + " od.user_id,\n"
                + " od.final_amount,\n"
                + " od.ts,\n"
                + " oi.province_id\n"
                + " from order_detail od \n"
                + " join order_info oi \n"
                + " on od.order_id = oi.id";
        Table joinedTable = tEnv.sqlQuery(joinSql);
        tEnv.createTemporaryView("result_table", joinedTable);
        //tEnv.executeSql("select * from result_table").print();

        //TODO 9. Write the join result to the Kafka sink topic
        //9.1 Sink table mapped to the target Kafka topic (upsert-kafka; regular joins
        //    emit retractions, so the sink needs a primary key)
        String sinkDdl =
                " create table dwd_trade_order_detail(\n"
                + " id string,\n"
                + " order_id string,\n"
                + " course_id string,\n"
                + " course_name string,\n"
                + " date_id string,\n"
                + " user_id string,\n"
                + " final_amount string,\n"
                + " ts string,\n"
                + " province_id string,\n"
                + " primary key(id) not enforced\n"
                + " )\n"
                + MyKafkaUtil.getUpsertKafkaDDL("dwd_trade_order_detail");
        tEnv.executeSql(sinkDdl);

        //9.2 Insert — column order of result_table matches the sink DDL above
        tEnv.executeSql("insert into dwd_trade_order_detail select * from result_table");
    }
}
