package com.edu.realtime.app.dwd.db;

import com.edu.realtime.util.MyKafkaUtil;
import com.sun.xml.internal.bind.v2.TODO;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Trade domain: payment-success fact table (DWD layer).
 *
 * Joins successful payment records from topic_db with the order-detail
 * fact stream and writes the result to an upsert-kafka sink.
 *
 * @author zcx
 * @create 2022-10-20 20:34
 */
public class DwdTradePayDetailSuc {
    public static void main(String[] args) {
        // TODO 1. Basic environment setup
        // 1.1 Stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // 1.2 Parallelism (matches the Kafka topic partition count used by this pipeline)
        env.setParallelism(4);
        // 1.3 Table execution environment layered on the stream environment
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

         /*// TODO 2. Checkpoint settings (disabled during development)
        // 2.1 Enable checkpointing
        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
        // 2.2 Checkpoint timeout
        env.getCheckpointConfig().setCheckpointTimeout(60000L);
        // 2.3 Minimum pause between two checkpoints
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(2000L);
        // 2.4 Whether checkpoints are retained after job cancellation
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.DELETE_ON_CANCELLATION);
        // 2.5 Restart strategy
        env.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.days(30), Time.seconds(3)));
        // 2.6 State backend + checkpoint storage
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://nameservice1:8020/gmall/ck");
        // 2.7 User for HDFS access
        System.setProperty("HADOOP_USER_NAME", "root");*/

        // TODO 3. Read CDC data from the Kafka topic_db topic as a dynamic table
        tableEnv.executeSql(MyKafkaUtil.getTopicDbDDL("dwd_trade_pay_group"));

        // TODO 4. Filter out successful payments
        // payment_status '1602' marks a payment confirmed successful.
        Table paymentInfo = tableEnv.sqlQuery("select\n" +
                "data['order_id'] order_id,\n" +
                "data['payment_type'] payment_type,\n" +
                "data['callback_time'] callback_time,\n" +
                "ts\n" +
                "from topic_db\n" +
                "where `table` = 'payment_info'\n" +
                "and data['payment_status']='1602'");
        tableEnv.createTemporaryView("payment_info", paymentInfo);

        // TODO 5. Read order data from the order-detail fact topic
        tableEnv.executeSql("create table dwd_trade_order_detail(\n" +
                "id string,\n" +
                "order_id string,\n" +
                "user_id string,\n" +
                "course_id string,\n" +
                "course_name string,\n" +
                "province_id string,\n" +
                "date_id string,\n" +
                "create_time string,\n" +
                "final_amount string,\n" +
                "ts string,\n" +
                "row_op_ts timestamp_ltz(3))" + MyKafkaUtil.getKafkaDDL("dwd_trade_order_detail", "dwd_trade_pay_group"));

        // TODO 6. Join the two tables
        // Fixes over the original query: added the missing commas after
        // od.user_id and pi.ts, dropped the trailing comma before FROM, and
        // corrected the join condition — the original referenced a
        // non-existent alias `oi` (pi.order_id = oi.id); both sides carry
        // order_id, so the correct condition is pi.order_id = od.order_id.
        Table resultTable = tableEnv.sqlQuery("\n" +
                "select\n" +
                "od.id order_detail_id,\n" +
                "od.order_id,\n" +
                "od.user_id,\n" +
                "od.course_id,\n" +
                "od.course_name,\n" +
                "od.province_id,\n" +
                "pi.payment_type payment_type_code,\n" +
                "pi.callback_time,\n" +
                "od.final_amount,\n" +
                "pi.ts,\n" +
                "od.row_op_ts\n" +
                "from dwd_trade_order_detail od\n" +
                "join payment_info pi\n" +
                "on pi.order_id = od.order_id");
        tableEnv.createTemporaryView("result_table", resultTable);

        // TODO 7. Map a dynamic table onto the target Kafka topic
        // upsert-kafka requires a (not enforced) primary key for changelog keys.
        tableEnv.executeSql("create table dwd_trade_pay_detail_suc(\n" +
                "order_detail_id string,\n" +
                "order_id string,\n" +
                "user_id string,\n" +
                "course_id string,\n" +
                "course_name string,\n" +
                "province_id string,\n" +
                "payment_type_code string,\n" +
                "callback_time string,\n" +
                "final_amount string,\n" +
                "ts string,\n" +
                "row_op_ts timestamp_ltz(3),\n" +
                "primary key(order_detail_id) not enforced\n" +
                ")" + MyKafkaUtil.getUpsertKafkaDDL("dwd_trade_pay_detail_suc"));

        // TODO 8. Write the join result to the sink topic
        // NOTE(review): column order of result_table matches the sink DDL,
        // which `insert ... select *` relies on.
        tableEnv.executeSql("" +
                "insert into dwd_trade_pay_detail_suc select * from result_table");
    }
}
