package com.atliuzu.app.dwd.db;

import com.atliuzu.utils.MyKafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.ZoneId;

/**
 * @author w
 * @create 2022-08-19-16:24
 */
/**
 * DWD job: builds the "trade pay detail success" fact stream.
 *
 * <p>Pipeline: read the {@code dwd_trade_order_detail} topic and the raw business
 * CDC topic ({@code topic_db}), filter successful payments
 * ({@code payment_info} inserts with {@code payment_status = '1602'}), join them
 * on {@code order_id}, and append the result to the
 * {@code dwd_trade_pay_detail_suc} Kafka topic.
 */
public class DwdTradePayDetailSuc {
    public static void main(String[] args) throws Exception {
        // TODO 1. Create the execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Interpret TIMESTAMP_LTZ values in China Standard Time.
        tableEnv.getConfig().setLocalTimeZone(ZoneId.of("GMT+8"));

        // TODO 2. State backend / checkpointing (disabled for local development).
        /*env.enableCheckpointing(3000L, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(60 * 1000L);
        env.getCheckpointConfig().enableExternalizedCheckpoints(
                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION
        );
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
        env.setRestartStrategy(
                RestartStrategies.failureRateRestart(3, Time.days(1L), Time.minutes(3L))
        );
        env.setStateBackend(new HashMapStateBackend ());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/ck");
        System.setProperty("HADOOP_USER_NAME", "atguigu");*/

        // TODO 3. Read the dwd_trade_order_detail Kafka topic as a Flink SQL table.
        // NOTE(review): consumer group "dwd_trade_pay_detail" here vs
        // "dwd_trade_pay_detail_suc" below — intentionally left as-is because
        // changing a group id would abandon committed offsets; confirm which
        // group this job should own.
        tableEnv.executeSql("create table dwd_trade_order_detail( " +
                        "id STRING, " +
                        "course_id STRING, " +
                        "course_name STRING, " +
                        "order_id STRING, " +
                        "user_id STRING, " +
                        "order_status STRING, " +
                        "out_trade_no STRING, " +
                        "trade_body STRING, " +
                        "session_id STRING, " +
                        "province_id STRING, " +
                        "origin_amount STRING, " +
                        "coupon_reduce STRING, " +
                        "final_amount STRING, " +
                        "create_time STRING, " +
                        "date_id STRING, " +
                        "update_time STRING, " +
                        "update_time_id STRING, " +
                        "expire_time STRING, " +
                        "od_ts BIGINT, " +
                        "row_op_ts TIMESTAMP_LTZ(3) " +
                        ")" + MyKafkaUtil.getKafkaDDL("dwd_trade_order_detail", "dwd_trade_pay_detail"));

        // TODO 4. Read the raw business CDC topic as a Flink SQL table.
        tableEnv.executeSql("create table topic_db(" +
                "`database` String, " +
                "`table` String, " +
                "`type` String, " +
                "`data` map<String, String>, " +
                "`old` map<String, String>, " +
                "`proc_time` as PROCTIME(), " +
                "`ts` string " +
                ")" + MyKafkaUtil.getKafkaDDL("topic_db", "dwd_trade_pay_detail_suc"));

        // TODO 5. Filter successful payments: new payment_info rows whose
        // payment_status is '1602' (payment success code).
        Table paymentInfo = tableEnv.sqlQuery("select " +
                "data['user_id'] user_id, " +
                "data['order_id'] order_id, " +
                "data['payment_type'] payment_type, " +
                "data['callback_time'] callback_time, " +
                "`proc_time`, " +
                "ts " +
                "from topic_db " +
                "where `table` = 'payment_info' " +
                "and `type` = 'insert' " +
                "and data['payment_status']='1602'");

        tableEnv.createTemporaryView("payment_info", paymentInfo);

        // TODO 6. Join order detail with successful payments on order_id.
        Table resultTable = tableEnv.sqlQuery("" +
                "select " +
                "pi.order_id order_id, " +
                "od.user_id user_id, " +
                "od.id order_detail_id, " +
                "od.course_id course_id, " +
                "od.course_name course_name, " +
                "od.order_status order_status, " +
                "od.out_trade_no out_trade_no, " +
                "od.trade_body trade_body, " +
                "od.session_id session_id, " +
                "od.province_id province_id, " +
                "od.origin_amount origin_amount, " +
                "od.coupon_reduce coupon_reduce, " +
                "od.final_amount final_amount, " +
                "od.create_time create_time, " +
                "od.date_id date_id, " +
                "od.update_time update_time, " +
                "od.update_time_id update_time_id, " +
                "pi.payment_type payment_type, " +
                "od.expire_time expire_time , " +
                "od.od_ts  od_ts, " +
                "pi.callback_time, " +
                "pi.ts, " +
                "od.row_op_ts row_op_ts " +
                "from dwd_trade_order_detail od " +
                "join payment_info pi " +
                "on pi.order_id = od.order_id "
        );
        tableEnv.createTemporaryView("result_table", resultTable);

        // TODO 7. Create the dwd_trade_pay_detail_suc Kafka sink table.
        // Fixed: the DDL previously declared update_time_id and payment_type
        // twice each — duplicate column names are rejected by Flink's DDL
        // validator, and the 25-column schema could not accept the 23-column
        // "select *" insert below. The column list now mirrors result_table
        // exactly, in the same order.
        tableEnv.executeSql("create table dwd_trade_pay_detail_suc(" +
                "order_id String, " +
                "user_id String, " +
                "order_detail_id String, " +
                "course_id String, " +
                "course_name String, " +
                "order_status String, " +
                "out_trade_no String, " +
                "trade_body String, " +
                "session_id String, " +
                "province_id String, " +
                "origin_amount String, " +
                "coupon_reduce String, " +
                "final_amount String, " +
                "create_time String, " +
                "date_id String, " +
                "update_time String, " +
                "update_time_id String, " +
                "payment_type String, " +
                "expire_time String, " +
                "od_ts BIGINT, " +
                "callback_time String, " +
                "ts String, " +
                "row_op_ts TIMESTAMP_LTZ(3) " +
                ")" + MyKafkaUtil.getInsertKafkaDDL("dwd_trade_pay_detail_suc"));

        // TODO 8. Append the join result to the Kafka sink.
        // executeSql submits the insert job; no env.execute() is needed.
        tableEnv.executeSql("insert into dwd_trade_pay_detail_suc select * from result_table");

    }
}
