package com.atguigu.gmall.realtime.app.dwd.db;

import com.atguigu.gmall.realtime.app.BaseSQLApp;
import com.atguigu.gmall.realtime.util.SQLUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD layer: builds the trade "payment success detail" wide table.
 *
 * Joins three inputs:
 *   1. dwd_trade_order_detail - order detail wide table (Kafka, produced upstream)
 *   2. payment_info           - filtered from the raw ods_db topic; only rows whose
 *                               payment succeeded (type = 'update', payment_status = '1602')
 *   3. base_dic               - MySQL lookup table resolving payment_type code -> name
 * and writes the result to the upsert-Kafka topic dwd_trade_pay_detail_suc.
 *
 * @Author lzc
 * @Date 2022/5/1 12:10
 */
public class DWD_08_DwdTradePayDetailSucApp extends BaseSQLApp {
    public static void main(String[] args) {
        new DWD_08_DwdTradePayDetailSucApp()
            .init("DwdTradePayDetailSucApp", 3007, 1, "DwdTradePayDetailSucApp");
    }
    
    @Override
    public void handle(StreamExecutionEnvironment env,
                       StreamTableEnvironment tEnv) {
        // Three-way join:
        //   1. order detail wide table
        //   2. payment rows filtered from ods_db (type updates with status 1602 = paid)
        //   3. dictionary lookup table
        //
        // NOTE(review): this is a regular (unbounded) stream-stream join; the sibling
        // DWD apps usually set an idle-state TTL (e.g. tEnv.getConfig().setIdleStateRetention)
        // so join state for old orders is eventually dropped - confirm whether BaseSQLApp
        // already configures it; otherwise state grows without bound.
        
        // 1. Source: order detail wide table from Kafka.
        tEnv.executeSql(
            "create table dwd_trade_order_detail( " +
                "id string, " +
                "order_id string, " +
                "user_id string, " +
                "sku_id string, " +
                "sku_name string, " +
                "province_id string, " +
                "activity_id string, " +
                "activity_rule_id string, " +
                "coupon_id string, " +
                "date_id string, " +
                "create_time string, " +
                "source_id string, " +
                "source_type_code string, " +
                "source_type_name string, " +
                "sku_num string, " +
                "split_original_amount string, " +
                "split_activity_amount string, " +
                "split_coupon_amount string, " +
                "split_total_amount string, " +
                "ts string, " +
                "row_op_ts timestamp_ltz(3) " +
                ")" + SQLUtil.getKafkaDDL("DwdTradePayDetailSucApp", "dwd_trade_order_detail"));
        
        // 2. Source: raw business change-log data (ods_db) as a Flink SQL table.
        //    proc_time is needed for the temporal (lookup) join against base_dic below.
        // NOTE(review): the consumer group here ("dwd_trade_pay_detail_suc") differs from
        // the one used above ("DwdTradePayDetailSucApp") - presumably unintentional, but
        // changing a live group id resets committed offsets, so verify before unifying.
        tEnv.executeSql(
            "create table topic_db(" +
                "`database` String, " +
                "`table` String, " +
                "`type` String, " +
                "`data` map<String, String>, " +
                "`old` map<String, String>, " +
                "`proc_time` as PROCTIME(), " +
                "`ts` string " +
                ")" + SQLUtil.getKafkaDDL("dwd_trade_pay_detail_suc", "ods_db"));
        
        // 3. Keep only successful payments.
        //    FIX: the success predicates were commented out, so every payment_info
        //    change-log row (inserts, failed/refunded payments) leaked into the
        //    "payment success" table. Status 1602 marks a completed payment, and it
        //    is reached via an update to the row - hence both conditions.
        Table paymentInfo = tEnv.sqlQuery(
            "select " +
                "data['user_id'] user_id, " +
                "data['order_id'] order_id, " +
                "data['payment_type'] payment_type, " +
                "data['callback_time'] callback_time, " +
                "`proc_time`, " +
                "ts " +
                "from topic_db " +
                "where `table` = 'payment_info' " +
                "and `type` = 'update' " +
                "and data['payment_status']='1602'");
        tEnv.createTemporaryView("payment_info", paymentInfo);
        
        // 4. MySQL lookup table for the payment-type dictionary.
        tEnv.executeSql(SQLUtil.getBaseDicLookUpDDL());
        
        // 5. Join the three tables into the payment-success wide table.
        //    Inner join on order_id (a payment without order details is useless);
        //    left lookup join on base_dic so an unknown dictionary code does not
        //    drop the row, only leaves payment_type_name null.
        Table resultTable = tEnv.sqlQuery(
            "select " +
                "od.id order_detail_id, " +
                "od.order_id, " +
                "od.user_id, " +
                "od.sku_id, " +
                "od.sku_name, " +
                "od.province_id, " +
                "od.activity_id, " +
                "od.activity_rule_id, " +
                "od.coupon_id, " +
                "pi.payment_type payment_type_code, " +
                "dic.dic_name payment_type_name, " +
                "pi.callback_time, " +
                "od.source_id, " +
                "od.source_type_code, " +
                "od.source_type_name, " +
                "od.sku_num, " +
                "od.split_original_amount, " +
                "od.split_activity_amount, " +
                "od.split_coupon_amount, " +
                "od.split_total_amount split_payment_amount, " +
                "pi.ts, " +
                "od.row_op_ts row_op_ts " +
                "from payment_info pi " +
                "join dwd_trade_order_detail od " +
                "on pi.order_id = od.order_id " +
                "left join `base_dic` for system_time as of pi.proc_time as dic " +
                "on pi.payment_type = dic.id");
        tEnv.createTemporaryView("result_table", resultTable);
        
        // 6. Sink: upsert-Kafka table keyed by order_detail_id, so retractions
        //    produced by the join are collapsed into the latest row per key.
        tEnv.executeSql(
            "create table dwd_trade_pay_detail_suc( " +
                "order_detail_id string, " +
                "order_id string, " +
                "user_id string, " +
                "sku_id string, " +
                "sku_name string, " +
                "province_id string, " +
                "activity_id string, " +
                "activity_rule_id string, " +
                "coupon_id string, " +
                "payment_type_code string, " +
                "payment_type_name string, " +
                "callback_time string, " +
                "source_id string, " +
                "source_type_code string, " +
                "source_type_name string, " +
                "sku_num string, " +
                "split_original_amount string, " +
                "split_activity_amount string, " +
                "split_coupon_amount string, " +
                "split_payment_amount string, " +
                "ts string, " +
                "row_op_ts timestamp_ltz(3), " +
                "primary key(order_detail_id) not enforced " +
                ")" + SQLUtil.getUpsertKafkaDDL("dwd_trade_pay_detail_suc"));
        
        // 7. Write the joined result into the sink.
        tEnv.executeSql(
            "insert into dwd_trade_pay_detail_suc select * from result_table");
    }
}
