package com.zhu.app.dwd;

import com.zhu.utils.MySqlUtil;
import com.zhu.utils.ZhuKafkaUtil;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.ZoneId;

/**
 * DWDTradePayDetailSucApp — builds the DWD-layer "successful payment" transaction fact table.
 *
 * <p>Joins the business database's payment table ({@code payment_info}, arriving via the
 * {@code topic_db} CDC topic) with the order-detail topic ({@code dwd_trade_order_detail})
 * and enriches the payment type via a MySQL lookup table ({@code base_dic}).
 *
 * <p>Order-detail rows are produced at order time; a payment is normally completed within
 * 15 minutes of the order, so payment rows may lag order-detail rows by up to 15 minutes.
 * To tolerate out-of-order data the join-state TTL is set to 15 min + 5 s.
 *
 * <p>An inner join is used: every payment row is generated from an order row, so both
 * sides of the join are guaranteed to exist.
 */
public class DWDTradePayDetailSucApp {

    public static void main(String[] args) throws Exception {

        // todo environment setup
        StreamExecutionEnvironment streamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment();
        streamExecutionEnvironment.setParallelism(4); // match the Kafka topic's partition count (4)
        StreamTableEnvironment streamTableEnvironment = StreamTableEnvironment.create(streamExecutionEnvironment);
        streamTableEnvironment.getConfig().setLocalTimeZone(ZoneId.of("GMT+8"));
        // Expire idle join state after 15 min + 5 s = 905 s: payment lags order detail by
        // at most 15 min, and the extra 5 s absorbs out-of-order events.
        Configuration configuration = streamTableEnvironment.getConfig().getConfiguration();
        configuration.setString("table.exec.state.ttl", "905 s");

        // Checkpointing (disabled for local runs)
        /*
        streamExecutionEnvironment.setStateBackend(new HashMapStateBackend());
        streamExecutionEnvironment.getCheckpointConfig().setCheckpointStorage(ClusterParametersConfig.HDFS_CHECKPOINT_FILE_DIR);  // store checkpoints on HDFS
        System.setProperty("HADOOP_USER_NAME", "zhu");
        streamExecutionEnvironment.getCheckpointConfig().setCheckpointTimeout(10 * 60000L);  // checkpoint timeout
        streamExecutionEnvironment.getCheckpointConfig().setMaxConcurrentCheckpoints(2);  // max concurrent checkpoints
        streamExecutionEnvironment.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 5 * 1000L));  // restart strategy
        */

        // todo read the dwd_trade_order_detail Kafka topic
        streamTableEnvironment.executeSql(
                "create table dwd_trade_order_detail( " +
                        "id string, " +
                        "order_id string, " +
                        "user_id string, " +
                        "sku_id string, " +
                        "sku_name string, " +
                        "province_id string, " +
                        "activity_id string, " +
                        "activity_rule_id string, " +
                        "coupon_id string, " +
                        "source_id string, " +
                        "source_type_code string, " +
                        "source_type_name string, " +
                        "sku_num string, " +
                        "split_original_amount string, " +
                        "split_activity_amount string, " +
                        "split_coupon_amount string, " +
                        "split_total_amount string, " +
                        "row_op_ts timestamp_ltz(3) " +
                        ")" + ZhuKafkaUtil.getKafkaDDL("dwd_trade_order_detail","dwd_trade_pay_detail_suc"
                ));

        // todo read topic_db (CDC of the business database) for payment rows
        streamTableEnvironment.executeSql(ZhuKafkaUtil.getTopicDB("dwd_trade_pay_detail_suc"));

        // Filter topic_db down to payment_info rows and project the needed columns.
        // NOTE(review): the commented-out predicates would restrict the stream to
        // successful payments (type = 'update', payment_status = '1602') — confirm
        // whether they should be re-enabled before production use.
        Table paymentInfoTable = streamTableEnvironment.sqlQuery(
                "select " +
                        "data['user_id'] user_id, " +
                        "data['order_id'] order_id, " +
                        "data['payment_type'] payment_type, " +
                        "data['callback_time'] callback_time, " +
                        "pt " +
                        "from topic_db " +
                        "where `table` = 'payment_info' "
                        /*
                        "and `type` = 'update' " +
                        "and data['payment_status'] = '1602'"
                         */
        );
        streamTableEnvironment.createTemporaryView("payment_Info",paymentInfoTable);

        // todo register the base_dic MySQL lookup (dimension) table
        streamTableEnvironment.executeSql(MySqlUtil.getBaseDicLooKupDDL());

        // todo join: payment ⋈ order detail (inner), plus a temporal lookup join on
        // base_dic keyed by the payment's processing time to resolve the type name
        Table resultTable = streamTableEnvironment.sqlQuery(
                "select " +
                        "od.id order_detail_id,\n" +
                        "od.order_id,\n" +
                        "od.user_id,\n" +
                        "od.sku_id,\n" +
                        "od.sku_name,\n" +
                        "od.province_id,\n" +
                        "od.activity_id,\n" +
                        "od.activity_rule_id,\n" +
                        "od.coupon_id,\n" +
                        "pi.payment_type payment_type_code,\n" +
                        "dic.dic_name payment_type_name,\n" +
                        "pi.callback_time,\n" +
                        "od.source_id,\n" +
                        "od.source_type_code,\n" +
                        "od.source_type_name,\n" +
                        "od.sku_num,\n" +
                        "od.split_original_amount,\n" +
                        "od.split_activity_amount,\n" +
                        "od.split_coupon_amount,\n" +
                        "od.split_total_amount split_payment_amount,\n" +
                        "od.row_op_ts row_op_ts\n" +
                        "from payment_Info pi " +
                        "join dwd_trade_order_detail od on pi.order_id = od.order_id " +
                        "join `base_dic` for system_time as of pi.pt as dic " +
                        "on pi.payment_type = dic.dic_code "
        );
        streamTableEnvironment.createTemporaryView("result_table",resultTable);

        // todo create the upsert-Kafka sink table dwd_trade_pay_detail_suc
        // (primary key lets the upsert connector deduplicate retractions by order_detail_id)
        streamTableEnvironment.executeSql("create table dwd_trade_pay_detail_suc(\n" +
                        "order_detail_id string,\n" +
                                "order_id string,\n" +
                                "user_id string,\n" +
                                "sku_id string,\n" +
                                "sku_name string,\n" +
                                "province_id string,\n" +
                                "activity_id string,\n" +
                                "activity_rule_id string,\n" +
                                "coupon_id string,\n" +
                                "payment_type_code string,\n" +
                                "payment_type_name string,\n" +
                                "callback_time string,\n" +
                                "source_id string,\n" +
                                "source_type_code string,\n" +
                                "source_type_name string,\n" +
                                "sku_num string,\n" +
                                "split_original_amount string,\n" +
                                "split_activity_amount string,\n" +
                                "split_coupon_amount string,\n" +
                                "split_payment_amount string,\n" +
                                "row_op_ts timestamp_ltz(3),\n" +
                                "primary key(order_detail_id) not enforced\n" +
                                ")" +
                                ZhuKafkaUtil.getKafkaUpsertSinkDDL("dwd_trade_pay_detail_suc"));

        // executeSql submits the INSERT job itself; print() reports the submission result.
        streamTableEnvironment.executeSql("" +
                        "insert into dwd_trade_pay_detail_suc select * from result_table").print();

        // No streamExecutionEnvironment.execute() here: the pipeline is defined entirely
        // through the Table API, so the DataStream topology is empty and execute() would
        // throw IllegalStateException("No operators defined in streaming topology").
    }
}
