package com.atguigu.gmallrealtime.app.dwd.db;

import com.atguigu.gmallrealtime.common.Constant;
import com.atguigu.gmallrealtime.util.HBaseUtil;
import com.atguigu.gmallrealtime.util.MyKafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @author yhm
 * @create 2023-09-27 10:32
 */
public class DwdTradePayDetailSuc {

    /**
     * DWD job: builds the "successful payment detail" fact stream.
     *
     * <p>Pipeline:
     * <ol>
     *   <li>Read CDC changelog from the {@code topic_db} Kafka topic.</li>
     *   <li>Filter {@code payment_info} rows whose status transitioned to 1602 (payment success).</li>
     *   <li>Lookup-join {@code base_dic} (HBase) to resolve the payment-type name.</li>
     *   <li>Interval-join with the DWD order-detail topic on {@code order_id}.</li>
     *   <li>Write the joined result to the pay-detail-success topic via upsert-kafka.</li>
     * </ol>
     */
    public static void main(String[] args) {
        // TODO 1 Create the Flink environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // TODO 2 Checkpointing and state backend (disabled during development; enable for production)
        //        env.enableCheckpointing(3000L, CheckpointingMode.EXACTLY_ONCE);
        //
        //        //2.2 Checkpoint timeout
        //        env.getCheckpointConfig().setCheckpointTimeout(60000L);
        //        //2.3 Retain checkpoints after the job is cancelled
        //        env.getCheckpointConfig().setExternalizedCheckpointCleanup(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        //        //2.4 Minimum pause between two checkpoints
        //        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(2000L);
        //        //2.5 Restart strategy
        //        // env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3,3000L));
        //        env.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.days(30), Time.seconds(3)));
        //
        //        env.setStateBackend(new HashMapStateBackend());
        //        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        //
        //        System.setProperty("HADOOP_USER_NAME","atguigu");

        // TODO 3 Read the CDC changelog topic (topic_db).
        // `ts` is a Unix timestamp in seconds (FROM_UNIXTIME expects seconds);
        // the watermark uses zero out-of-orderness tolerance.
        String groupId = "dwd_trade_pay_detail_suc";
        tableEnv.executeSql("CREATE TABLE topic_db (\n" +
                "  `database` STRING,\n" +
                "  `table` STRING,\n" +
                "  `type` STRING,\n" +
                "  `ts`    BIGINT,\n" +
                "  `data` Map<STRING,STRING>,\n" +
                "  `old`  Map<STRING,STRING>,\n" +
                "   proc_time AS proctime() ,\n" +
                "   row_time AS TO_TIMESTAMP(FROM_UNIXTIME(ts)) ,\n" +
                "   WATERMARK FOR row_time AS row_time \n" +
                ") "
                +MyKafkaUtil.getKafkaDDL(Constant.TOPIC_ODS_DB,groupId));

        // TODO 4 Filter rows where payment succeeded.
        // Status 1602 = payment success; checking `old`['payment_status'] IS NOT NULL
        // ensures the status column actually changed in this update.
        Table payTable = tableEnv.sqlQuery("select \n" +
                "    data['id'] id, \n" +
                "    data['out_trade_no'] out_trade_no, \n" +
                "    data['order_id'] order_id, \n" +
                "    data['user_id'] user_id, \n" +
                "    data['payment_type'] payment_type, \n" +
                "    data['trade_no'] trade_no, \n" +
                "    data['subject'] subject, \n" +
                "    data['callback_time'] callback_time, \n" +
                "    data['callback_content'] callback_content, \n" +
                "    ts,\n" +
                "    proc_time,\n" +
                "    row_time\n" +
                "from topic_db\n" +
                "where `table`='payment_info'\n" +
                "and `type`='update'\n" +
                "and data['payment_status']='1602'\n" +
                "and `old`['payment_status'] is not null");

        tableEnv.createTemporaryView("pay",payTable);

        // TODO 5 Lookup-join the base_dic dimension (HBase) on processing time
        // to translate the payment_type code into a readable name.
        tableEnv.executeSql(HBaseUtil.getBaseDicDDL());

        Table payTypeTable = tableEnv.sqlQuery("SELECT \n" +
                "    id,\n" +
                "    out_trade_no,\n" +
                "    order_id,\n" +
                "    user_id,\n" +
                "    payment_type,\n" +
                "    info.dic_name payment_type_name,\n" +
                "    trade_no,\n" +
                "    subject,\n" +
                "    callback_time,\n" +
                "    callback_content,\n" +
                "    ts,\n" +
                "    proc_time,\n" +
                "    row_time\n" +
                "FROM pay AS p\n" +
                "left JOIN base_dic FOR SYSTEM_TIME AS OF p.proc_time AS b\n" +
                "    ON p.payment_type = b.rowkey");

        tableEnv.createTemporaryView("pay_type",payTypeTable);

        // TODO 6 Read the DWD order-detail topic produced upstream.
        tableEnv.executeSql("create table od(\n" +
                "    id STRING,\n" +
                "    order_id STRING,\n" +
                "    sku_id STRING,\n" +
                "    sku_name STRING,\n" +
                "    order_price STRING,\n" +
                "    sku_num STRING,\n" +
                "    create_time STRING,\n" +
                "    source_type STRING,\n" +
                "    source_id STRING,\n" +
                "    activity_id STRING,\n" +
                "    activity_rule_id STRING,\n" +
                "    coupon_id STRING,\n" +
                "    user_id STRING,\n" +
                "    province_id STRING,\n" +
                "    split_total_amount STRING,\n" +
                "    split_activity_amount STRING,\n" +
                "    split_coupon_amount STRING,\n" +
                "    ts BIGINT,\n" +
                "    proc_time TIMESTAMP(3), \n" +
                "   row_time AS TO_TIMESTAMP(FROM_UNIXTIME(ts)) ,\n" +
                "   WATERMARK FOR row_time AS row_time \n" +
                ")"
                + MyKafkaUtil.getKafkaDDL(Constant.TOPIC_DWD_TRADE_ORDER_DETAIL,groupId));

        // TODO 7 Interval-join payment success with order detail on order_id.
        // Payment success always follows order creation: allow the payment event
        // from 5 seconds BEFORE the order row (clock skew) up to 16 minutes AFTER
        // it (orders are payable for ~15 minutes, plus a 1-minute buffer).
        // The previous condition had the interval reversed (- '16' MINUTE / + '5' SECOND),
        // which silently dropped any payment arriving more than 5 s after the order.
        Table resultTable = tableEnv.sqlQuery("SELECT \n" +
                "  o.id,\n" +
                "  o.order_id,\n" +
                "  sku_id,\n" +
                "  sku_name,\n" +
                "  order_price,\n" +
                "  sku_num,\n" +
                "  create_time,\n" +
                "  source_type,\n" +
                "  source_id,\n" +
                "  activity_id,\n" +
                "  activity_rule_id,\n" +
                "  coupon_id,\n" +
                "  split_total_amount,\n" +
                "  split_activity_amount,\n" +
                "  split_coupon_amount,\n" +
                "  o.user_id,\n" +
                "  province_id,\n" +
                "  p.id pay_id,\n" +
                "  out_trade_no,\n" +
                "  payment_type,\n" +
                "  payment_type_name,\n" +
                "  trade_no,\n" +
                "  subject,\n" +
                "  callback_time,\n" +
                "  callback_content,\n" +
                "  p.row_time\n" +
                "FROM pay_type p, od o\n" +
                "WHERE p.order_id = o.order_id\n" +
                "AND p.row_time BETWEEN o.row_time - INTERVAL '5' SECOND AND o.row_time + INTERVAL '16' MINUTE\n");

        tableEnv.createTemporaryView("result_table",resultTable);

        // TODO 8 Write to Kafka via upsert-kafka (PK = order-detail id), so
        // retractions from the join are compacted into upserts downstream.
        // Column order must match result_table since the insert uses SELECT *.
        tableEnv.executeSql("create table kafka_sink(\n" +
                "  id STRING,\n" +
                "  order_id STRING,\n" +
                "  sku_id STRING,\n" +
                "  sku_name STRING,\n" +
                "  order_price STRING,\n" +
                "  sku_num STRING,\n" +
                "  create_time STRING,\n" +
                "  source_type STRING,\n" +
                "  source_id STRING,\n" +
                "  activity_id STRING,\n" +
                "  activity_rule_id STRING,\n" +
                "  coupon_id STRING,\n" +
                "  split_total_amount STRING,\n" +
                "  split_activity_amount STRING,\n" +
                "  split_coupon_amount STRING,\n" +
                "  user_id STRING,\n" +
                "  province_id STRING,\n" +
                "  pay_id STRING,\n" +
                "  out_trade_no STRING,\n" +
                "  payment_type STRING,\n" +
                "  payment_type_name STRING,\n" +
                "  trade_no STRING,\n" +
                "  subject STRING,\n" +
                "  callback_time STRING,\n" +
                "  callback_content STRING,\n" +
                "  row_time TIMESTAMP(3),\n" +
                "  PRIMARY KEY (id) NOT ENFORCED\n" +
                ")" + MyKafkaUtil.getUpsertKafkaDLL(Constant.TOPIC_DWD_TRADE_PAY_DETAIL_SUC));

        // executeSql submits the insert job itself; no env.execute() is needed
        // (there are no DataStream operators in this pipeline).
        tableEnv.executeSql("insert into kafka_sink select * from result_table");

    }
}
