package com.atguigu.gmall.app.dwd.db;

import com.atguigu.gmall.util.KafkaUtil;
import com.atguigu.gmall.util.MySQLUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.Duration;

/**
 * @author yhm
 * @create 2022-09-14 9:10
 */
public class DwdTradePayDetailSuc {
    /**
     * DWD-layer Flink job: builds the trade payment-success detail fact table.
     * Joins the order-detail fact stream with payment-success events filtered
     * from topic_db, enriches the payment type via a lookup join on the MySQL
     * base_dic dictionary table, and writes the result back to Kafka.
     */
    public static void main(String[] args) {
        // TODO 1 Prepare stream and table environments
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // The regular join below keeps both input sides in state. Payment must
        // complete within 15 minutes of ordering, so retain idle state for
        // 900s plus a 5s safety margin before Flink may clear it.
        tableEnv.getConfig().setIdleStateRetention(Duration.ofSeconds(905L));

        // TODO 2 Checkpointing / state backend (disabled for local development)
        /*
        env.enableCheckpointing(5 * 60 * 1000L, CheckpointingMode.EXACTLY_ONCE );
        env.getCheckpointConfig().setCheckpointTimeout( 3 * 60 * 1000L );
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        System.setProperty("HADOOP_USER_NAME", "atguigu");
         */

        // TODO 3 Read the order-detail transaction fact table from Kafka
        String topicName = "dwd_trade_order_detail";
        String groupId = "dwd_trade_pay_detail_suc";
        tableEnv.executeSql("create table order_detail(\n" +
                "  id STRING ,\n" +
                "  order_id STRING ,\n" +
                "  sku_id STRING ,\n" +
                "  sku_name STRING ,\n" +
                "  order_price STRING ,\n" +
                "  sku_num STRING ,\n" +
                "  create_time STRING ,\n" +
                "  source_type STRING ,\n" +
                "  source_id STRING ,\n" +
                "  split_original_amount STRING ,\n" +
                "  split_total_amount STRING ,\n" +
                "  split_activity_amount STRING ,\n" +
                "  split_coupon_amount STRING ,\n" +
                "  od_ts BIGINT ,\n" +
                "  od_pt TIMESTAMP_LTZ(3) ,\n" +
                "  user_id STRING ,\n" +
                "  province_id STRING ,\n" +
                "  operate_time STRING ,\n" +
                "  order_status STRING ,\n" +
                "  oi_ts BIGINT ,\n" +
                "  oi_pt TIMESTAMP_LTZ(3) ,\n" +
                "  activity_id STRING ,\n" +
                "  activity_rule_id STRING ,\n" +
                "  coupon_id STRING" +
                ")" + KafkaUtil.getKafkaDDL(topicName,groupId));

        // TODO 4 Read topic_db and filter for payment-success records
        tableEnv.executeSql("CREATE TABLE topic_db (\n" +
                "  `database` STRING,\n" +
                "  `table` STRING,\n" +
                "  `type` STRING,\n" +
                "  `ts` bigint,\n" +
                "  `data` MAP<STRING,STRING>,\n" +
                "  `old` MAP<STRING,STRING>,\n" +
                // Processing-time attribute required by the lookup join in step 6.
                "  `pt` as proctime()\n" +
                ")" + KafkaUtil.getKafkaDDL("topic_db",groupId));

        // payment_status 1602 marks a successful payment; it arrives as an
        // UPDATE on the payment_info row.
        Table paySucTable = tableEnv.sqlQuery("select \n" +
                "  data['order_id'] order_id,\n" +
                "  data['user_id'] user_id,\n" +
                "  data['payment_type'] payment_type,\n" +
                "  data['callback_time'] callback_time,\n" +
                "  `ts`,\n" +
                "  `pt`\n" +
                "from topic_db\n" +
                "where `table`='payment_info'\n" +
                "and (`type`='update' and `data`['payment_status']='1602')");
        tableEnv.createTemporaryView("pay_success",paySucTable);

        // TODO 5 Register the MySQL base_dic dictionary table (JDBC lookup source)
        tableEnv.executeSql(MySQLUtil.getBaseDicDDL());

        // TODO 6 Join the three tables: payment success x order detail,
        // plus a processing-time temporal (lookup) join to decode payment_type.
        Table paySucDetail = tableEnv.sqlQuery("select \n" +
                "  od.id ,\n" +
                "  od.order_id ,\n" +
                "  od.sku_id ,\n" +
                "  od.sku_name ,\n" +
                "  od.order_price ,\n" +
                "  od.sku_num ,\n" +
                "  od.create_time ,\n" +
                "  od.source_type ,\n" +
                "  od.source_id ,\n" +
                "  od.split_original_amount ,\n" +
                "  od.split_total_amount ,\n" +
                "  od.split_activity_amount ,\n" +
                "  od.split_coupon_amount ,\n" +
                "  od.od_ts ,\n" +
                "  od.od_pt ,\n" +
                "  od.user_id ,\n" +
                "  od.province_id ,\n" +
                "  od.operate_time ,\n" +
                "  od.oi_ts ,\n" +
                "  od.oi_pt ,\n" +
                "  od.activity_id ,\n" +
                "  od.activity_rule_id ,\n" +
                "  od.coupon_id ,\n" +
                "  bd.dic_name payment_type,\n" +
                "  ps.callback_time,\n" +
                "  ps.ts ps_ts,\n" +
                "  ps.pt ps_pt\n" +
                "from pay_success ps\n" +
                "join order_detail od\n" +
                "on ps.order_id=od.order_id\n" +
                "join base_dic FOR SYSTEM_TIME AS OF ps.pt as bd\n" +
                "on ps.payment_type=bd.dic_code");
        tableEnv.createTemporaryView("pay_suc_result",paySucDetail);


        // TODO 7 Write the result out to Kafka.
        // NOTE: sink table renamed pay_sec_detail -> pay_suc_detail ("suc" =
        // success), consistent with pay_suc_result and the target topic name.
        String targetTopic = "dwd_trade_pay_detail_suc";
        tableEnv.executeSql("create table pay_suc_detail(\n" +
                "  id STRING,\n" +
                "  order_id STRING,\n" +
                "  sku_id STRING,\n" +
                "  sku_name STRING,\n" +
                "  order_price STRING,\n" +
                "  sku_num STRING,\n" +
                "  create_time STRING,\n" +
                "  source_type STRING,\n" +
                "  source_id STRING,\n" +
                "  split_original_amount STRING,\n" +
                "  split_total_amount STRING,\n" +
                "  split_activity_amount STRING,\n" +
                "  split_coupon_amount STRING,\n" +
                "  od_ts bigint,\n" +
                "  od_pt TIMESTAMP_LTZ(3),\n" +
                "  user_id STRING,\n" +
                "  province_id STRING,\n" +
                "  operate_time STRING,\n" +
                "  oi_ts bigint,\n" +
                "  oi_pt TIMESTAMP_LTZ(3),\n" +
                "  activity_id STRING,\n" +
                "  activity_rule_id STRING,\n" +
                "  coupon_id STRING,\n" +
                "  payment_type STRING,\n" +
                "  callback_time STRING,\n" +
                "  ps_ts bigint,\n" +
                "  ps_pt TIMESTAMP_LTZ(3)\n" +
                ")" + KafkaUtil.getKafkaSinkDDL(targetTopic));
        // Submits the streaming INSERT job; no env.execute() needed for SQL inserts.
        tableEnv.executeSql("insert into pay_suc_detail select * from pay_suc_result");

    }
}
