package com.atguigu.gmall.app.dwd.db;

import com.atguigu.gmall.util.KafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @author yhm
 * @create 2022-09-13 15:18
 */
public class DwdTradeOrderDetail {

    /**
     * Entry point: builds and submits the Flink SQL pipeline that reads the
     * order pre-process topic, keeps only freshly inserted order-detail rows,
     * and publishes them to the dwd_trade_order_detail topic through an
     * upsert-kafka sink keyed by detail id.
     */
    public static void main(String[] args) {
        // TODO 1 Set up the streaming environment and its Table API bridge
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // TODO 2 Checkpointing / state-backend configuration
        // (left disabled for local development; enable for cluster runs)
        /*
        env.enableCheckpointing(5 * 60 * 1000L, CheckpointingMode.EXACTLY_ONCE );
        env.getCheckpointConfig().setCheckpointTimeout( 3 * 60 * 1000L );
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        System.setProperty("HADOOP_USER_NAME", "atguigu");
         */

        // TODO 3 Declare the Kafka source table over the order pre-process topic
        String sourceTopic = "dwd_trade_order_pre_process";
        String consumerGroup = "dwd_trade_order_detail";
        String sourceDdl = "create table kafka_order_detail(\n" +
                "  id STRING ,\n" +
                "  order_id STRING ,\n" +
                "  sku_id STRING ,\n" +
                "  sku_name STRING ,\n" +
                "  order_price STRING ,\n" +
                "  sku_num STRING ,\n" +
                "  create_time STRING ,\n" +
                "  source_type STRING ,\n" +
                "  source_id STRING ,\n" +
                "  split_original_amount STRING ,\n" +
                "  split_total_amount STRING ,\n" +
                "  split_activity_amount STRING ,\n" +
                "  split_coupon_amount STRING ,\n" +
                "  od_ts BIGINT ,\n" +
                "  od_pt TIMESTAMP_LTZ(3) ,\n" +
                "  user_id STRING ,\n" +
                "  province_id STRING ,\n" +
                "  operate_time STRING ,\n" +
                "  order_status STRING ,\n" +
                "  oi_ts BIGINT ,\n" +
                "  oi_pt TIMESTAMP_LTZ(3) ,\n" +
                "  activity_id STRING ,\n" +
                "  activity_rule_id STRING ,\n" +
                "  coupon_id STRING," +
                "  type STRING  \n" +
                ")" + KafkaUtil.getKafkaDDL(sourceTopic, consumerGroup);
        tableEnv.executeSql(sourceDdl);

        // TODO 4 Keep only newly inserted rows (type='insert'); the extra
        // UNIX_TIMESTAMP() column `rt` tags each row with its processing time
        String filterInsertsSql = "select \n" +
                "  id ,\n" +
                "  order_id ,\n" +
                "  sku_id ,\n" +
                "  sku_name ,\n" +
                "  order_price ,\n" +
                "  sku_num ,\n" +
                "  create_time ,\n" +
                "  source_type ,\n" +
                "  source_id ,\n" +
                "  split_original_amount ,\n" +
                "  split_total_amount ,\n" +
                "  split_activity_amount ,\n" +
                "  split_coupon_amount ,\n" +
                "  od_ts ,\n" +
                "  od_pt ,\n" +
                "  user_id ,\n" +
                "  province_id ,\n" +
                "  operate_time ,\n" +
                "  order_status ,\n" +
                "  oi_ts ,\n" +
                "  oi_pt ,\n" +
                "  activity_id ,\n" +
                "  activity_rule_id ,\n" +
                "  coupon_id, \n" +
                "  UNIX_TIMESTAMP() rt \n" +
                "from kafka_order_detail\n" +
                "where `type`='insert'";
        Table filteredOrders = tableEnv.sqlQuery(filterInsertsSql);
        tableEnv.createTemporaryView("order_detail", filteredOrders);

        // TODO 5 Declare the upsert-kafka sink table (primary key = detail id)
        String sinkTopic = "dwd_trade_order_detail";
        String sinkDdl = "create table order_detail_result(\n" +
                "  id STRING ,\n" +
                "  order_id STRING ,\n" +
                "  sku_id STRING ,\n" +
                "  sku_name STRING ,\n" +
                "  order_price STRING ,\n" +
                "  sku_num STRING ,\n" +
                "  create_time STRING ,\n" +
                "  source_type STRING ,\n" +
                "  source_id STRING ,\n" +
                "  split_original_amount STRING ,\n" +
                "  split_total_amount STRING ,\n" +
                "  split_activity_amount STRING ,\n" +
                "  split_coupon_amount STRING ,\n" +
                "  od_ts BIGINT ,\n" +
                "  od_pt TIMESTAMP_LTZ(3) ,\n" +
                "  user_id STRING ,\n" +
                "  province_id STRING ,\n" +
                "  operate_time STRING ,\n" +
                "  order_status STRING ,\n" +
                "  oi_ts BIGINT ,\n" +
                "  oi_pt TIMESTAMP_LTZ(3) ,\n" +
                "  activity_id STRING ,\n" +
                "  activity_rule_id STRING ,\n" +
                "  coupon_id STRING," +
                "  rt bigint," +
                "  PRIMARY KEY (id) NOT ENFORCED \n \n" +
                ")" + KafkaUtil.getUpsertKafkaSinkDDL(sinkTopic);
        tableEnv.executeSql(sinkDdl);

        // Submit the continuous insert job; column order of order_detail
        // matches the sink schema, so `select *` maps positionally
        tableEnv.executeSql("insert into order_detail_result select * from order_detail");
    }
}
