package com.atguigu.edu.realtime.app.dwd.db;

import com.atguigu.edu.realtime.common.kafkaTopics;
import com.atguigu.edu.realtime.util.KafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.Duration;
import java.time.ZoneId;

/**
 * @author Lec
 * @date 2022/9/5 15:02
 */

/**
 * DWD-layer Flink job: reads pre-processed trade records from the
 * {@code dwd_trade_order_pre_process} Kafka topic, keeps only newly inserted
 * rows (i.e. order placements, {@code type = 'insert'}), and writes them to
 * the {@code dwd_trade_order_detail} Kafka topic.
 *
 * <p>Runs with parallelism 1 and the GMT+8 session time zone so that
 * {@code TIMESTAMP_LTZ} columns render in local (China) time.
 */
public class DwdTradeOrderDetail {

    public static void main(String[] args) {
        // TODO 1. Set up the stream execution environment and its Table API bridge.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        /*----------------- Session time zone configuration -----------------*/
        // GMT+8 so TIMESTAMP_LTZ(3) values are interpreted/displayed in local time.
        tableEnv.getConfig().setLocalTimeZone(ZoneId.of("GMT+8"));

        // TODO 2. Checkpointing / state backend (disabled for local development;
        // enable before deploying to production).
        /*
        env.enableCheckpointing(5 * 60 * 1000L, CheckpointingMode.EXACTLY_ONCE );
        env.getCheckpointConfig().setCheckpointTimeout( 3 * 60 * 1000L );
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        System.setProperty("HADOOP_USER_NAME", "atguigu");
        */

        // TODO 3. Read the pre-processed order stream from Kafka.
        String sourceTopic = "dwd_trade_order_pre_process";
        String groupId = "dwd_trade_order_detail";
        // FIX(review): the original passed the consumer group id to
        // getKafkaSinkDDL where a topic name is expected; it only worked
        // because the two strings happened to be identical. Use an explicit
        // sink-topic constant instead (same value, behavior unchanged).
        String sinkTopic = "dwd_trade_order_detail";

        tableEnv.executeSql("create table order_pre (\n" +
                "  id string,\n" +
                "  order_id string,\n" +
                "  course_id string,\n" +
                "  course_name string,\n" +
                "  user_id string,\n" +
                "  origin_amount string,\n" +
                "  coupon_reduce string,\n" +
                "  final_amount string, \n" +
                "  create_time string,\n" +
                "  update_time string,\n" +
                "  od_ts bigint, \n" +
                "  order_status string,\n" +
                "  session_id string,\n" +
                "  sc string,\n" +
                "  province_id string,\n" +
                "  oi_ts bigint, \n" +
                "  pt TIMESTAMP_LTZ(3),\n" +
                "  `type` string,\n" +
                "  `old` map<string,string>,\n" +
                "  row_op_ts TIMESTAMP_LTZ(3)\n" +
                ")" + KafkaUtil.getKafkaDDL(sourceTopic, groupId));

        // TODO 4. Keep only order placements: an order counts as "placed" when
        // the change-log row is an insert (`type` = 'insert'). The `type` and
        // `old` bookkeeping columns are dropped here.
        Table orderTable = tableEnv.sqlQuery("select\n" +
                "  id , \n" +
                "  order_id , \n" +
                "  course_id , \n" +
                "  course_name , \n" +
                "  user_id , \n" +
                "  origin_amount , \n" +
                "  coupon_reduce , \n" +
                "  final_amount , \n" +
                "  create_time , \n" +
                "  update_time , \n" +
                "  od_ts , \n" +
                "  order_status , \n" +
                "  session_id , \n" +
                "  sc , \n" +
                "  province_id , \n" +
                "  oi_ts , \n" +
                "  pt , \n" +
                "  row_op_ts\n" +
                "from order_pre \n" +
                "where `type` = 'insert'");
        tableEnv.createTemporaryView("orderDetail", orderTable);

        // TODO 5. Write the filtered rows to the sink Kafka topic. The sink
        // schema mirrors the projection above, so `select *` lines up 1:1.
        tableEnv.executeSql("create table order_detail (\n" +
                "  id  string,\n" +
                "  order_id  string,\n" +
                "  course_id  string,\n" +
                "  course_name  string,\n" +
                "  user_id  string,\n" +
                "  origin_amount  string,\n" +
                "  coupon_reduce  string,\n" +
                "  final_amount  string, \n" +
                "  create_time  string,\n" +
                "  update_time  string,\n" +
                "  od_ts  bigint,\n" +
                "  order_status  string,\n" +
                "  session_id  string,\n" +
                "  sc  string,\n" +
                "  province_id  string, \n" +
                "  oi_ts  bigint,\n" +
                "  pt  TIMESTAMP_LTZ(3),\n" +
                "  row_op_ts TIMESTAMP_LTZ(3)\n" +
                ")" + KafkaUtil.getKafkaSinkDDL(sinkTopic));

        // executeSql on an INSERT submits the job; no env.execute() needed.
        tableEnv.executeSql("insert into order_detail select * from orderDetail");
    }
}
