package com.atguigu.app.dwd.db;

import com.atguigu.util.KafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.ZoneId;

/**
 * Trade domain: order-detail transaction fact table (DWD layer).
 * Reads the pre-processed order stream, keeps only insert events, and writes
 * them to a new Kafka topic as the order-detail fact table.
 * Required running processes: zk, kafka, maxwell, DwdTradeOrderPreProcess, DwdTradeOrderDetail
 *
 * @author hangyangfei
 */
public class DwdTradeOrderDetail {

    /**
     * Entry point: builds and submits the streaming job.
     *
     * Pipeline: Kafka topic {@code dwd_trade_order_pre_process}
     * -> filter rows where {@code type = 'insert'} (new orders only)
     * -> upsert-kafka sink topic {@code dwd_trade_order_detail}.
     */
    public static void main(String[] args) {
        // TODO 1 Prepare the stream / table environments.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // TODO 2 State backend / checkpointing (disabled during development;
        // re-enable for production — NOTE(review): confirm deployment policy).
        /*
        env.enableCheckpointing(5 * 60 * 1000L, CheckpointingMode.EXACTLY_ONCE );
        env.getCheckpointConfig().setCheckpointTimeout( 3 * 60 * 1000L );
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        System.setProperty("HADOOP_USER_NAME", "atguigu");
         */

        // Use GMT+8 so TIMESTAMP_LTZ columns render in local (Beijing) time.
        tableEnv.getConfig().setLocalTimeZone(ZoneId.of("GMT+8"));

        // TODO 3 Read the pre-processed order stream from Kafka.
        String topicName = "dwd_trade_order_pre_process";
        String groupId = "dwd_trade_order_detail";
        tableEnv.executeSql("create table order_pre(\n" +
                        "   id string,\n" +
                        "   order_id string,\n" +
                        "   course_id string,\n" +
                        "   course_name string,\n" +
                        "   user_id string,\n" +
                        "   origin_amount string,\n" +
                        "   coupon_reduce string,\n" +
                        "   final_amount string,\n" +
                        "   create_time string,\n" +
                        "   date_id string,\n" +
                        "   od_ts bigint,\n" +
                        "   order_status string,\n" +
                        "   province_id string,\n" +
                        "   session_id string,\n" +
                        "   expire_time string,\n" +
                        "   expire_time_date string,\n" +
                        "   oi_ts bigint,\n" +
                        "   pt TIMESTAMP_LTZ(3),\n" +
                        "   `type` string,\n" +
                        "   `old` map<string,string>,\n" +
                        "   row_op_ts TIMESTAMP_LTZ(3)\n" +
                        ")" + KafkaUtil.getKafkaDDL(topicName, groupId));

        // TODO 4 Keep only newly-inserted orders (drop updates/deletes).
        // `type` is backticked: it is a reserved word in Flink SQL, matching
        // the quoting already used in the DDL above.
        Table filterTable = tableEnv.sqlQuery("select \n" +
                "  id,\n" +
                "  order_id,\n" +
                "  course_id,\n" +
                "  course_name,\n" +
                "  user_id,\n" +
                "  origin_amount,\n" +
                "  coupon_reduce,\n" +
                "  final_amount,\n" +
                "  create_time,\n" +
                "  date_id,\n" +
                "  od_ts,\n" +
                "  order_status,\n" +
                "  province_id,\n" +
                "  session_id,\n" +
                "  expire_time,\n" +
                "  expire_time_date,\n" +
                "  oi_ts,\n" +
                "  pt,\n" +
                "  row_op_ts\n" +
                "from order_pre\n" +
                "where `type` = 'insert'");
        tableEnv.createTemporaryView("filter_table", filterTable);

        // NOTE(review): removed a leftover debug statement here —
        // tableEnv.executeSql("select * from filter_table").print();
        // On an unbounded stream, TableResult.print() blocks forever, so the
        // insert below was never submitted and no data ever reached the sink.

        // TODO 5 Write the filtered rows to the downstream Kafka topic.
        // PRIMARY KEY on id is required by the upsert-kafka connector
        // (NOT ENFORCED: Flink does not validate uniqueness).
        tableEnv.executeSql("create table order_detail(\n" +
                "  id string,\n" +
                "  order_id string,\n" +
                "  course_id string,\n" +
                "  course_name string,\n" +
                "  user_id string,\n" +
                "  origin_amount string,\n" +
                "  coupon_reduce string,\n" +
                "  final_amount string,\n" +
                "  create_time string,\n" +
                "  date_id string,\n" +
                "  od_ts bigint,\n" +
                "  order_status string,\n" +
                "  province_id string,\n" +
                "  session_id string,\n" +
                "  expire_time string,\n" +
                "  expire_time_date string,\n" +
                "  oi_ts bigint,\n" +
                "  pt TIMESTAMP_LTZ(3),\n" +
                "  row_op_ts TIMESTAMP_LTZ(3),\n" +
                "  PRIMARY KEY (id) NOT ENFORCED\n" +
                ")" + KafkaUtil.getUpsertKafkaSinkDDL("dwd_trade_order_detail"));

        // executeSql on an INSERT submits the streaming job asynchronously;
        // no explicit env.execute() is needed for a pure Table API pipeline.
        tableEnv.executeSql("insert into order_detail select * from filter_table");
    }
}
