package com.atliuzu.app.dwd.db;

import com.atliuzu.utils.MyKafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Trade domain: order-detail transactional fact table (DWD layer).
 *
 * <p>Reads pre-processed order records from the Kafka topic
 * {@code dwd_trade_order_pre_process}, projects the order-detail columns
 * (dropping {@code oi_ts}, which belongs to the order-info side), and writes
 * the result to the Kafka topic {@code dwd_trade_order_detail}.
 *
 * @author Administrator
 * @since 2022-08-19
 */
public class DwdTradeOrderDetail {
    public static void main(String[] args) {
        // TODO 1. Create the stream and table execution environments.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 is for local development; raise for production throughput.
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // TODO 2. Checkpointing / state backend configuration.
        // NOTE(review): deliberately disabled for local runs — re-enable before
        // deploying to production so the job gets exactly-once guarantees.
//        env.enableCheckpointing(3000L, CheckpointingMode.EXACTLY_ONCE);
//        env.getCheckpointConfig().setCheckpointTimeout(60 * 1000L);
//        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
//        env.getCheckpointConfig().enableExternalizedCheckpoints(
//                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION
//        );
//        env.setRestartStrategy(RestartStrategies
//                .failureRateRestart(10,
//                        Time.of(3L, TimeUnit.DAYS),
//                        Time.of(1L, TimeUnit.MINUTES)));
//        env.setStateBackend(new HashMapStateBackend());
//        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
//        System.setProperty("HADOOP_USER_NAME", "atguigu");

        // TODO 3. Read the Kafka topic dwd_trade_order_pre_process into a Flink SQL table.
        tableEnv.executeSql("create table order_pre_process( " +
                "    id STRING, " +
                "    course_id STRING, " +
                "    course_name STRING, " +
                "    order_id STRING, " +
                "    user_id STRING, " +
                "    order_status STRING, " +
                "    out_trade_no STRING, " +
                "    trade_body STRING, " +
                "    session_id STRING, " +
                "    province_id STRING, " +
                "    origin_amount STRING, " +
                "    coupon_reduce STRING, " +
                "    final_amount STRING, " +
                "    create_time STRING, " +
                "    date_id STRING, " +
                "    update_time STRING, " +
                "    update_time_id STRING, " +
                "    expire_time STRING, " +
                "    od_ts BIGINT, " +
                "    oi_ts BIGINT, " +
                "    row_op_ts TIMESTAMP_LTZ(3) " +
                ")" + MyKafkaUtil.getKafkaDDL("dwd_trade_order_pre_process", "group1"));

        // TODO 4. Project the order-detail columns (oi_ts is intentionally dropped).
        Table resultTable = tableEnv.sqlQuery("select " +
                "id, " +
                "user_id, " +
                "course_id, " +
                "course_name, " +
                "order_id, " +
                "order_status, " +
                "out_trade_no, " +
                "trade_body, " +
                "session_id, " +
                "province_id, " +
                "origin_amount, " +
                "coupon_reduce, " +
                "final_amount, " +
                "create_time, " +
                "date_id, " +
                "update_time, " +
                "update_time_id, " +
                "expire_time, " +
                "od_ts, " +
                "row_op_ts " +
                "from order_pre_process");
        tableEnv.createTemporaryView("result_table", resultTable);

        // TODO 5. Declare the Kafka sink table dwd_trade_order_detail.
        // Column order must match the projection above, since the insert uses "select *".
        tableEnv.executeSql("create table dwd_trade_order_detail( " +
                "id STRING, " +
                "user_id STRING, " +
                "course_id STRING, " +
                "course_name STRING, " +
                "order_id STRING, " +
                "order_status STRING, " +
                "out_trade_no STRING, " +
                "trade_body STRING, " +
                "session_id STRING, " +
                "province_id STRING, " +
                "origin_amount STRING, " +
                "coupon_reduce STRING, " +
                "final_amount STRING, " +
                "create_time STRING, " +
                "date_id STRING, " +
                "update_time STRING, " +
                "update_time_id STRING, " +
                "expire_time STRING, " +
                "od_ts BIGINT, " +
                "row_op_ts TIMESTAMP_LTZ(3) " +
                ")" + MyKafkaUtil.getInsertKafkaDDL("dwd_trade_order_detail"));

        // TODO 6. Submit the streaming insert into the Kafka sink.
        // executeSql on an INSERT submits the job; no env.execute() is needed.
        tableEnv.executeSql("insert into dwd_trade_order_detail select * from result_table");
    }
}
