package com.atliuzu.app.dwd.db;

import com.atliuzu.utils.MyKafkaUtil;
import java.time.Duration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD trade-order pre-processing job.
 *
 * <p>Reads CDC change records from the Kafka {@code topic_db} topic, filters out
 * {@code insert} rows for the {@code order_info} and {@code order_detail} tables of
 * the {@code gmall} database, joins detail rows with their order header, and writes
 * the joined result to the {@code dwd_trade_order_pre_process} Kafka topic.
 *
 * @author Administrator
 * @since 2022-08-19
 */
public class DwdTradeOrderPreProcess {
    public static void main(String[] args) {
        //TODO 1. Create the execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 for local development; in production this should match the
        // Kafka partition count of the source topic.
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // The two source tables are joined with a regular (unbounded) join below, so
        // Flink keeps every row of both sides in state forever unless a TTL is set.
        // Retain idle join state a bit longer than the maximum expected gap between
        // an order_info row and its matching order_detail rows.
        tableEnv.getConfig().setIdleStateRetention(Duration.ofSeconds(905L));

        //2. State backend / checkpointing — intentionally disabled for local runs;
        //   enable before deploying to a cluster.
//        env.enableCheckpointing(3000L, CheckpointingMode.EXACTLY_ONCE);
//        env.getCheckpointConfig().setCheckpointTimeout(60 * 1000L);
//        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
//        env.getCheckpointConfig().enableExternalizedCheckpoints(
//                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION
//        );
//        env.setRestartStrategy(RestartStrategies
//                .failureRateRestart(10,
//                        Time.of(3L, TimeUnit.DAYS),
//                        Time.of(1L, TimeUnit.MINUTES)));
//        env.setStateBackend(new HashMapStateBackend());
//        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
//        System.setProperty("HADOOP_USER_NAME", "atguigu");

        //TODO 2. Read CDC data from the Kafka topic_db topic and register it as a Flink SQL table.
        tableEnv.executeSql(MyKafkaUtil.getTopicDb("topic_db"));

        // Extract newly inserted order_info (order header) rows.
        Table orderInfo = tableEnv.sqlQuery("select " +
                "`data`['id'] id, " +
                "`data`['user_id'] user_id, " +
                "`data`['origin_amount'] origin_amount, " +
                "`data`['coupon_reduce'] coupon_reduce, " +
                "`data`['final_amount'] final_amount, " +
                "`data`['order_status'] order_status, " +
                "`data`['out_trade_no'] out_trade_no, " +
                "`data`['trade_body'] trade_body, " +
                "`data`['session_id'] session_id, " +
                "`data`['province_id'] province_id, " +
                "`data`['create_time'] create_time, " +
                "`data`['expire_time'] expire_time, " +
                "`data`['update_time'] update_time, " +
                "ts oi_ts " +
                "from topic_db " +
                " where  " +
                " `database`='gmall' " +
                " and `table`='order_info' " +
                " and  `type` = 'insert' ");
        tableEnv.createTemporaryView("orderinfo", orderInfo);

        // Extract newly inserted order_detail (order line item) rows.
        Table orderDetail = tableEnv.sqlQuery("select " +
                "`data`['id'] id, " +
                "`data`['course_id'] course_id, " +
                "`data`['course_name'] course_name, " +
                "`data`['order_id'] order_id, " +
                "`data`['user_id'] user_id, " +
                "`data`['origin_amount'] origin_amount, " +
                "`data`['coupon_reduce'] coupon_reduce, " +
                "`data`['final_amount'] final_amount, " +
                "`data`['create_time'] create_time, " +
                "`data`['update_time'] update_time, " +
                "ts od_ts " +
                "from topic_db " +
                " where  " +
                " `database`='gmall' " +
                " and `table`='order_detail' " +
                " and `type`='insert'");
        tableEnv.createTemporaryView("orderdetail", orderDetail);

        //TODO 3. Join detail rows with their order header (regular inner join; state
        //        lifetime is bounded by the idle-state retention configured above).
        Table resultTable = tableEnv.sqlQuery("select " +
                "    od.id, " +
                "    od.course_id, " +
                "    od.course_name, " +
                "    od.order_id, " +
                "    od.user_id, " +
                "    oi.order_status, " +
                "    oi.out_trade_no, " +
                "    oi.trade_body, " +
                "    oi.session_id, " +
                "    oi.province_id, " +
                "    od.origin_amount, " +
                "    od.coupon_reduce, " +
                "    od.final_amount, " +
                "    od.create_time, " +
                "    date_format(od.create_time, 'yyyy-MM-dd') date_id, " +
                "    od.update_time, " +
                "    date_format(od.update_time, 'yyyy-MM-dd') update_time_id, " +
                "    oi.expire_time, " +
                "    od_ts, " +
                "    oi_ts, " +
                // Processing-time marker so downstream consumers can order versions
                // of the same row emitted by this job.
                "    current_row_timestamp() row_op_ts " +
                "from orderdetail od " +
                "join orderinfo oi " +
                "on od.order_id=oi.id");
        tableEnv.createTemporaryView("resulttable", resultTable);

        //TODO 4. Declare the Kafka sink table for the pre-processing topic.
        tableEnv.executeSql("create table dwd_trade_order_pre_process( " +
                "    id STRING, " +
                "    course_id STRING, " +
                "    course_name STRING, " +
                "    order_id STRING, " +
                "    user_id STRING, " +
                "    order_status STRING, " +
                "    out_trade_no STRING, " +
                "    trade_body STRING, " +
                "    session_id STRING, " +
                "    province_id STRING, " +
                "    origin_amount STRING, " +
                "    coupon_reduce STRING, " +
                "    final_amount STRING, " +
                "    create_time STRING, " +
                "    date_id STRING, " +
                "    update_time STRING, " +
                "    update_time_id STRING, " +
                "    expire_time STRING, " +
                "    od_ts BIGINT, " +
                "    oi_ts BIGINT, " +
                "    row_op_ts timestamp_ltz(3) " +
                ")" + MyKafkaUtil.getInsertKafkaDDL("dwd_trade_order_pre_process"));

        //TODO 5. Write the joined result to the sink topic. executeSql submits the
        //        streaming INSERT job; no explicit env.execute() is required.
        tableEnv.executeSql("insert into dwd_trade_order_pre_process select * from resulttable");
    }
}
