package com.atguigu.app.dwd.db;

import com.atguigu.utils.KafkaUtil;
import com.atguigu.utils.MySqlUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * ClassName: DwdTradeOrderDetail
 * Package: com.atguigu.app.dwd.db
 * Description: Builds the DWD trade order-detail fact table — joins the
 * order_detail and order_info CDC streams with the page log, enriches the
 * traffic source via a MySQL lookup table, and writes the result to Kafka.
 *
 * @Author Lovxy
 * @Create 2023/5/15 20:40
 * @Version 1.0
 */
//Data flow: web/app -> MySQL -> Maxwell -> Kafka(ODS) -> FlinkApp -> Kafka(DWD)
//Programs : Mock -> MySQL -> Maxwell -> Kafka(ZK) -> DwdTradeOrderDetail -> Kafka(ZK)
public class DwdTradeOrderDetail {

    public static void main(String[] args) {
        // TODO 1. Prepare the streaming environment and its SQL table wrapper.
        StreamExecutionEnvironment executionEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        executionEnv.setParallelism(4);
        StreamTableEnvironment sqlEnv = StreamTableEnvironment.create(executionEnv);

        // TODO 2. Checkpointing / state-backend configuration.
        // Intentionally left disabled (e.g. for local development); re-enable
        // before a production deployment so the job is fault tolerant.
//        executionEnv.enableCheckpointing(3000L, CheckpointingMode.EXACTLY_ONCE);
//        executionEnv.getCheckpointConfig().setCheckpointTimeout(60 * 1000L);
//        executionEnv.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
//        executionEnv.getCheckpointConfig().enableExternalizedCheckpoints(
//                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION
//        );
//        executionEnv.setRestartStrategy(RestartStrategies.failureRateRestart(
//                10, Time.of(1L, TimeUnit.DAYS), Time.of(3L, TimeUnit.MINUTES)
//        ));
//        executionEnv.setStateBackend(new HashMapStateBackend());
//        executionEnv.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
//        System.setProperty("HADOOP_USER_NAME", "atguigu");

        // TODO 3. Register the Kafka topic_db (Maxwell CDC) source table via DDL.
        sqlEnv.executeSql(KafkaUtil.getTopicDbDDL("dwd_trade_order_detail"));

        // TODO 4. Filter the CDC stream down to freshly inserted order-detail rows,
        // unpacking the fields we need from the Maxwell `data` map.
        Table orderDetailTable = sqlEnv.sqlQuery(
                "select\n" +
                " `data`['id'] id,\n" +
                " `data`['course_id'] course_id,\n" +
                " `data`['course_name'] course_name,\n" +
                " `data`['order_id'] order_id,\n" +
                " `data`['user_id'] user_id,\n" +
                " `data`['origin_amount'] origin_amount,\n" +
                " `data`['coupon_reduce'] coupon_reduce,\n" +
                " `data`['final_amount'] final_amount,\n" +
                " `data`['create_time'] create_time,\n" +
                " `data`['update_time'] update_time,\n" +
                "    `pt`\n" +
                " from topic_db\n" +
                " where `database`='edu'\n" +
                " and `table`='order_detail'\n" +
                " and `type`='insert'");
        sqlEnv.createTemporaryView("order_detail", orderDetailTable);

        // TODO 5. Same filtering for the order_info table (header-level fields).
        Table orderInfoTable = sqlEnv.sqlQuery(
                "select\n" +
                " `data`['id'] id ,\n" +
                " `data`['province_id'] province_id, \n" +
                " `data`['order_status'] order_status ,\n" +
                "    `pt` \n" +
                " from topic_db\n" +
                " where `database`='edu'\n" +
                " and `table`='order_info'\n" +
                " and `type`='insert'\n");
        sqlEnv.createTemporaryView("order_info", orderInfoTable);

        // TODO 6. Register the Kafka topic_log source table (page/behavior log).
        sqlEnv.executeSql(KafkaUtil.getTopicLogDDL("order_detail_log"));

        // TODO 7. Register the MySQL base_source dimension as a LookUp table.
        sqlEnv.executeSql(MySqlUtil.getMysqlBaseSourceDDL());

        // TODO 8. Three-way join: detail rows + order header (by order id) +
        // log records (by user id, to pick up the traffic-source code `sc`).
        Table joinedTable = sqlEnv.sqlQuery(
                " select\n" +
                " od.id,\n" +
                " od.course_id,\n" +
                " od.course_name,\n" +
                " od.order_id,\n" +
                " od.user_id,\n" +
                " od.origin_amount,\n" +
                " od.coupon_reduce,\n" +
                " od.final_amount,\n" +
                " od.create_time,\n" +
                " od.update_time,\n" +
                " od.pt ,\n" +
                " oi.province_id,\n" +
                " oi.order_status,\n" +
                " log.`common`['sc'] sc\n" +
                " from order_detail od\n" +
                " join order_info oi\n" +
                " on od.order_id=oi.id\n" +
                " join topic_log log\n" +
                " on od.user_id=log.`common`['uid']");
        sqlEnv.createTemporaryView("join_table", joinedTable);

        // TODO 9. Lookup join against base_source (processing-time temporal join)
        // to resolve the source code into a human-readable source_name.
        Table enrichedTable = sqlEnv.sqlQuery(
                "select\n" +
                "    jt.id,\n" +
                "    jt.course_id,\n" +
                "    jt.course_name,\n" +
                "    jt.order_id,\n" +
                "    jt.user_id,\n" +
                "    jt.origin_amount,\n" +
                "    jt.coupon_reduce,\n" +
                "    jt.final_amount,\n" +
                "    jt.create_time,\n" +
                "    jt.update_time,\n" +
                "    jt.province_id,\n" +
                "    jt.order_status,\n" +
                "    jt.sc,\n" +
                "    source.source_site source_name\n" +
                "from join_table jt\n" +
                " join `base_source` for system_time as of jt.pt  as source\n" +
                " on jt.sc=source.id");
        sqlEnv.createTemporaryView("result_table", enrichedTable);

        // TODO 10. Declare the DWD order-detail fact topic as an upsert-kafka
        // sink, keyed on the detail id.
        sqlEnv.executeSql(" create table dwd_trade_order_detail(\n" +
                " id STRING,\n" +
                " course_id STRING,\n" +
                " course_name STRING,\n" +
                " order_id STRING,\n" +
                " user_id STRING,\n" +
                " origin_amount STRING,\n" +
                " coupon_reduce STRING,\n" +
                " final_amount STRING,\n" +
                " create_time STRING,\n" +
                " update_time STRING,\n" +
                " province_id STRING,\n" +
                " order_status STRING,\n" +
                " sc STRING,\n" +
                " source_name STRING,\n" +
                "primary key(id) not enforced\n" +
                ")" + KafkaUtil.getKafkaUpsertSinkConnOption("dwd_trade_order_detail"));

        // TODO 11. Submit the continuous INSERT — this statement launches the job.
        sqlEnv.executeSql("insert into dwd_trade_order_detail select * from result_table");
    }
}
