package com.atguigu.edu.realtime.app.dwd.db;

import com.atguigu.edu.realtime.util.MyKafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @ClassName: DwdTradeOrderDetail
 * @Author: ghy
 * @Date: 2022/10/19 11:18
 * @Description: DWD-layer Flink job. Reads the pre-processed trade order table
 *               from Kafka, keeps only newly inserted (order-placement) rows,
 *               and writes the resulting order-detail records to an
 *               upsert-Kafka sink table keyed by {@code id}.
 **/
public class DwdTradeOrderDetail {
    public static void main(String[] args) {
        // TODO 1. Basic environment setup
        // 1.1 Stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // 1.2 Parallelism
        env.setParallelism(4);
        // 1.3 Table execution environment
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // TODO 2. Checkpoint configuration (intentionally omitted here)

        // TODO 3. Read the order pre-processing table from Kafka as a Flink dynamic table
        createPreOrderSourceTable(tableEnv);

        // TODO 4. Filter out order-placement ("insert") records
        registerFilteredOrders(tableEnv);

        // TODO 5. Declare the Kafka order-detail sink table
        createOrderDetailSinkTable(tableEnv);

        // TODO 6. Write the filtered records out to Kafka.
        //         executeSql submits the streaming INSERT job asynchronously.
        tableEnv.executeSql("insert into dwd_trade_order_detail select * from filtered_table");
    }

    /**
     * Declares the Kafka-backed source table over the pre-processed order
     * topic {@code dwd_trade_pre_order}.
     */
    private static void createPreOrderSourceTable(StreamTableEnvironment tableEnv) {
        tableEnv.executeSql("create table dwd_trade_pre_order(\n" +
                "id string,\n" +
                "course_id string,\n" +
                "course_name string,\n" +
                "order_id string,\n" +
                "user_id string,\n" +
                "origin_amount string,\n" +
                "coupon_reduce string,\n" +
                "final_amount string,\n" +
                "date_id string,\n" +
                "od_ts string,\n" +
                "order_status string,\n" +
                "session_id string,\n" +
                "province_id string,\n" +
                "update_date_id string,\n" +
                "`type` string,\n" +
                "`old` map<string,string>,\n" +
                "oi_ts string,\n" +
                "sc string,\n" +
                "ol_ts string,\n" +
                "row_op_ts timestamp_ltz(3)\n" +
                ")" + MyKafkaUtil.getKafkaDDL(
                "dwd_trade_pre_order", "dwd_trade_order_detail_group"));
    }

    /**
     * Selects only the rows whose CDC {@code type} is {@code insert} (i.e. the
     * original order placement, not later updates) and registers the result as
     * the temporary view {@code filtered_table}. The detail-table timestamp
     * {@code od_ts} is exposed under the sink column name {@code ts}.
     */
    private static void registerFilteredOrders(StreamTableEnvironment tableEnv) {
        Table filteredTable = tableEnv.sqlQuery("" + "select " +
                "id,\n" +
                "course_id,\n" +
                "course_name,\n" +
                "order_id,\n" +
                "user_id,\n" +
                "origin_amount,\n" +
                "coupon_reduce,\n" +
                "final_amount,\n" +
                "date_id,\n" +
                "od_ts ts,\n" +
                "session_id,\n" +
                "province_id,\n" +
                "sc,\n" +
                "row_op_ts\n" +
                "from dwd_trade_pre_order " +
                "where `type`='insert'");
        tableEnv.createTemporaryView("filtered_table", filteredTable);
    }

    /**
     * Declares the upsert-Kafka sink table {@code dwd_trade_order_detail}.
     * The column list must stay aligned (name for name, in order) with the
     * projection registered as {@code filtered_table}, because the final
     * INSERT uses {@code select *}.
     */
    private static void createOrderDetailSinkTable(StreamTableEnvironment tableEnv) {
        tableEnv.executeSql("create table dwd_trade_order_detail(\n" +
                "id string,\n" +
                "course_id string,\n" +
                "course_name string,\n" +
                "order_id string,\n" +
                "user_id string,\n" +
                "origin_amount string,\n" +
                "coupon_reduce string,\n" +
                "final_amount string,\n" +
                "date_id string,\n" +
                "ts string,\n" +
                "session_id string,\n" +
                "province_id string,\n" +
                "sc string,\n" +
                "row_op_ts timestamp_ltz(3),\n" +
                // Upsert sink: id is the Kafka record key; "not enforced" is
                // required because Flink cannot validate uniqueness on a sink.
                "primary key(id) not enforced\n" +
                ")" + MyKafkaUtil.getUpsertKafkaDDL("dwd_trade_order_detail"));
    }
}
