package com.atguigu.edu.realtime.app.dwd.db;

import com.atguigu.edu.realtime.utils.MyKafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD-layer job: builds the trade order-detail fact table.
 *
 * <p>Reads the pre-processed order stream from the Kafka topic
 * {@code dwd_trade_order_pre_process}, keeps only rows whose change type is
 * {@code insert} (i.e. newly placed orders), and writes the projected columns
 * to the upsert-kafka table {@code dwd_trade_order_detail} keyed by {@code id}.
 */
public class DwdTradeOrderDetail {
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Source: Kafka topic with CDC-style pre-processed order rows.
        // NOTE(fix): the original DDL declared `session_id` twice, which makes
        // Flink reject the CREATE TABLE with a duplicate-column error; the
        // duplicate has been removed.
        tableEnv.executeSql("" +
                "create table dwd_trade_order_pre_process(\n" +
                "id string,\n" +
                "course_id string,\n" +
                "course_name string, \n" +
                "order_id string, \n" +
                "user_id string,\n" +
                "origin_amount string,\n" +
                "coupon_amount string,\n" +
                "final_amount string,\n" +
                "od_ts string,\n" +
                "order_status string,\n" +
                "session_id string,\n" +
                "province_id string,\n" +
                "operate_date_id string,\n" +
                "operate_time string,\n" +
                "`type` string,\n" +
                " `old` map<string,string>,\n" +
                " oi_ts string, \n" +
                " row_op_ts timestamp_ltz(3)\n" +
                ")"
                + MyKafkaUtil.getKafkaDDL(
                "dwd_trade_order_pre_process", "dwd_trade_order_detail"
        ));

        // Filter: keep only newly inserted orders (type = 'insert') and
        // project the columns the detail table needs.
        Table filteredTable = tableEnv.sqlQuery("" +
                "select  " +
                "id ,\n" +
                "course_id ,\n" +
                "course_name ,\n" +
                "order_id ,\n" +
                "user_id ,\n" +
                "origin_amount ,\n" +
                "coupon_amount ,\n" +
                "final_amount ,\n" +
                "od_ts ,\n" +
                "session_id ,\n" +
                "province_id ,\n" +
                "operate_date_id ,\n" +
                "operate_time ,\n" +
                "row_op_ts \n" +
                "from dwd_trade_order_pre_process " +
                "where `type`='insert'");
        tableEnv.createTemporaryView("filtered_table", filteredTable);

        // Sink: upsert-kafka table keyed by order-detail id; column order
        // matches the projection above so `select *` lines up positionally.
        tableEnv.executeSql("" +
                        "create table dwd_trade_order_detail(\n" +
                        "id string,\n" +
                        "course_id string,\n" +
                        "course_name string,\n" +
                        "order_id string,\n" +
                        "user_id string,\n" +
                        "origin_amount string,\n" +
                        "coupon_amount string,\n" +
                        "final_amount string,\n" +
                        "od_ts string,\n" +
                        "session_id string,\n" +
                        "province_id string,\n" +
                        "operate_date_id string,\n" +
                        "operate_time string,\n" +
                        "row_op_ts timestamp_ltz(3),\n" +
                        "primary key(id) not enforced\n" + ")" + MyKafkaUtil.getUpsertKafkaDDL("dwd_trade_order_detail"));

        // Write the filtered stream out to Kafka. executeSql submits the
        // INSERT job itself; no explicit env.execute() is required here.
        tableEnv.executeSql("insert into dwd_trade_order_detail select * from filtered_table");
    }
}