package com.myCompany.edu.realtime.app.dwd.db;

import com.myCompany.edu.realtime.app.BaseSQLApp;
import com.myCompany.edu.realtime.common.Constant;
import com.myCompany.edu.realtime.util.SQLUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD-layer job: builds the trade order-detail wide table.
 *
 * <p>Joins three streams — order_detail and order_info rows filtered from the
 * ods_db CDC topic, plus session/source info from the DWD traffic page topic —
 * and writes the result to an upsert-kafka sink topic. The join over page data
 * produces duplicate rows per order-detail (one session spans many pages), so
 * the sink is upsert-kafka keyed on {@code id}; downstream consumers dedupe by
 * keeping the row with the greatest {@code row_op_ts}.
 */
public class Dwd03_DwdTradeOrderDetail extends BaseSQLApp {

    /** Job name, also reused as the Kafka consumer-group id for every source. */
    private static final String JOB_NAME = "Dwd03_DwdTradeOrderDetail";

    public static void main(String[] args) {
        new Dwd03_DwdTradeOrderDetail().init(
                2003,      // local web-UI / REST port for this job
                2,         // default parallelism
                JOB_NAME
        );
    }

    @Override
    protected void handle(StreamExecutionEnvironment env, StreamTableEnvironment tEnv) {
        // 1. Register the ods_db source table (Maxwell/CDC data for all business tables).
        readOdsDb(tEnv, JOB_NAME);

        // 2. Filter order-detail rows: keep only inserts.
        Table orderDetail = tEnv.sqlQuery("select " +
                " data['id'] id, " +
                " data['course_id'] course_id, " +
                " data['course_name'] course_name, " +
                " data['order_id'] order_id, " +
                " data['user_id'] user_id, " +
                " data['origin_amount'] split_origin_amount, " +
                " data['coupon_reduce'] split_coupon_reduce, " +
                " data['final_amount'] split_final_amount, " +
                " data['create_time'] create_time, " +
                " ts, " +
                " pt " +
                " from ods_db" +
                " where `database` = 'edu' " +
                " and `table` = 'order_detail' " +
                " and `type` = 'insert' ");
//        orderDetail.execute().print();  // debug only — blocks the job, keep disabled
        tEnv.createTemporaryView("order_detail", orderDetail);

        // 3. Filter order_info rows: keep only inserts (need session_id and province_id).
        Table orderInfo = tEnv.sqlQuery(" select " +
                " data['id'] id, " +
                " data['session_id'] session_id, " +
                " data['province_id'] province_id " +
                " from ods_db" +
                " where `database` = 'edu' " +
                " and `table` = 'order_info' " +
                " and `type` = 'insert' ");
        tEnv.createTemporaryView("order_info", orderInfo);

        // 4. Read the DWD traffic page topic; project session_id and source_id.
        tEnv.executeSql("create table dwd_traffic_page(" +
                " `common` map<string,string>, " +
                " `page` map<string,string>, " +
                " `ts` bigint " +
                ")" + SQLUtil.getKafkaSourceSQL(Constant.TOPIC_DWD_TRAFFIC_PAGE, JOB_NAME));

        Table pageInfo = tEnv.sqlQuery("select " +
                " common['sid'] session_id, " +
                " common['sc'] source_id " +
                " from dwd_traffic_page ");
        tEnv.createTemporaryView("page_info", pageInfo);

        // 5. Join the three views.
        Table result = tEnv.sqlQuery(
                "select " +
                " od.id, " +
                " od.user_id, " +
                " od.order_id, " +
                " od.course_id, " +
                " od.course_name, " +
                " oi.province_id, " +
                " pi.source_id, " +
                " od.split_origin_amount, " +
                " od.split_coupon_reduce, " +
                " od.split_final_amount, " +
                " od.create_time, " +
                " od.ts, " +
                // Processing-time stamp of the row; downstream dedup keeps the max.
                " current_row_timestamp() row_op_ts " +
                " from order_detail od " +
                " join order_info oi on od.order_id = oi.id " +
                " join page_info pi on oi.session_id = pi.session_id");
        // The page join duplicates rows (one session -> many pages), hence the
        // upsert-kafka sink keyed on id below.
//        result.execute().print();  // debug only — print() never returns on an
//                                   // unbounded stream, which would prevent the
//                                   // executeInsert below from ever running.

        // 6. Declare the upsert-kafka sink table mapped to the target topic.
        tEnv.executeSql("create table dwd_trade_order_detail( " +
                " id string, " +
                " user_id string, " +
                " order_id string, " +
                " course_id string, " +
                " course_name string, " +
                " province_id string, " +
                " source_id string, " +
                " split_origin_amount string, " +
                " split_coupon_reduce string, " +
                " split_final_amount string, " +
                " create_time string, " +
                " ts bigint, " +
                // Per-row processing time; used downstream to keep the latest row.
                " row_op_ts timestamp_ltz(3), " +
                " primary key(id) not enforced " +
                ") " + SQLUtil.getUpsertKafkaSinkSQL(Constant.TOPIC_DWD_TRADE_ORDER_DETAIL));

        // 7. Write the joined result into the upsert-kafka sink.
        result.executeInsert("dwd_trade_order_detail");
    }

}
