package com.group2.edu.realtime.dwd.app;

import com.group2.edu.realtime.common.base.BaseSQLApp;
import com.group2.edu.realtime.common.util.SQLUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.Duration;

/**
 * DWD-layer order-placement transaction fact table.
 *
 * <p>Reads CDC data from {@code topic_db}, filters out {@code order_detail} and
 * {@code order_info} insert events, joins them with the page-view log stream
 * ({@code dwd_traffic_page}) to attach the traffic source of each order, and
 * writes the enriched rows to the {@code dwd_trade_order_detail} Kafka topic
 * via the upsert-kafka connector.
 *
 * <p>Required upstream services: zk, kafka, maxwell, flume1, DwdBaseLog,
 * DwdTradeOrderDetail.
 *
 * @author 高耀
 * @date 2024/12/14 16:29<p></p>
 */
public class DwdTradeOrderDetail extends BaseSQLApp {

    /** Kafka consumer-group id used by every source of this job. */
    private static final String GROUP_ID = "dwd_trade_order_detail";
    /** Target Kafka topic the fact table is written to. */
    private static final String TARGET_TOPIC = "dwd_trade_order_detail";
    /** Kafka topic holding the DWD page-view log produced by DwdBaseLog. */
    private static final String PAGE_TOPIC = "dwd_traffic_page";
    /** Registered name of the upsert-kafka sink table. */
    private static final String SINK_TABLE = "dwd_trade_order_detail_table";

    public static void main(String[] args) {
        new DwdTradeOrderDetail().start(
                10017,
                4,
                GROUP_ID
        );
    }

    @Override
    protected void handle(StreamExecutionEnvironment env, StreamTableEnvironment tEnv) {
        // The three joined streams are produced at (business-wise) the same moment,
        // so only transport delay matters: 10s of idle-state retention keeps the
        // join state from growing without bound while still matching late rows.
        tEnv.getConfig().setIdleStateRetention(Duration.ofSeconds(10));

        // Register the raw CDC stream as table `topic_db` via the kafka connector.
        readTopicDb(tEnv, GROUP_ID);

        registerOrderDetail(tEnv);
        registerOrderInfo(tEnv);
        createTrafficPageSource(tEnv);
        registerPageOrderInfo(tEnv);

        Table dwdTradeOrderDetail = joinOrderWithTrafficLog(tEnv);

        createSinkTable(tEnv);
        dwdTradeOrderDetail.executeInsert(SINK_TABLE);
    }

    /**
     * Filters {@code order_detail} insert events out of {@code topic_db} and
     * registers the projected columns as temporary view {@code order_detail}.
     */
    private void registerOrderDetail(StreamTableEnvironment tEnv) {
        Table orderDetail = tEnv.sqlQuery(
                "select " +
                        "data['id'] as `id`, " +
                        "data['order_id'] as order_id, " +
                        "data['course_id'] as course_id, " +
                        "data['course_name'] as course_name, " +
                        "data['user_id'] as user_id, " +
                        "data['origin_amount'] as origin_amount, " +
                        "data['coupon_reduce'] as coupon_reduce, " +
                        "data['final_amount'] as final_amount, " +
                        // keep only the date part of create_time
                        "date_format(data['create_time'], 'yyyy-MM-dd') as create_time, " +
                        "ts " +
                        "from topic_db " +
                        "where " +
                        "`type` = 'insert' and `table` = 'order_detail'"
        );
        tEnv.createTemporaryView("order_detail", orderDetail);
    }

    /**
     * Filters {@code order_info} insert events out of {@code topic_db} and
     * registers them as temporary view {@code order_info}; {@code session_id}
     * is kept solely for the join with the page log.
     */
    private void registerOrderInfo(StreamTableEnvironment tEnv) {
        Table orderInfo = tEnv.sqlQuery(
                "select " +
                        "data['id'] as order_id, " +
                        "data['province_id'] as province_id, " +
                        "data['session_id'] as session_id, " +
                        "data['out_trade_no'] as out_trade_no, " +
                        "data['trade_body'] as trade_body " +
                        "from topic_db " +
                        "where `type` = 'insert' and `table` = 'order_info'"
        );
        tEnv.createTemporaryView("order_info", orderInfo);
    }

    /** Declares the kafka source table over the DWD page-view log topic. */
    private void createTrafficPageSource(StreamTableEnvironment tEnv) {
        tEnv.executeSql(
                "create table dwd_traffic_page ( " +
                        "`common` map<STRING, STRING>, " +
                        "`page` map<STRING,STRING>, " +
                        "`ts` BIGINT " +
                        ") " + SQLUtil.getKafkaProperty(PAGE_TOPIC, GROUP_ID)
        );
    }

    /**
     * Keeps only page views of the order page and registers view
     * {@code page_order_info}: traffic source plus the session id used
     * to correlate with {@code order_info}.
     */
    private void registerPageOrderInfo(StreamTableEnvironment tEnv) {
        Table pageOrderInfo = tEnv.sqlQuery(
                "select " +
                        "common['sc'] as `source_id`," +
                        "common['sid'] as session_id " +
                        "from " +
                        "dwd_traffic_page " +
                        "where page['page_id'] = 'order' "
        );
        tEnv.createTemporaryView("page_order_info", pageOrderInfo);
    }

    /**
     * Inner-joins order detail with order info (both come from the same
     * business transaction) and LEFT-joins the page log, because log events
     * may be lost and must not drop the order row.
     *
     * @return the joined fact-table rows
     */
    private Table joinOrderWithTrafficLog(StreamTableEnvironment tEnv) {
        return tEnv.sqlQuery(
                "select " +
                        "od.`id`, " +
                        "od.order_id, " +
                        "od.course_id, " +
                        "od.course_name, " +
                        "od.user_id, " +
                        "od.origin_amount, " +
                        "od.coupon_reduce, " +
                        "od.final_amount, " +
                        "od.create_time, " +
                        "oi.province_id, " +
                        "oi.out_trade_no, " +
                        "oi.trade_body, " +
                        "poi.source_id, " +
                        "od.`ts` " +
                        "from " +
                        "order_detail od join order_info oi " +
                        "on od.order_id = oi.order_id " +
                        "left join page_order_info poi on oi.session_id = poi.session_id "
        );
    }

    /**
     * Declares the upsert-kafka sink table. The left join emits retractions,
     * so a primary key ({@code id}) and the upsert-kafka connector are
     * required to collapse them into the latest row per order-detail id.
     */
    private void createSinkTable(StreamTableEnvironment tEnv) {
        tEnv.executeSql(
                "create table dwd_trade_order_detail_table ( " +
                        "id string,\n" +
                        "order_id string,\n" +
                        "course_id string,\n" +
                        "course_name string,\n" +
                        "user_id string,\n" +
                        "origin_amount string,\n" +
                        "coupon_reduce string,\n" +
                        "final_amount string,\n" +
                        "create_time string,\n" +
                        "province_id string,\n" +
                        "out_trade_no string,\n" +
                        "trade_body string,\n" +
                        "source_id string,\n" +
                        "ts bigint," +
                        "primary key(id) not enforced" +
                        ")" + SQLUtil.getUpsertKafkaProperty(TARGET_TOPIC)
        );
    }
}
