package com.atguigu.gmall.realtime.app.dwd.db;

import com.atguigu.gmall.realtime.utils.MyKafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.Duration;

/**
 * @author Felix
 * @date 2023/4/18
 * 交易域：支付成功事实表
 * 需要启动的进程
 *      zk、kafka、maxwell、DwdTradeOrderDetail、DwdTradePayDetailSuc
 */
public class DwdTradePayDetailSuc {
    public static void main(String[] args) {
        //TODO 1. 基本环境准备
        //1.1 指定流处理环境
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        //1.2 设置并行度
        env.setParallelism(4);
        //1.3 指定表指定环境
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        //TODO 2. 检查点相关设置(略)

        //TODO 3.从下单事实表中读取下单数据  创建动态表 ，指定Watermark以及提取事件时间字段
        tableEnv.executeSql(
                " create table dwd_trade_order_detail(\n"+
                        " id string,\n"+
                        " order_id string,\n"+
                        " course_id string,\n"+
                        " course_name string,\n"+
                        " date_id string,\n"+
                        " user_id string,\n"+
                        " final_amount string,\n"+
                        " ts string,\n"+
                        " province_id string,\n"+
                        " row_time as TO_TIMESTAMP(FROM_UNIXTIME(cast(ts as  bigint))),\n"+
                        " watermark for row_time as row_time"+
        ")"
                + MyKafkaUtil.getKafkaDDL("dwd_trade_order_detail", "dwd_trade_pay_detail_suc"));

        //TODO 4.从topic_db主题中读取业务数据 创建动态表 ，指定Watermark以及提取事件时间字段
        tableEnv.executeSql(
                "CREATE TABLE topic_db (\n" +
                        "  `database` string,\n" +
                        "  `table` string,\n" +
                        "  `type` string,\n" +
                        "  `data` MAP<string, string>,\n" +
                        "  `old` map<string,string>,\n" +
                        "  `ts` string,\n" +
                        "  `proc_time` as proctime(),\n" +
                        " row_time as TO_TIMESTAMP(FROM_UNIXTIME(cast(ts as bigint))),\n" +
                        " watermark for row_time as row_time" +
                        ") "

                + MyKafkaUtil.getKafkaDDL("topic_db", "dwd_trade_pay_detail_suc"));

        Table paymentInfo = tableEnv.sqlQuery(
                "select\n" +
                        "data['id'] id,\n" +
                        "data['order_id'] order_id,\n" +
                        "data['payment_type'] payment_type,\n" +
                        "data['callback_time'] callback_time,\n" +
                        "row_time,\n" +
                        "`proc_time`,\n" +
                        "ts\n" +
                        "from topic_db\n" +
                        "where `table` = 'payment_info'\n"

        );
        tableEnv.createTemporaryView("payment_info", paymentInfo);

        //tableEnv.executeSql("select * from payment_info").print();

        //TODO 5.从mysql中读取字典表数据(无字典表)

        //TODO 6.将上述3张表进行关联+ (两张表)
        Table resultTable = tableEnv.sqlQuery(""+
                        " select \n" +
                        " od.id order_detail_id,\n"+
                        " od.order_id ,\n"+
                        " od.course_id ,\n"+
                        " od.course_name ,\n"+
                        " od.date_id ,\n"+
                        " od.user_id ,\n"+
                        " od.final_amount ,\n"+
                        " od.province_id ,\n"+
                        " pi.payment_type,\n" +
                        " pi.ts \n" +
                        /*" from dwd_trade_order_detail od \n"+
                        " join payment_info pi  \n"+
                        " on od.order_id=pi.order_id \n"+
                        " where od.row_time >= pi.row_time - INTERVAL '15' MINUTE \n"+
                        " and od.row_time <= pi.row_time + INTERVAL '5' SECOND "+*/

                        " from dwd_trade_order_detail od , payment_info pi \n"+
                        " where od.order_id=pi.order_id \n"
                        //" and od.row_time >= pi.row_time - INTERVAL '15' MINUTE \n"+
                        //" and od.row_time <= pi.row_time + INTERVAL '5' SECOND "

        );
        tableEnv.createTemporaryView("result_table", resultTable);

        //tableEnv.executeSql("select * from result_table").print();

        //TODO 7.将关联的结果写到kafka的主题
        tableEnv.executeSql(
                " create table dwd_trade_pay_detail_suc (\n"+
                        " order_detail_id string,\n"+
                        " order_id string,\n"+
                        " course_id string,\n"+
                        " course_name string,\n"+
                        " date_id string,\n"+
                        " user_id string,\n"+
                        " final_amount string,\n"+
                        " province_id string,\n"+
                        " payment_type string,\n" +
                        " ts string ,\n" +
                        " primary key(order_detail_id) not enforced\n" +
                        " )"
                + MyKafkaUtil.getUpsertKafkaDDL("dwd_trade_pay_detail_suc"));

        tableEnv.executeSql("insert into dwd_trade_pay_detail_suc select * from result_table");

    }
}
