package com.atguigu.realtime.app.dwd.db;

import com.atguigu.realtime.utils.KafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD-layer job for trade order detail.
 *
 * <p>Reads change-log records from the {@code topic_db} Kafka topic and behavior-log
 * records from {@code topic_log}, keeps the {@code order_detail} insert events, joins
 * them with the log stream on {@code session_id} to attach the traffic source channel
 * ({@code sc}), and writes the widened rows to the {@code dwd_trade_order_detail}
 * Kafka topic.
 */
public class DwdTradeOrderDetail {
    public static void main(String[] args) {
        // TODO 1: prepare the stream and table environments
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // TODO 2: map the raw business change-log topic to a Flink table.
        // `data`/`old` carry the row image as string maps, so every column
        // projected from them below comes out as STRING; `pt` is a
        // processing-time attribute.
        tableEnv.executeSql("create table topic_db(\n" +
                "`database` string,\n" +
                "`table` string,\n" +
                "`type` string,\n" +
                "`data` map<string,string>,\n" +
                "`old` map<string,string>,\n" +
                "`ts` string,\n" +
                "`pt` as PROCTIME()\n" +
                ")" + KafkaUtil.getKafkaDDL("topic_db","dwd_trade_order_detail"));

        // TODO 2.1: map the behavior-log topic to a Flink table
        tableEnv.executeSql("create table topic_log(\n" +
                "`common` map<String,String>\n" +
                ")" + KafkaUtil.getKafkaDDL("topic_log","dwd_trade_order_detail"));

        // TODO 3: keep only freshly inserted order_detail rows from the change log
        Table orderDetail = tableEnv.sqlQuery("select\n" +
                "`data`['id'] id,\n" +
                "`data`['course_id'] course_id,\n" +
                "`data`['course_name'] course_name,\n" +
                "`data`['order_id'] order_id,\n" +
                "`data`['user_id'] user_id,\n" +
                "`data`['origin_amount'] origin_amount,\n" +
                "`data`['coupon_reduce'] coupon_reduce,\n" +
                "`data`['final_amount'] final_amount,\n" +
                "`data`['session_id'] session_id,\n" +
                "`data`['create_time'] create_time,\n" +
                "`data`['update_time'] update_time,\n" +
                "ts\n" +
                "from\n" +
                "`topic_db`\n" +
                "where\n" +
                "`table` = 'order_detail'\n" +
                "and `type` = 'insert'");
        tableEnv.createTemporaryView("order_detail",orderDetail);

        // TODO 3.1: project the source channel (sc) and session id from the behavior log
        Table logDetail = tableEnv.sqlQuery("select\n" +
                "`common`['sc'] sc,\n" +
                "`common`['sid'] session_id\n" +
                "from\n" +
                "topic_log");
        tableEnv.createTemporaryView("log_detail",logDetail);

        // TODO 4: join order details with the log stream to attach the channel.
        // (The previous version first re-created identical copies of both views
        // as result_table1/result_table2; the join now reads them directly —
        // same result, smaller plan.)
        // NOTE(review): this is an unbounded regular join, so both sides are
        // retained in state indefinitely; consider
        // tableEnv.getConfig().setIdleStateRetention(...) once the acceptable
        // matching window is confirmed.
        Table resultTable = tableEnv.sqlQuery("select\n" +
                "id,\n" +
                "course_id,\n" +
                "course_name,\n" +
                "order_id,\n" +
                "user_id,\n" +
                "origin_amount,\n" +
                "coupon_reduce,\n" +
                "final_amount,\n" +
                "od.session_id,\n" +
                "create_time,\n" +
                "update_time,\n" +
                "lg.sc,\n" +
                "ts\n" +
                "from\n" +
                "order_detail od\n" +
                "join\n" +
                "log_detail lg\n" +
                "on\n" +
                "od.session_id = lg.session_id");
        tableEnv.createTemporaryView("result_table",resultTable);

        // TODO 5: declare the Kafka sink table — column order must match the
        // select list above, since the insert below uses `select *`
        tableEnv.executeSql("create table dwd_trade_order_detail(\n" +
                "id String,\n" +
                "course_id String,\n" +
                "course_name String,\n" +
                "order_id String,\n" +
                "user_id String,\n" +
                "origin_amount String,\n" +
                "coupon_reduce String,\n" +
                "final_amount String,\n" +
                "session_id String,\n" +
                "create_time String,\n" +
                "update_time String,\n" +
                "sc String,\n" +
                "ts String" +
                ")" + KafkaUtil.getKafkaSinkDDL("dwd_trade_order_detail"));

        // TODO 6: submit the insert; executeSql on an INSERT statement triggers
        // the streaming job itself, so no explicit env.execute() is needed
        tableEnv.executeSql("insert into dwd_trade_order_detail select * from result_table");
    }
}
