package com.atguigu.app.dwd.db;

import com.alibaba.fastjson.JSONObject;
import com.atguigu.util.KafkaUtil_wm;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.util.Collector;

import java.time.Duration;

/**
 * DWD job: trade domain, traffic-source granularity ("source ready") table.
 *
 * <p>Pipeline:
 * <ol>
 *   <li>Read the CDC topic {@code topic_db}; filter out cart-add rows
 *       ({@code cart_info}), order rows ({@code order_info}) and order-detail
 *       rows ({@code order_detail}).</li>
 *   <li>Join order-detail with cart-add on {@code (user_id, course_id)} to
 *       recover the cart's {@code session_id}, and with order-info for the
 *       order amounts.</li>
 *   <li>Read the behaviour-log topic {@code topic_log} and join on
 *       {@code common.sid = session_id} to pick up the traffic-source id
 *       {@code common.sc}.</li>
 *   <li>Write the result to the Kafka topic {@code dwd_trade_source_ready}.
 *       Resolving {@code sc} to a source name via {@code base_source} is
 *       deferred to the DWS layer.</li>
 * </ol>
 */
public class Dwd_Trade_Source_Ready {
    public static void main(String[] args) {
        // Flink execution environment. Parallelism 1 is for local/dev runs;
        // raise it (and enable checkpointing) for production.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism( 1 );

        // Table environment on top of the streaming environment.
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create( env );

        // FIX: every join below is a regular (unbounded) stream-to-stream join.
        // Without an idle-state retention, Flink keeps ALL rows of both join
        // sides in state forever, so state grows without bound. 905 s
        // (15 min + 5 s safety margin) bounds how long an unmatched row is
        // retained. NOTE(review): confirm this window covers the maximum
        // expected gap between a cart-add event, the order event and the
        // corresponding page-log event.
        tableEnv.getConfig().setIdleStateRetention( Duration.ofSeconds( 905L ) );

        // Source: CDC topic carrying Maxwell/business-DB change events.
        // proc_time is declared so processing-time semantics are available.
        tableEnv.executeSql( "create table topic_db(" +
                "`database` string," +
                "`table` string," +
                "`type` string," +
                "`data` map<string,string>," +
                "`old` map<string,string>," +
                "`ts` string," +
                "`proc_time` as proctime()" +
                ")" + KafkaUtil_wm.getKafkaDDL( "topic_db", "trade_cart_add" ) );

        // Cart-add rows: inserts into edu.cart_info. session_id is the field
        // we ultimately need to bridge business events to behaviour logs.
        Table cartAdd = tableEnv.sqlQuery( "" +
                "select  " +
                "   `data`['id'] id,  " +
                "   `data`['user_id'] user_id,  " +
                "   `data`['course_id'] course_id,  " +
                "   `data`['course_name'] course_name,  " +
                "   `data`['cart_price'] cart_price,  " +
                "   `data`['session_id'] session_id,  " +
                "   `data`['create_time'] create_time,  " +
                "   `data`['deleted'] deleted,  " +
                "   `data`['sold'] sold  " +
                " from topic_db  " +
                " where `database`='edu'  " +
                " and `table`='cart_info'  " +
                " and `type`='insert'" );
        tableEnv.createTemporaryView( "cart_info_table", cartAdd );

        // Order header rows: inserts into edu.order_info (amount fields).
        Table orderInfoTable = tableEnv.sqlQuery( "" +
                "select   " +
                " `data`['id'] id,   " +
                " `data`['order_id'] order_id,   " +
                " `data`['user_id'] user_id,   " +
                " `data`['province_id'] province_id,   " +
                " `data`['origin_amount'] origin_amount,   " +
                " `data`['final_amount'] final_amount,   " +
                " `data`['create_time'] create_time,   " +
                " `data`['update_time'] update_time,   " +
                " `data`['order_status'] order_status   " +
                " from topic_db  " +
                " where `database`='edu'  " +
                " and `table`='order_info'  " +
                " and `type`='insert'" );
        tableEnv.createTemporaryView( "order_info_table", orderInfoTable );

        // Order-detail rows: inserts into edu.order_detail (per-course lines).
        Table orderDetailTable = tableEnv.sqlQuery( "" +
                "select   " +
                " `data`['id'] id,   " +
                " `data`['course_id'] course_id,   " +
                " `data`['course_name'] course_name,   " +
                " `data`['order_id'] order_id,   " +
                " `data`['user_id'] user_id,   " +
                " `data`['session_id'] session_id,   " +
                " `data`['province_id'] province_id,   " +
                " `data`['origin_amount'] origin_amount,   " +
                " `data`['coupon_reduce'] coupon_reduce,   " +
                " `data`['final_amount'] final_amount,   " +
                " `data`['create_time'] create_time,   " +
                " `data`['update_time'] update_time,   " +
                " `data`['order_status'] order_status   " +
                " from topic_db  " +
                " where `database`='edu'  " +
                " and `table`='order_detail'  " +
                " and `type`='insert'" );
        tableEnv.createTemporaryView( "order_detail_table", orderDetailTable );

        // Three-way join: detail ⋈ cart on (course_id, user_id) to recover
        // session_id; detail ⋈ order header on order_id for the amounts.
        // NOTE(review): if a user re-adds the same course to the cart, the
        // (user_id, course_id) join can fan out to multiple rows per order
        // detail — verify upstream guarantees at most one live cart row.
        Table cart_orderDetail_Table = tableEnv.sqlQuery( "select " +
                "ci.id  ," +
                "ode.id  order_detail_id," +
                "ode.order_id order_id," +
                "ci.user_id , " +
                "ci.course_id , " +
                "ci.course_name , " +
                "ci.cart_price  ," +
                "ci.session_id  ," +
                "ode.create_time  ," +
                "ci.deleted  ," +
                "ci.sold ," +
                "oi.origin_amount origin_amount," +
                "oi.final_amount final_amount " +
                " from order_detail_table ode " +
                " join cart_info_table ci  on ci.course_id=ode.course_id and ci.user_id=ode.user_id " +
                " join order_info_table oi on oi.id=ode.order_id " +
                "" );
        tableEnv.createTemporaryView( "two_relevance_table", cart_orderDetail_Table );

        // Source: behaviour-log topic; common.sid is the session id and
        // common.sc is the traffic-source id we want to attach to the trade.
        tableEnv.executeSql( "create table topic_log(" +
                "`appVideo` map<string,string>," +
                "`common` map<string,string>," +
                "`ts` string" +
                ")" + KafkaUtil_wm.getKafkaDDL( "topic_log", "topic_log_common" ) );
        Table topic_logTable = tableEnv.sqlQuery(
                " select   " +
                        "`common`['ar']   ar,   " +
                        "`common`['ba']   ba,   " +
                        "`common`['ch']   ch,   " +
                        "`common`['is_new']   is_new,   " +
                        "`common`['md']   md,   " +
                        "`common`['mid']   mid,   " +
                        "`common`['os']   os,   " +
                        "`common`['sc']   sc,   " + // sc = traffic-source id, the key output field
                        "`common`['sid']   sid,   " +
                        "`common`['uid']   uid,   " +
                        "`common`['vc']   vc   " +
                        "from topic_log"
        );
        tableEnv.createTemporaryView( "logTable", topic_logTable );

        // Final join: business rows ⋈ logs on sid = session_id to pick up sc.
        Table resultTable = tableEnv.sqlQuery( "" +
                "select " +
                "logt.sc sc," +
                "twort.user_id," +
                "twort.order_detail_id," +
                "twort.order_id," +
                "twort.create_time create_time," +
                "twort.origin_amount origin_amount," +
                "twort.final_amount  final_amount " +
                " from logTable logt" +
                " join two_relevance_table twort on logt.sid = twort.session_id  " );
        tableEnv.createTemporaryView( "result_table", resultTable );

        // Sink: Kafka topic dwd_trade_source_ready. The insert-into submits
        // the job, so no explicit env.execute() is needed here.
        tableEnv.executeSql( "" +
                "create table dwd_trade_source(" +
                "sc string," +
                "user_id string," +
                "order_detail_id string," +
                "order_id string," +
                "create_time string," +
                "origin_amount string," +
                "final_amount string " +
                ")" + KafkaUtil_wm.getKafkaSinkDDL( "dwd_trade_source_ready" ) );
        tableEnv.executeSql( "insert into dwd_trade_source select * from result_table " );
    }
}
