package com.atguigu.edu.realtime.app.dwd.db;

import com.atguigu.edu.realtime.utils.MyKafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.Duration;

/**
 * DWD-layer Flink SQL job: joins {@code order_detail} and {@code order_info}
 * change-log records from the {@code topic_db} Kafka source and writes the
 * pre-processed trade-order wide table to the upsert-kafka topic
 * {@code dwd_trade_order_pre_process}.
 *
 * <p>Pure Table API job: the final {@code executeSql("insert into ...")}
 * submits the pipeline asynchronously, so no {@code env.execute()} call is
 * required.
 */
public class DwdTradeOrderPreProcess {
    public static void main(String[] args) {
        // TODO 1. Basic environment setup
        // 1.1 Stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // 1.2 Parallelism
        env.setParallelism(4);
        // 1.3 Table execution environment
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        // 1.4 Idle state retention for the regular join: 15 minutes plus a
        // 5-second safety margin, so join state for stale keys is dropped.
        tableEnv.getConfig().setIdleStateRetention(Duration.ofSeconds(15 * 60 + 5));
        // Read the CDC change-log stream from the topic_db Kafka topic.
        tableEnv.executeSql(MyKafkaUtil.getTopicDbDDL("dwd_trade_order_pre_process_group"));
        // Filter order-detail rows (insert events only).
        Table orderDetail = tableEnv.sqlQuery("select \n" +
                "data['id'] id,\n" +
                "data['course_id'] course_id,\n" +
                "data['course_name'] course_name,\n" +
                "data['order_id'] order_id,\n" +
                "data['user_id'] user_id,\n" +
                "data['origin_amount'] origin_amount,\n" +
                "data['coupon_reduce'] coupon_reduce,\n" +
                "data['final_amount'] final_amount,\n" +
                "ts od_ts,\n" +
                "proc_time\n" +
                "from `topic_db` where `table` = 'order_detail' " +
                "and `type` = 'insert'\n");
        tableEnv.createTemporaryView("order_detail", orderDetail);
        // Filter order-info rows (both insert and update events, so status
        // changes flow through; `type` and `old` are forwarded for downstream
        // splitting of the change log).
        Table orderInfo = tableEnv.sqlQuery(
                "select \n" +
                        "data['id'] id,\n" +
                        "data['user_id'] user_id,\n" +
                        "data['order_status'] order_status,\n" +
                        "data['session_id'] session_id,\n" +
                        "data['province_id'] province_id,\n" +
                        "data['update_time'] operate_time,\n" +
                        "`type`,\n" +
                        "`old`,\n" +
                        "ts oi_ts\n" +
                        "from `topic_db`\n" +
                        "where `table` = 'order_info'\n" +
                        "and (`type` = 'insert' or `type` = 'update')");
        tableEnv.createTemporaryView("order_info", orderInfo);
        // Join detail with its order header. NOTE: the final insert uses
        // `select *`, so the column ORDER here must stay aligned with the
        // sink DDL below.
        Table resultTable = tableEnv.sqlQuery("select \n" +
                "od.id,\n" +
                "od.course_id,\n" +
                "od.course_name,\n" +
                "od.order_id,\n" +
                "od.user_id,\n" +
                "od.origin_amount,\n" +
                "od.coupon_reduce,\n" +
                "od.final_amount,\n" +
                "od.od_ts,\n" +
                "oi.order_status,\n" +
                "oi.session_id,\n" +
                "oi.province_id,\n" +
                "date_format(oi.operate_time, 'yyyy-MM-dd') operate_date_id,\n" +
                "oi.operate_time,\n" +
                "oi.`type`,\n" +
                "oi.`old`,\n" +
                "oi.oi_ts,\n" +
                "current_row_timestamp() row_op_ts\n" +
                "from order_detail od \n" +
                "join order_info oi\n" +
                "on od.order_id = oi.id\n");
        tableEnv.createTemporaryView("result_table", resultTable);
        // Sink table mapped onto the target upsert-kafka topic.
        // BUGFIX: the original DDL declared `session_id` twice, which Flink
        // rejects as a duplicate column and which also made the 19-column sink
        // mismatch the 18-column result_table consumed via `select *`.
        tableEnv.executeSql("" +
                "create table dwd_trade_order_pre_process(\n" +
                "id string,\n" +
                "course_id string,\n" +
                "course_name string, \n" +
                "order_id string, \n" +
                "user_id string,\n" +
                "origin_amount string,\n" +
                "coupon_amount string,\n" +
                "final_amount string,\n" +
                "od_ts string,\n" +
                "order_status string,\n" +
                "session_id string,\n" +
                "province_id string,\n" +
                "operate_date_id string,\n" +
                "operate_time string,\n" +
                "`type` string,\n" +
                " `old` map<string,string>,\n" +
                " oi_ts string, \n" +
                " row_op_ts timestamp_ltz(3),\n" +
                // Primary key drives upsert-kafka partitioning/compaction.
                " primary key(id) not enforced\n" +
                ")" + MyKafkaUtil.getUpsertKafkaDDL("dwd_trade_order_pre_process")
        );

        // Sample output record:
        // {"id":"75146","course_id":"46","course_name":"Spring4.0","order_id":"65827","user_id":"1040","origin_amount":"200.0","coupon_amount":"0.0","final_amount":"200.0","od_ts":"1668824651","session_id":"37030668-62dc-4452-96a6-3476d3a89628","province_id":"30","operate_date_id":null,"operate_time":null,"row_op_ts":"2022-11-19 02:24:10.97Z"}
        // Debug aid:
        // tableEnv.executeSql("select * from result_table").print();

        // Write the join result to the Kafka sink topic. executeSql submits
        // the streaming insert job; columns are matched POSITIONALLY between
        // result_table and the sink DDL above.
        tableEnv.executeSql("insert into dwd_trade_order_pre_process \n" +
                "select * from result_table");
    }
}
