package com.atguigu.edu.app.dwd.db;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import util.MyKafkaUtil;

import java.time.Duration;

public class DwdTradeOrderDetail {
    /**
     * DWD-layer job: builds the trade order-detail wide table.
     *
     * <p>Reads the raw CDC stream (ODS_BASE_DB) and the page log from Kafka,
     * joins order_detail (driving table) with order_info and the page log on
     * session id, and writes the widened rows to the
     * {@code dwd_trade_order_detail} topic through an upsert-kafka sink.
     */
    public static void main(String[] args) {
        // 1. Basic environment setup.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        env.setParallelism(4);
        // A regular (non-interval) join in Flink SQL keeps both inputs in state
        // forever by default; cap the idle-state TTL so state stays bounded.
        tableEnv.getConfig().setIdleStateRetention(Duration.ofSeconds(30));
        // 2. Checkpointing.
        // TODO(review): no checkpoint configuration is applied here, so the job
        // has no fault-tolerance guarantees; enable checkpointing (interval,
        // exactly-once mode, state backend) before running in production.
        // 3. Source connector table over the raw CDC topic.
        // NOTE(review): presumably getTopicDDL registers a table named
        // ODS_BASE_DB exposing `data`, `type`, `old`, `table`, `ts` columns,
        // with "user_order_group" as the consumer group — confirm against
        // MyKafkaUtil.
        tableEnv.executeSql(MyKafkaUtil.getTopicDDL("user_order_group"));
        // 4. Extract newly inserted order rows (table=order_info, type=insert)
        //    and register them as a temporary view.
        Table orderInfoTable = tableEnv.sqlQuery("select " +
                " data['id'] id," +
                " data['user_id'] user_id," +
                " data['order_status'] order_status," +
                " data['province_id'] province_id," +
                " data['create_time'] create_time," +
                " data['update_time'] update_time," +
                " data['session_id'] session_id," +
                " `type`,"+
                " `old`," +
                " ts oi_ts" +
                " from ODS_BASE_DB" +
                " where `table`='order_info' and `type`='insert'");
        tableEnv.createTemporaryView("order_info",orderInfoTable);
        // 5. Extract newly inserted order-detail rows (table=order_detail,
        //    type=insert) and register them as a temporary view.
        Table orderDetailTable = tableEnv.sqlQuery("select " +
                " data['id'] id," +
                " data['course_id'] course_id," +
                " data['course_name'] course_name," +
                " data['order_id'] order_id," +
                " data['user_id'] user_id," +
                " data['origin_amount'] origin_amount," +
                " data['coupon_reduce'] coupon_reduce," +
                " data['final_amount'] final_amount," +
                " ts od_ts" +
                " from ODS_BASE_DB" +
                " where `table`='order_detail' and `type`='insert'");
        tableEnv.createTemporaryView("order_detail",orderDetailTable);
        // 6. Source connector table over the page-log topic.
        tableEnv.executeSql(
                " create table page_log(" +
                        " page map<string,string>," +
                        " common map<string,string>," +
                        " ts string" +
                        ") "+MyKafkaUtil.getKafkaDDL("ODS_BASE_LOG","dwd_trade_order_detail_group")
        );
        // 7. Project the page-log fields needed for the join: session id and
        //    traffic source id.
        Table pageLogTable = tableEnv.sqlQuery("select " +
                " common['sid'] session_id," +
                " common['sc'] source_id" +
                " from page_log");
        tableEnv.createTemporaryView("page_log_table",pageLogTable);
        // 8. Three-way join: order_detail drives, enriched with order_info
        //    (detail.order_id = info.id) and the page log (info.session_id =
        //    log.session_id). Register the result as a view.
        Table resTable = tableEnv.sqlQuery("select " +
                "od.id," +
                "od.course_id," +
                "od.course_name," +
                "od.order_id," +
                "od.user_id," +
                "od.origin_amount," +
                "od.coupon_reduce," +
                "od.final_amount," +
                "od.od_ts ," +

                "oi.order_status," +
                "oi.province_id," +
                "oi.create_time," +
                "oi.update_time," +
                "oi.`type`," +
                "oi.`old`," +
                "oi.oi_ts, " +

                " pl.source_id " +
                " from order_detail od " +
                " join order_info oi " +
                " on od.order_id=oi.id" +
                " join page_log_table pl" +
                " on oi.session_id=pl.session_id");
        tableEnv.createTemporaryView("res_table",resTable);
        // 9. Sink table mapped to the target Kafka topic. upsert-kafka requires
        //    a primary key; the regular join above emits retractions, which the
        //    upsert sink collapses by key.
        tableEnv.executeSql("create table dwd_trade_order_detail(" +
                " `id` string," +
                " course_id string," +
                " course_name string," +
                " order_id string," +
                " user_id string," +
                " origin_amount string," +
                " coupon_reduce string," +
                " final_amount string," +
                " od_ts string," +

                " order_status string," +
                " province_id string," +
                " create_time string," +
                " update_time string," +
                " `type` string," +
                " `old` map<string,string>," +
                " oi_ts string," +
                " source_id string," +
                " primary key(id) not enforced " +
                ")"+MyKafkaUtil.getUpsertKafkaDDL("dwd_trade_order_detail"));
        // 10. Write the join result into the sink. Columns are listed
        //     explicitly (instead of `select *`) so the insert no longer
        //     depends on the view's implicit column ordering matching the
        //     sink DDL.
        tableEnv.executeSql("insert into dwd_trade_order_detail" +
                " select id,course_id,course_name,order_id,user_id," +
                "origin_amount,coupon_reduce,final_amount,od_ts," +
                "order_status,province_id,create_time,update_time," +
                "`type`,`old`,oi_ts,source_id" +
                " from res_table");
    }
}
