package cn.dmrliu.edu.realtime.app.dwd.db;

import cn.dmrliu.edu.realtime.util.KafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD-layer streaming job: builds the trade order-detail fact stream.
 *
 * <p>Reads change-log records from the ODS Kafka topic (registered via
 * {@code KafkaUtil.getEduDbDDL}), filters the insert events of the
 * {@code order_detail} and {@code order_info} tables, joins them on the
 * order id, and writes the widened rows to the upsert-kafka topic
 * {@code edu_dwd_trade_order_detail} keyed by the detail id.
 */
public class DwdTradeOrderDetail {
    public static void main(String[] args) throws Exception {
        // TODO 1. Prepare the basic environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // TODO 2. Checkpoint settings — required for exactly-once delivery
        // guarantees of the Kafka sink below.
        env.enableCheckpointing(60_000L);

        // The regular (unbounded) join in step 6 would otherwise keep both
        // input sides in state forever; cap idle-state retention so state for
        // completed orders is eventually cleaned up.
        tableEnv.getConfig().setIdleStateRetention(Duration.ofHours(1));

        // TODO 3. Read the ODS change data from Kafka as a dynamic table
        String group = "edu_dwd_trade_order_group";
        tableEnv.executeSql(KafkaUtil.getEduDbDDL(group));

        // TODO 4. Filter order-detail insert events
        Table orderDetail = tableEnv.sqlQuery("select \n" +
                " `data`['id'] id,\n" +
                " `data`['course_id'] course_id,\n" +
                " `data`['course_name'] course_name,\n" +
                " `data`['order_id'] order_id,\n" +
                " `data`['origin_amount'] origin_amount,\n" +
                " `data`['coupon_reduce'] coupon_reduce,\n" +
                " `data`['final_amount'] final_amount,\n" +
                " `data`['create_time'] create_time,\n" +
                " ts,\n" +
                " proc_time\n" +
                "from edu_db\n" +
                "where `table` = 'order_detail'\n" +
                "and `type` = 'insert'");
        tableEnv.createTemporaryView("order_detail", orderDetail);

        // TODO 5. Filter order-info insert events
        Table orderInfo = tableEnv.sqlQuery("select \n" +
                " `data`['id'] id,\n" +
                " `data`['user_id'] user_id,\n" +
                " `data`['province_id'] province_id\n" +
                "from edu_db\n" +
                "where `table` = 'order_info'\n" +
                "and `type` = 'insert'");
        tableEnv.createTemporaryView("order_info", orderInfo);

        // TODO 6. Join detail rows with their order header to attach
        // user_id / province_id and derive the partition date.
        Table joinTable = tableEnv.sqlQuery("select \n" +
                "  od.id,\n" +
                "  od.order_id,\n" +
                "  oi.user_id,\n" +
                "  od.course_id,\n" +
                "  od.course_name,\n" +
                "  od.origin_amount,\n" +
                "  od.coupon_reduce,\n" +
                "  od.final_amount,\n" +
                "  oi.province_id,\n" +
                "  date_format(od.create_time, 'yyyy-MM-dd') date_id,\n" +
                "  od.ts\n" +
                "from order_detail od\n" +
                "join order_info oi\n" +
                "on od.order_id = oi.id");
        tableEnv.createTemporaryView("join_table", joinTable);

        // TODO 7. Write the joined rows to Kafka. The join emits retractions,
        // so an upsert-kafka sink with a primary key is required.
        tableEnv.executeSql("CREATE TABLE dwd_trade_order_detail (\n" +
                "  id string,\n" +
                "  order_id string,\n" +
                "  user_id string,\n" +
                "  course_id string,\n" +
                "  course_name string,\n" +
                "  origin_amount string,\n" +
                "  coupon_reduce string,\n" +
                "  final_amount string,\n" +
                "  province_id string,\n" +
                "  date_id string,\n" +
                "  ts string,\n" +
                "  PRIMARY KEY (id) NOT ENFORCED\n" +
                ")"
                + KafkaUtil.getUpsertKafkaDDL("edu_dwd_trade_order_detail")
        );
        // Submits the streaming insert job; column order of join_table matches
        // the sink schema declared above.
        tableEnv.executeSql("insert into dwd_trade_order_detail select * from join_table");
    }
}
