package com.mai.realtime.app.dwd.db;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Creator: LBG
 * @CreateTime: 2022-09-01  09:35
 */
/**
 * DWD-layer Flink SQL job: successful-payment order-detail fact stream.
 *
 * <p>Pipeline: reads the raw CDC change stream from Kafka topic {@code ods_db},
 * filters {@code insert} events of {@code payment_info} and {@code order_detail}
 * (database {@code gmall}), joins the two on {@code order_id}, and writes the
 * joined rows as JSON to Kafka topic {@code dwd_trade_pay_detail_suc_inc}.
 *
 * <p>NOTE(review): class name should be UpperCamelCase per Java convention, but
 * it is kept as-is because launch scripts likely reference it by this name.
 */
public class dwd_trade_pay_detail_suc_inc {

    /**
     * Builds and submits the streaming SQL job. Runs until cancelled.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        // HDFS user for checkpoint storage (only relevant once checkpointing is re-enabled).
        System.setProperty("HADOOP_USER_NAME", "root");
        Configuration conf = new Configuration();
        conf.setInteger("rest.port", 3003); // fixed Web UI port for local debugging
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        env.setParallelism(5);

        // Checkpointing is intentionally disabled (debugging); re-enable for production.
//        env.enableCheckpointing(3000);
//        env.setStateBackend(new HashMapStateBackend());
//        env.getCheckpointConfig().setCheckpointStorage("hdfs://39.108.71.119:8020/gmall/" + "dwd_trade_pay_detail_suc_inc");
//        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
//        env.getCheckpointConfig().setCheckpointTimeout(20 * 1000);
//        env.getCheckpointConfig().setMaxConcurrentCheckpoints(1);
//        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(500);
//        env.getCheckpointConfig().setExternalizedCheckpointCleanup(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);

        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
        // Name the SQL job so it is identifiable in the Flink Web UI.
        tEnv.getConfig().getConfiguration().setString("pipeline.name", "dwd_trade_pay_detail_suc_inc");
        // The regular (non-windowed) join below keeps BOTH input sides in Flink state
        // forever by default, so state grows without bound. Cap the state lifetime;
        // a payment normally follows its order detail within minutes.
        // NOTE(review): confirm 15 min + 5 s covers the business payment timeout.
        tEnv.getConfig().getConfiguration().setString("table.exec.state.ttl", "905 s");

        // Source: Maxwell-style CDC records from the ods_db topic.
        tEnv.executeSql("create table ods_db(" +
                "`database` string," +
                "`table` string," +
                "`type` string," +
                "`ts` string," +
                "`xid` string," +
                "`commit` string," +
                "`data` map<string,string>)" +
                "with(" +
                "'connector'='kafka'," +
                "'properties.bootstrap.servers'='hadoop162:9092,hadoop163:9092,hadoop164:9092'," +
                "'properties.group.id'='dwd_trade_pay_detail_suc_inc'," +
                "'topic'='ods_db'," +
                "'format'='json'," +
                "'scan.startup.mode'='latest-offset')");                      // latest-offset for debugging; earliest-offset to backfill

        // Filter: insert events of the payment table payment_info.
        // NOTE(review): no payment_status filter is applied here — presumably
        // inserts already represent successful payments; verify against the source schema.
        Table paymentInfo = tEnv.sqlQuery("select  " +
                "data['order_id'] order_id, " +
                "data['payment_type'] payment_type, " +
                "data['payment_status'] payment_status, " +
                "data['create_time'] create_time, " +
                "data['callback_content'] callback_content, " +
                "data['callback_time'] callback_time   ," +
                "ts dt  " +                                   // CDC event timestamp reused as dt
                "from ods_db            " +
                "where  `database`='gmall' and  " +
                "`table`='payment_info' and " +
                "`type`='insert' ");
        tEnv.createTemporaryView("paymentInfo", paymentInfo);

        // Filter: insert events of order_detail; derive date_id from create_time.
        Table orderDetail = tEnv.sqlQuery("select   " +
                "data['id'] id," +
                "data['course_id'] course_id," +
                "data['course_name'] course_name," +
                "data['order_id'] order_id," +
                "data['user_id'] user_id," +
                "data['origin_amount'] origin_amount," +
                "data['coupon_reduce'] coupon_reduce," +
                "data['final_amount'] final_amount," +
                " date_format(  data['create_time'], 'yyyy-MM-dd') date_id," +
                "data['create_time'] create_time," +
                "data['update_time'] update_time   " +
                "from ods_db            " +
                "where  `database`='gmall' and  " +
                "`table`='order_detail' and " +
                "`type`='insert' ");
        tEnv.createTemporaryView("orderDetail", orderDetail);

        // Join payments with their order-detail rows on order_id (inner, regular join;
        // state lifetime bounded by table.exec.state.ttl above).
        Table payResult = tEnv.sqlQuery("select   " +
                "od.id," +
                "od.order_id," +
                "od.course_id," +
                "od.course_name," +
                "pi.payment_type," +
                "od.date_id," +
                "pi.callback_time," +
                "od.origin_amount," +
                "od.coupon_reduce," +
                "od.final_amount," +
                "pi.dt   " +
                "from paymentInfo pi join orderDetail od   " +
                "on pi.order_id = od.order_id");
        tEnv.createTemporaryView("payResult", payResult);

        // Sink: dynamic table mapped to the DWD Kafka topic. Broker list kept
        // consistent with the source table so the job survives a single-broker outage.
        tEnv.executeSql("create table dwd_trade_pay_detail_suc_inc(" +
                "id string," +
                "order_id string," +
                "course_id string," +
                "course_name string," +
                "payment_type string," +
                "date_id string," +
                "callback_time string," +
                "origin_amount string," +
                "coupon_reduce string," +
                "final_amount string," +
                "dt string)" +
                "with(" +
                "'connector'='kafka'," +
                "'properties.bootstrap.servers'='hadoop162:9092,hadoop163:9092,hadoop164:9092'," +
                "'topic'='dwd_trade_pay_detail_suc_inc'," +
                "'format'='json')");

        // Submit: continuously write the joined stream into the sink topic.
        payResult.executeInsert("dwd_trade_pay_detail_suc_inc");
    }
}
