package com.atguigu.edu.app.dwd.db;

import com.atguigu.edu.util.MyKafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;


/**
 * 测试：maxwell、zk、kafka、DwdPayDetailSuc
 */
/**
 * DWD job: extracts successful-payment records from the ODS CDC stream
 * (Maxwell -> Kafka topic ODS_BASE_DB) and writes them to the
 * dwd_trade_pay_suc upsert-kafka topic.
 *
 * Pipeline dependencies: Maxwell, ZooKeeper, Kafka.
 */
public class DwdPayDetailSuc {
    public static void main(String[] args) {
        // 1. Environment setup
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        env.setParallelism(4);

        // 2. Checkpoint configuration
        // NOTE(review): checkpointing is not enabled; without it the Kafka sink
        // provides no exactly-once/at-least-once guarantees on failure. TODO:
        // enable checkpointing (interval, mode, state backend) before production.

        // 3. Create the Kafka connector table reading the ODS_BASE_DB topic.
        tableEnv.executeSql(MyKafkaUtil.getTopicDDL("user_pay_group"));

        // 4. Select successful payments: table = payment_info, Maxwell op type
        //    = update, payment_status = '1602' (paid), and a non-null callback.
        //    (Previously only the table name was filtered, letting inserts and
        //    non-successful states leak into the DWD success topic.)
        Table paySucTable = tableEnv.sqlQuery("select " +
                " data['id'] id," +
                " data['order_id'] order_id," +
                " data['total_amount'] total_amount," +
                " data['payment_type'] payment_type," +
                " data['payment_status'] payment_status," +
                " data['create_time'] create_time," +
                " data['update_time'] update_time," +
                " data['callback_content'] callback_content," +
                " data['callback_time'] callback_time," +
                " ts" +
                " from ODS_BASE_DB" +
                " where `table`='payment_info'" +
                " and `type`='update'" +
                " and data['payment_status']='1602'" +
                " and data['callback_content'] is not null");
        tableEnv.createTemporaryView("pay_suc", paySucTable);

        // 5. Create the sink table mapped to the dwd_trade_pay_suc Kafka topic.
        //    upsert-kafka requires a primary key (id) for changelog semantics.
        tableEnv.executeSql("create table dwd_trade_pay_suc" +
                "(" +
                "id string," +
                "order_id string," +
                "total_amount string," +
                "payment_type string," +
                "payment_status string," +
                "create_time string," +
                "update_time string," +
                "callback_content string," +
                "callback_time string," +
                "ts string," +
                "primary key(id) not enforced" +
                ")" + MyKafkaUtil.getUpsertKafkaDDL("dwd_trade_pay_suc"));

        // 6. Insert the filtered rows into the sink table; executeSql submits
        //    the job, so no explicit env.execute() is needed for this pipeline.
        tableEnv.executeSql("insert into dwd_trade_pay_suc select * from pay_suc");
    }
}
