package com.mai.realtime.app.dwd.db;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Creator: LBG
 * @CreateTime: 2022-09-01  09:35
 */
/**
 * DWD-layer Flink SQL job: reads the raw CDC stream (Maxwell-style JSON) from the
 * {@code ods_db} Kafka topic, keeps only {@code insert} events on the {@code cart_info}
 * table of the {@code gmall} database, and writes the projected cart-add records to the
 * {@code dwd_trade_cart_add_inc} Kafka topic as JSON.
 */
public class dwd_trade_cart_add_inc {

    public static void main(String[] args) {
        // HDFS user for checkpoint storage (only relevant once checkpointing below is re-enabled).
        System.setProperty("HADOOP_USER_NAME", "root");
        Configuration conf = new Configuration();
        conf.setInteger("rest.port", 3001); // local Flink web UI port
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        env.setParallelism(5);

        // Checkpointing is disabled for local development; re-enable for production runs.
//        env.enableCheckpointing(3000);
//        env.setStateBackend(new HashMapStateBackend());
//        env.getCheckpointConfig().setCheckpointStorage("hdfs://39.108.71.119:8020/gmall/" + "dwd_trade_cart_add_inc");
//        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
//        env.getCheckpointConfig().setCheckpointTimeout(20 * 1000);
//        env.getCheckpointConfig().setMaxConcurrentCheckpoints(1);
//        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(500);
//        env.getCheckpointConfig().setExternalizedCheckpointCleanup(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);

        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
        // Give the SQL job a name.
//        tEnv.getConfig().getConfiguration().setString("pipeline.name", "dwd_trade_cart_add_inc");

        // Source table: raw ods_db CDC stream from Kafka.
        tEnv.executeSql("create table ods_db(" +
                "`database` string," +
                "`table` string," +
                "`type` string," +
                "`ts` string," +
                "`xid` string," +
                "`commit` string," +
                "`data` map<string,string>)" +
                "with(" +
                "'connector'='kafka'," +
                "'properties.bootstrap.servers'='hadoop162:9092,hadoop163:9092,hadoop164:9092'," +
                "'properties.group.id'='dwd_trade_cart_add_inc'," +
                "'topic'='ods_db'," +
                "'format'='json'," +
                "'scan.startup.mode'='latest-offset')");                      //latest  earliest

        // Keep only insert events on gmall.cart_info and project the DWD columns.
        // Renamed from `userInfo`: the query selects cart-add events, not user data.
        Table cartAddInfo = tEnv.sqlQuery(" select " +
                "data['id'] id," +
                "data['user_id'] user_id," +
                "data['course_id'] course_id," +
                "data['course_name'] course_name," +
                "data['cart_price'] cart_price," +
                "date_format(data['create_time'],'yyyy-MM-dd') date_id," +
                "data['create_time'] create_time," +
                "data['update_time'] update_time," +
                "data['deleted'] deleted," +
                "data['sold'] sold," +
                " ts    dt " +
                "from ods_db " +
                "where  `database`='gmall' and  " +
                "`table`='cart_info' and " +
                "`type`='insert' ");
        // NOTE(review): the previous createTemporaryView("userInfo", ...) registration was
        // never referenced anywhere in this job, so it has been removed as dead code.

        // Sink table: dynamic table bound to the dwd_trade_cart_add_inc Kafka topic.
        tEnv.executeSql("create table dwd_trade_cart_add_inc(" +
                "`id` string," +
                "`user_id` string," +
                "`course_id` string," +
                "`course_name` string," +
                "`cart_price` string," +
                "`date_id` string," +
                "`create_time` string," +
                "`update_time` string," +
                "`deleted` string," +
                "`sold` string," +
                "`dt` string)" +
                "with(" +
                "'connector'='kafka'," +
                "'properties.bootstrap.servers'='hadoop162:9092'," +
                "'topic'='dwd_trade_cart_add_inc'," +
                "'format'='json')");

        // Write the filtered stream to the sink topic.
        cartAddInfo.executeInsert("dwd_trade_cart_add_inc");
    }
}
