package com.atguigu.edu.app.dwd.db;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import util.MyKafkaUtil;

/**
 * DWD-layer job: extracts user-registration events from the CDC stream in the
 * {@code ODS_BASE_DB} Kafka topic (new rows of {@code user_info}) and writes
 * them to the {@code dwd_user_register} Kafka topic via the upsert-kafka sink.
 *
 * <p>Pipeline: ODS_BASE_DB (Kafka) -> filter table='user_info' & type='insert'
 * -> dwd_user_register (upsert-kafka, keyed by id).
 */
public class DwdUserRegister {
    public static void main(String[] args) {
        // 1. Environment setup
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        env.setParallelism(4);

        // 2. Checkpointing — required for fault tolerance and for the
        // exactly-once guarantees of the Kafka sink. Default mode is EXACTLY_ONCE.
        env.enableCheckpointing(3000L);

        // 3. Register the ODS_BASE_DB Kafka topic as a source table.
        // NOTE(review): getTopicDDL presumably creates a table named ODS_BASE_DB
        // with a `data` map column plus `table`/`type`/`ts` metadata — confirm
        // against MyKafkaUtil.
        tableEnv.executeSql(MyKafkaUtil.getTopicDDL("user_login_group"));

        // Filter the CDC stream down to freshly inserted user_info rows.
        Table userRegisterTable = tableEnv.sqlQuery("select " +
                " data['id'] id," +
                " data['login_name'] login_name," +
                " data['gender'] gender," +
                " ts" +
                " from ODS_BASE_DB" +
                " where `table`='user_info' and `type`='insert'");
        tableEnv.createTemporaryView("user_register", userRegisterTable);

        // 4. Sink DDL: upsert-kafka keyed by user id so late updates for the
        // same id compact into one record.
        tableEnv.executeSql("create table dwd_user_register" +
                "(" +
                " id string," +
                " login_name string," +
                " gender string," +
                " ts string," +
                " primary key (id) not enforced" +
                ")" + MyKafkaUtil.getUpsertKafkaDDL("dwd_user_register"));

        // Explicit column list instead of `select *` so the insert does not
        // silently break if the view's column order ever changes.
        tableEnv.executeSql(
                "insert into dwd_user_register select id, login_name, gender, ts from user_register");
    }
}
