package cn.dmrliu.edu.realtime.app.dim;

import cn.dmrliu.edu.realtime.app.func.DimSinkFunction;
import cn.dmrliu.edu.realtime.app.func.DimTableProcessFunction;
import cn.dmrliu.edu.realtime.bean.TableProcess;
import cn.dmrliu.edu.realtime.util.KafkaUtil;
import com.alibaba.fastjson.JSONObject;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.streaming.api.datastream.BroadcastConnectedStream;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;

import java.util.Properties;

/**
 * DIM-layer Flink job: reads business-table change records from Kafka,
 * matches each record against a broadcast configuration table streamed from
 * MySQL via CDC, and sinks the qualifying dimension rows to HBase.
 *
 * TODO: add handling for the VIP change table.
 */
// NOTE(review): class name should be UpperCamelCase (DimApp); kept as-is so
// existing launch configurations / job submissions keep working.
public class dimApp {
    public static void main(String[] args) throws Exception {
        // TODO 1. Execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);

        // TODO 2. Checkpointing — NOT yet enabled. Without checkpoints this job
        // has no fault-tolerance guarantees; enable before production use.

        // TODO 3. Read change-log data from the Kafka topic
        String topic = "edu_db";
        String group = "edu_dim_group";
        KafkaSource<String> kafkaSource = KafkaUtil.getKafkaSource(topic, group);
        DataStreamSource<String> kafkaDS = env.fromSource(kafkaSource, WatermarkStrategy.noWatermarks(), "kafka source");

        // TODO 4. Simple ETL: parse each record to JSON and drop bootstrap
        // marker records (presumably emitted by Maxwell — they carry no row data).
        SingleOutputStreamOperator<JSONObject> etlJsonObjectDS = kafkaDS.process(new ProcessFunction<String, JSONObject>() {
            @Override
            public void processElement(String jsonStr, Context ctx, Collector<JSONObject> out) throws Exception {
                try {
                    JSONObject jsonObject = JSONObject.parseObject(jsonStr);
                    String type = jsonObject.getString("type");
                    if (!"bootstrap-start".equals(type) && !"bootstrap-complete".equals(type)) {
                        out.collect(jsonObject);
                    }
                } catch (Exception e) {
                    // Fix: include the offending record so malformed input can be
                    // diagnosed instead of silently discarded with only a stack trace.
                    System.err.println("dimApp ETL: discarding malformed record: " + jsonStr);
                    e.printStackTrace();
                }
            }
        });

        // TODO 5. Build the configuration stream (MySQL CDC) and broadcast it
        Properties properties = new Properties();
        properties.setProperty("useSSL", "false");
        // SECURITY(review): database credentials are hard-coded in source.
        // Move them to a config file / environment variable / secret manager.
        MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
                .hostname("h102")
                .port(3306)
                .username("root")
                .password("j)k*x~y0)*n_L)!o{y?C`w(1rV_viw")
                .jdbcProperties(properties)
                .databaseList("edu_config")
                .tableList("edu_config.table_process")
                // initial(): snapshot the existing config rows, then stream changes
                .startupOptions(StartupOptions.initial())
                .deserializer(new JsonDebeziumDeserializationSchema())
                .build();
        DataStreamSource<String> mysqlCDC = env.fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "mysql source");
        // Broadcast state: table name -> processing rule for that table
        MapStateDescriptor<String, TableProcess> mapStateDescriptor = new MapStateDescriptor<>("edu_config.table_process", String.class, TableProcess.class);
        BroadcastStream<String> broadcastDS = mysqlCDC.broadcast(mapStateDescriptor);

        // TODO 6. Connect the main stream with the broadcast config stream and
        // filter/shape dimension records according to the broadcast rules
        BroadcastConnectedStream<JSONObject, String> connectDS = etlJsonObjectDS.connect(broadcastDS);
        SingleOutputStreamOperator<JSONObject> processDS = connectDS.process(new DimTableProcessFunction(mapStateDescriptor));

        // TODO 7. Write dimension rows to HBase
        processDS.addSink(new DimSinkFunction());

        env.execute();
    }
}
