package com.atguigu.gmall.app.dim;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.app.func.MyBroadcastFunction;
import com.atguigu.gmall.bean.TableProcess;
import com.atguigu.gmall.sink.MyPhoenixSink;
import com.atguigu.gmall.utils.KafkaUtil;

import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.streaming.api.datastream.BroadcastConnectedStream;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import scala.reflect.internal.Trees;

/**
 * Dimension-sink job: consumes raw change-log records from the {@code topic_db} Kafka topic,
 * filters out non-data records, enriches the stream with a broadcast MySQL-CDC configuration
 * table ({@code gmall_config.table_process}), and writes dimension rows to Phoenix.
 */
public class DimSinkApp {
    public static void main(String[] args) {
        // TODO Create the execution environment.
        // NOTE(review): parallelism is pinned to 1 — acceptable for dev; make it configurable for production.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // TODO Read the raw change-log stream from Kafka.
        String topicName = "topic_db";
        String groupID = "dim_sink_app";
        DataStreamSource<String> kafkaSource =
                env.addSource(KafkaUtil.getFlinkConsumer(topicName, groupID));
        kafkaSource.print("stringDataStreamSource>>>>>>>>>>>>>>>>>>>>>>");

        // TODO Filter the data: bootstrap control markers and unparseable payloads go to a side output.
        OutputTag<String> dirtyTag = new OutputTag<String>("Dirty") {};
        SingleOutputStreamOperator<JSONObject> jsonStream =
                kafkaSource.process(new ProcessFunction<String, JSONObject>() {
                    @Override
                    public void processElement(String value, Context ctx, Collector<JSONObject> out) throws Exception {
                        try {
                            JSONObject jsonObject = JSON.parseObject(value);
                            String type = jsonObject.getString("type");
                            if ("bootstrap-start".equals(type) || "bootstrap-complete".equals(type)) {
                                // Bootstrap control records carry no row data — divert them.
                                ctx.output(dirtyTag, value);
                            } else {
                                out.collect(jsonObject);
                            }
                        } catch (Exception e) {
                            // Malformed JSON: route the raw payload to the dirty side output
                            // instead of failing the job.
                            e.printStackTrace();
                            ctx.output(dirtyTag, value);
                        }
                    }
                });
        // NOTE(review): the "Dirty" side output is never consumed (no getSideOutput call);
        // consider printing or sinking it so bad records are observable.

        // TODO Read the configuration table via MySQL CDC.
        // NOTE(review): credentials are hard-coded in source — move to configuration/secret storage.
        MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
                .hostname("hadoop100")
                .port(3306)
                .username("root")
                .password("Atguigu123456")
                .databaseList("gmall_config")
                .tableList("gmall_config.table_process")
                .startupOptions(StartupOptions.initial())
                .deserializer(new JsonDebeziumDeserializationSchema())
                .build();

        DataStreamSource<String> configSource =
                env.fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "mysql_source");
        configSource.print();

        // Broadcast the config-table state so every main-stream subtask can look it up.
        MapStateDescriptor<String, TableProcess> tableProcessState =
                new MapStateDescriptor<>("tableProcessState", String.class, TableProcess.class);
        BroadcastStream<String> broadcast = configSource.broadcast(tableProcessState);

        // Connect the data stream with the broadcast config stream and sink dims to Phoenix.
        BroadcastConnectedStream<JSONObject, String> connect = jsonStream.connect(broadcast);
        SingleOutputStreamOperator<JSONObject> process =
                connect.process(new MyBroadcastFunction(tableProcessState));
        process.print();
        process.addSink(new MyPhoenixSink());

        // TODO Submit the job. Fail loudly: previously the exception was only printed and the
        // process exited with status 0, silently hiding job-submission/execution failures.
        try {
            env.execute();
        } catch (Exception e) {
            throw new RuntimeException("env.execute() failed for DimSinkApp", e);
        }
    }
}
