package com.atguigu.app.dim;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONException;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.app.func.MyBroadcastFunction;
import com.atguigu.app.func.MyPhoenixSink;
import com.atguigu.bean.TableProcess;
import com.atguigu.util.KafkaUtil;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.streaming.api.datastream.BroadcastConnectedStream;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

/**
 * Dimension-layer sink job.
 *
 * <p>Reads the business-database change stream from Kafka topic {@code topic_db},
 * drops non-JSON records and Maxwell bootstrap markers, joins the cleaned stream
 * against a broadcast dimension-table configuration (read from MySQL via Flink CDC),
 * and writes the surviving dimension rows to Phoenix/HBase.
 */
public class DimSinkApp {
    public static void main(String[] args) throws Exception {
        // HDFS user for checkpoint storage (relevant once checkpointing below is re-enabled).
        System.setProperty("HADOOP_USER_NAME", "atguigu");

        // TODO 1: stream execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 for local development; raise for production deployments.
        env.setParallelism(1);

        // TODO 2: checkpointing and state backend — intentionally disabled for local runs.
        /*env.enableCheckpointing(5*60*1000L, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(3*60*1000L);
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.
                            ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        env.setStateBackend(new HashMapStateBackend());*/

        // TODO 3: consume the raw change-log records from Kafka.
        String topicName = "topic_db";
        String groupId = "dim_sink_app";
        DataStreamSource<String> topicDbStream =
                env.addSource(KafkaUtil.getKafkaConsumer(topicName, groupId));

        // TODO 4: parse to JSON and filter out dirty data.
        // Dirty data = records that are not valid JSON, plus Maxwell bootstrap markers
        // ("bootstrap-start" / "bootstrap-complete"); both are diverted to a side output.
        OutputTag<String> dirtyTag = new OutputTag<String>("Dirty") {};
        SingleOutputStreamOperator<JSONObject> jsonObjStream =
                topicDbStream.process(new ProcessFunction<String, JSONObject>() {
                    @Override
                    public void processElement(String value, Context ctx, Collector<JSONObject> out) throws Exception {
                        try {
                            JSONObject jsonObject = JSON.parseObject(value);
                            String type = jsonObject.getString("type");
                            if ("bootstrap-start".equals(type) || "bootstrap-complete".equals(type)) {
                                // Bootstrap markers carry no row data — route to the side output.
                                ctx.output(dirtyTag, value);
                            } else {
                                out.collect(jsonObject);
                            }
                        } catch (JSONException e) {
                            // Unparseable record — keep the raw payload in the side output for inspection.
                            // NOTE(review): prefer an SLF4J logger over printStackTrace().
                            e.printStackTrace();
                            ctx.output(dirtyTag, value);
                        }
                    }
                });

        // TODO 5: read the dimension-table configuration with Flink CDC.
        // initial(): take a snapshot of existing rows first, then stream the binlog.
        // NOTE(review): credentials are hard-coded — move to external configuration / secrets.
        MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
                .hostname("hadoop102")
                .port(3306)
                .username("root")
                .password("W5433e123456")
                .databaseList("edu_config")
                .tableList("edu_config.table_process")
                .startupOptions(StartupOptions.initial())
                .deserializer(new JsonDebeziumDeserializationSchema())
                .build();

        DataStreamSource<String> configStream =
                env.fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "mysql_source");
        configStream.print("配置表>>>>>");

        // TODO 6: broadcast the configuration and connect it with the main stream.
        // Broadcast state: key = source table name (decides whether a record is a
        // dimension row), value = TableProcess bean (drives Phoenix table creation).
        MapStateDescriptor<String, TableProcess> mapStateDescriptor =
                new MapStateDescriptor<>("tableProcessState", String.class, TableProcess.class);
        BroadcastStream<String> broadcastStream = configStream.broadcast(mapStateDescriptor);
        BroadcastConnectedStream<JSONObject, String> connectedStream =
                jsonObjStream.connect(broadcastStream);

        // TODO 7: keep only dimension-table rows; MyBroadcastFunction stamps each record
        // with its target table, e.g.:
        // { "sinkTable": "dim_user_info", "id": 1796, "name": "...", "gender": "F", ... }
        SingleOutputStreamOperator<JSONObject> dimStream =
                connectedStream.process(new MyBroadcastFunction(mapStateDescriptor));

        // TODO 8: upsert each dimension row into Phoenix.
        dimStream.addSink(new MyPhoenixSink());

        // TODO 9: submit the job.
        env.execute(groupId);
    }
}
