package com.atguigu.actual.edu0417.app.dim;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.actual.edu0417.app.BaseApp;
import com.atguigu.actual.edu0417.beans.TableProcess;
import com.atguigu.actual.edu0417.common.EduConfig;
import com.atguigu.actual.edu0417.func.DimSinkFunction;
import com.atguigu.actual.edu0417.func.TableProcessFunction;
import com.atguigu.actual.edu0417.unils.HbaseUtil;
import com.atguigu.actual.edu0417.unils.MyKafkaUtil;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.streaming.api.datastream.BroadcastConnectedStream;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSink;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.util.Collector;
import org.apache.hadoop.hbase.client.Connection;

import java.util.Properties;

/**
 * @author: techyang
 * @since: 2023-10-19 11:22
 * @description: DIM-layer dynamic stream splitting — routes dimension-table change
 *               records from Kafka to HBase according to broadcast CDC configuration
 **/
public class DimApp extends BaseApp {

    // Connection settings for the MySQL config database that holds the
    // table_process_dim routing table. Centralized so an environment change
    // touches one place.
    private static final String CONFIG_MYSQL_HOST = "hadoop101";
    private static final int CONFIG_MYSQL_PORT = 3306;
    private static final String CONFIG_DATABASE = "edu_config";
    private static final String CONFIG_TABLE = "edu_config.table_process_dim";
    private static final String CONFIG_MYSQL_USER = "root";
    // NOTE(review): credentials are hard-coded; move them to an external
    // config/secret store before this leaves a dev environment.
    private static final String CONFIG_MYSQL_PASSWORD = "000000";

    public static void main(String[] args) {
        new DimApp().base(args);
    }

    /**
     * Builds the DIM pipeline:
     * <ol>
     *   <li>read business change-log records from Kafka topic_db and keep only
     *       well-formed, non-bootstrap-marker events;</li>
     *   <li>read the dim routing configuration via Flink CDC, map it to
     *       {@link TableProcess} beans, and create/drop target HBase tables;</li>
     *   <li>broadcast the configuration, connect it to the main stream to filter
     *       out dimension records, and sink them to HBase.</li>
     * </ol>
     *
     * @param env the streaming execution environment supplied by {@link BaseApp}
     */
    @Override
    public void process(StreamExecutionEnvironment env) {
        // 1. Business-data stream: raw change-log JSON from Kafka, cleaned.
        SingleOutputStreamOperator<JSONObject> jsonObjDs = readDimSourceStream(env);

        // 2. Config stream: CDC rows mapped to beans, with HBase DDL applied.
        SingleOutputStreamOperator<TableProcess> dealTableDs = readConfigStream(env);
        // dealTableDs.print("dealTableDs>>>");

        // 3. Broadcast the config so every parallel subtask of the main stream
        //    can look up routing rules locally, then connect and filter dims.
        MapStateDescriptor<String, TableProcess> mapStateDescriptor
                = new MapStateDescriptor<String, TableProcess>("mapStateDescriptor", String.class, TableProcess.class);
        BroadcastStream<TableProcess> broadcastDs = dealTableDs.broadcast(mapStateDescriptor);
        BroadcastConnectedStream<JSONObject, TableProcess> connectDs = jsonObjDs.connect(broadcastDs);
        SingleOutputStreamOperator<JSONObject> dimTableJsonObjDs = connectDs.process(new TableProcessFunction(mapStateDescriptor));
        // dimTableJsonObjDs.print("dimTableJsonObjDs>>>");
        // TODO(review): enabling the print above produced unbounded output, which
        // implies this stream keeps putting data into HBase without stopping —
        // investigate TableProcessFunction before relying on this in production.

        // 4. Write the dimension records to HBase.
        dimTableJsonObjDs.addSink(new DimSinkFunction());
    }

    /**
     * Reads the topic_db change-log from Kafka, drops Maxwell bootstrap marker
     * records, and parses each remaining line into a {@link JSONObject}.
     * Malformed lines are logged and dropped so one bad record cannot kill the job.
     */
    private SingleOutputStreamOperator<JSONObject> readDimSourceStream(StreamExecutionEnvironment env) {
        String groupId = "dim_app_group";
        KafkaSource<String> kafkaSource = MyKafkaUtil.getKafkaSource(EduConfig.KAFKA_TOPIC_DB, groupId);
        DataStreamSource<String> kafkaStrDs = env.fromSource(kafkaSource, WatermarkStrategy.noWatermarks(), "kafka_source");
        // Sample record:
        // {"database":"edu","table":"review_info","type":"insert","ts":1697687528,"xid":74861,"commit":true,"data":{...}}
        // kafkaStrDs.print("kafkaStrDs>>>");
        return kafkaStrDs.process(new ProcessFunction<String, JSONObject>() {
            @Override
            public void processElement(String jsonStr, ProcessFunction<String, JSONObject>.Context context, Collector<JSONObject> collector) throws Exception {
                try {
                    JSONObject jsonObj = JSONObject.parseObject(jsonStr);
                    String type = jsonObj.getString("type");
                    // Maxwell emits marker records when a bootstrap starts/completes;
                    // they carry no row data, so skip them.
                    if (!"bootstrap-start".equals(type) && !"bootstrap-complete".equals(type)) {
                        collector.collect(jsonObj);
                    }
                } catch (Exception e) {
                    // Best-effort ETL: drop the malformed record and keep going.
                    // NOTE(review): prefer an SLF4J logger over printStackTrace once
                    // a logger is wired into this module.
                    e.printStackTrace();
                }
            }
        });
    }

    /**
     * Reads the dim routing configuration table via Flink CDC, converts each
     * Debezium change record into a {@link TableProcess} bean, and creates or
     * drops the corresponding HBase target table according to the change op.
     */
    private SingleOutputStreamOperator<TableProcess> readConfigStream(StreamExecutionEnvironment env) {
        // Properties props = new Properties();
        // props.setProperty("useSSL", "false");
        MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
                .hostname(CONFIG_MYSQL_HOST)
                .port(CONFIG_MYSQL_PORT)
                .databaseList(CONFIG_DATABASE)
                .tableList(CONFIG_TABLE)
                .username(CONFIG_MYSQL_USER)
                .password(CONFIG_MYSQL_PASSWORD)
                // .jdbcProperties(props)
                .serverTimeZone("Asia/Shanghai")
                .deserializer(new JsonDebeziumDeserializationSchema())
                // initial(): snapshot the existing config rows first, then stream the binlog.
                .startupOptions(StartupOptions.initial())
                .build();

        DataStreamSource<String> mysqlStrDs = env.fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "mysql_source");
        // Sample Debezium record ("op" is r/c/u/d):
        // {"before":null,"after":{"source_table":"user_info","sink_table":"dim_user_info","sink_family":"info",
        //  "sink_columns":"id,login_name,...","sink_row_key":"id"},"source":{...},"op":"r","ts_ms":1697698663423,...}
        // mysqlStrDs.print("mysqlStrDs");

        // Convert each change record to a TableProcess bean. For deletes the
        // config row lives in "before"; for reads/creates/updates it is in "after".
        SingleOutputStreamOperator<TableProcess> beanDs = mysqlStrDs.map(new MapFunction<String, TableProcess>() {
            @Override
            public TableProcess map(String jsonStr) throws Exception {
                JSONObject jsonObj = JSONObject.parseObject(jsonStr);
                String op = jsonObj.getString("op");
                JSONObject row = "d".equals(op)
                        ? jsonObj.getJSONObject("before")
                        : jsonObj.getJSONObject("after");
                TableProcess tableProcess = new TableProcess();
                tableProcess.setSourceTable(row.getString("source_table"));
                tableProcess.setSinkTable(row.getString("sink_table"));
                tableProcess.setSinkFamily(row.getString("sink_family"));
                tableProcess.setSinkColumns(row.getString("sink_columns"));
                tableProcess.setSinkRowKey(row.getString("sink_row_key"));
                tableProcess.setOp(op);
                return tableProcess;
            }
        });

        // Apply the config change to HBase: create the target table on insert/read,
        // drop it on delete, and drop+recreate it on update (the schema — family,
        // columns — may have changed, and existing data for the old schema is discarded).
        return beanDs.process(new ProcessFunction<TableProcess, TableProcess>() {

            private Connection conn;

            @Override
            public void open(Configuration parameters) throws Exception {
                conn = HbaseUtil.getHbaseConnection();
            }

            @Override
            public void close() throws Exception {
                HbaseUtil.closeHbaseConnection(conn);
            }

            @Override
            public void processElement(TableProcess tableProcess, ProcessFunction<TableProcess, TableProcess>.Context ctx, Collector<TableProcess> out) throws Exception {
                String op = tableProcess.getOp();
                String sinkTableName = tableProcess.getSinkTable();
                String sinkFamily = tableProcess.getSinkFamily();
                if ("d".equals(op)) {
                    HbaseUtil.dropHbaseTable(conn, EduConfig.HBASE_NAMESPACE, sinkTableName);
                } else if ("u".equals(op)) {
                    // Drop the old table first, then recreate it with the new family.
                    HbaseUtil.dropHbaseTable(conn, EduConfig.HBASE_NAMESPACE, sinkTableName);
                    HbaseUtil.createHbaseTable(conn, EduConfig.HBASE_NAMESPACE, sinkTableName, sinkFamily);
                } else {
                    // "r" (snapshot read) or "c" (insert): create the target table.
                    HbaseUtil.createHbaseTable(conn, EduConfig.HBASE_NAMESPACE, sinkTableName, sinkFamily);
                }
                // Forward the bean downstream so it can be broadcast as routing config.
                out.collect(tableProcess);
            }
        });
    }
}
