package com.atguigu.app.dwd;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.ververica.cdc.connectors.mysql.MySQLSource;
import com.alibaba.ververica.cdc.debezium.DebeziumDeserializationSchema;
import com.alibaba.ververica.cdc.debezium.DebeziumSourceFunction;
import com.atguigu.bean.TableProcess;
import com.atguigu.common.GmallConfig;
import com.atguigu.utils.MyKafkaUtil;
import io.debezium.data.Envelope;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ReadOnlyBroadcastState;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.source.SourceRecord;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.*;

/**
 * DWD-layer dispatcher job.
 *
 * <p>Reads MySQL change-log records from the Kafka topic {@code ods_base_db_m},
 * reads the {@code table_process} routing-configuration table via MySQL CDC and
 * broadcasts it, then routes every data record either to the Kafka main stream
 * or to an HBase side output according to the broadcast configuration. For
 * HBase-bound tables the matching Phoenix table is created on the fly.
 */
public class DbBaseApp3 {

    public static void main(String[] args) throws Exception {

        //1. Create the execution environment (parallelism 1 to match the single Kafka partition setup)
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        //1.1 State backend (enable for production runs)
//        env.setStateBackend(new FsStateBackend("hdfs://hadoop102:8020/gmall/dwd_log/ck"));
//        //1.2 Checkpointing
//        env.enableCheckpointing(10000L, CheckpointingMode.EXACTLY_ONCE);
//        env.getCheckpointConfig().setCheckpointTimeout(60000L);

        //2. Read ODS change-log records from Kafka
        FlinkKafkaConsumer<String> kafkaSource = MyKafkaUtil.getKafkaSource("ods_base_db_m", "ods_db_group");
        DataStreamSource<String> kafkaDS = env.addSource(kafkaSource);

        //3. Parse each line into a JSON object
        SingleOutputStreamOperator<JSONObject> jsonObjDS = kafkaDS.map(JSON::parseObject);

        //4. Drop records whose "data" payload is missing or empty (e.g. deletes without an after-image)
        SingleOutputStreamOperator<JSONObject> filterDS = jsonObjDS.filter(new FilterFunction<JSONObject>() {
            @Override
            public boolean filter(JSONObject value) throws Exception {
                String data = value.getString("data");
                return data != null && data.length() > 0;
            }
        });

        //debug print
//        filterDS.print();

        //5. MySQL CDC source watching the table_process routing-configuration table
        DebeziumSourceFunction<String> sourceFunction = MySQLSource.<String>builder()
                .hostname("hadoop102")
                .port(3306)
                .username("root")
                .password("000000")
                .databaseList("gmall2021-realtime")
                .tableList("gmall2021-realtime.table_process")
                .deserializer(new DebeziumDeserializationSchema<String>() {
                    /**
                     * Converts a Debezium {@link SourceRecord} into a flat JSON string:
                     * {"database":"...","table":"...","type":"...","data":{...}}.
                     */
                    @Override
                    public void deserialize(SourceRecord sourceRecord, Collector<String> collector) throws Exception {
                        //Topic has the form <server>.<database>.<table>
                        String topic = sourceRecord.topic();
                        String[] split = topic.split("\\.");
                        String db = split[1];
                        String table = split[2];

                        //Flatten the "after" image into a JSON object (null for deletes)
                        Struct value = (Struct) sourceRecord.value();
                        Struct after = value.getStruct("after");
                        JSONObject data = new JSONObject();
                        if (after != null) {
                            Schema schema = after.schema();
                            for (Field field : schema.fields()) {
                                data.put(field.name(), after.get(field.name()));
                            }
                        }

                        //Operation type (create/update/delete/read)
                        Envelope.Operation operation = Envelope.operationFor(sourceRecord);

                        //Assemble the result envelope
                        JSONObject result = new JSONObject();
                        result.put("database", db);
                        result.put("table", table);
                        //Locale.ROOT: operation names are ASCII; avoid locale-sensitive casing
                        result.put("type", operation.toString().toLowerCase(Locale.ROOT));
                        result.put("data", data);

                        collector.collect(result.toJSONString());
                    }

                    @Override
                    public TypeInformation<String> getProducedType() {
                        return TypeInformation.of(String.class);
                    }
                })
                .build();

        //6. Read the configuration stream from MySQL
        DataStreamSource<String> tableProcessDS = env.addSource(sourceFunction);

        //7. Broadcast the configuration stream, keyed by "sourceTable:operateType"
        MapStateDescriptor<String, TableProcess> mapStateDescriptor = new MapStateDescriptor<>("table-process-state", String.class, TableProcess.class);
        BroadcastStream<String> broadcastStream = tableProcessDS.broadcast(mapStateDescriptor);

        //8. Connect the data stream with the broadcast configuration stream
        BroadcastConnectedStream<JSONObject, String> connectedStream = filterDS.connect(broadcastStream);

        //9. Route every record using the broadcast configuration
        OutputTag<JSONObject> hbaseTag = new OutputTag<JSONObject>(TableProcess.SINK_TYPE_HBASE) {
        };
        SingleOutputStreamOperator<JSONObject> result = connectedStream.process(new BroadcastProcessFunction<JSONObject, String, JSONObject>() {

            //Phoenix JDBC connection, one per task; opened in open(), released in close()
            private Connection connection = null;

            @Override
            public void open(Configuration parameters) throws Exception {
                //Initialize the Phoenix connection
                Class.forName(GmallConfig.PHOENIX_DRIVER);
                connection = DriverManager.getConnection(GmallConfig.PHOENIX_SERVER);
            }

            @Override
            public void close() throws Exception {
                //Release the Phoenix connection when the task shuts down (fixes a resource leak)
                if (connection != null) {
                    connection.close();
                }
            }

            /**
             * Keeps only the columns listed in the sink_columns configuration.
             *
             * @param data        record payload to filter in place
             * @param sinkColumns comma-separated list of columns to retain
             */
            private void filterColumn(JSONObject data, String sinkColumns) {
                List<String> fieldList = Arrays.asList(sinkColumns.split(","));
                data.entrySet().removeIf(next -> !fieldList.contains(next.getKey()));
            }

            @Override
            public void processElement(JSONObject jsonObject, ReadOnlyContext readOnlyContext, Collector<JSONObject> collector) throws Exception {

                //Read-only view of the broadcast configuration
                ReadOnlyBroadcastState<String, TableProcess> broadcastState = readOnlyContext.getBroadcastState(mapStateDescriptor);

                //Lookup key: "table:type"
                String table = jsonObject.getString("table");
                String type = jsonObject.getString("type");
                String key = table + ":" + type;

                //Find the routing configuration for this record
                TableProcess tableProcess = broadcastState.get(key);

                if (tableProcess != null) {

                    //Attach the target table name
                    jsonObject.put("sink_table", tableProcess.getSinkTable());

                    //Keep only configured columns
                    filterColumn(jsonObject.getJSONObject("data"), tableProcess.getSinkColumns());

                    //Route to Kafka (main stream) or HBase (side output)
                    if (TableProcess.SINK_TYPE_KAFKA.equals(tableProcess.getSinkType())) {
                        collector.collect(jsonObject);
                    } else if (TableProcess.SINK_TYPE_HBASE.equals(tableProcess.getSinkType())) {
                        readOnlyContext.output(hbaseTag, jsonObject);
                    }

                } else {
                    //No routing entry: drop the record but make it visible for debugging
                    System.out.println("No Key " + key + " In Mysql!");
                }
            }

            @Override
            public void processBroadcastElement(String jsonStr, Context context, Collector<JSONObject> collector) throws Exception {

                //Writable broadcast state
                BroadcastState<String, TableProcess> broadcastState = context.getBroadcastState(mapStateDescriptor);

                //Config record shape: {"database":"","table":"","type":"","data":{...}}
                JSONObject jsonObject = JSON.parseObject(jsonStr);

                //Build the key from source table and operation type
                JSONObject data = jsonObject.getJSONObject("data");

                String table = data.getString("source_table");
                String type = data.getString("operate_type");
                String key = table + ":" + type;

                //Map the payload onto a TableProcess bean
                TableProcess tableProcess = JSON.parseObject(data.toString(), TableProcess.class);

                //Ensure the Phoenix target table exists before data starts flowing
                checkTable(tableProcess.getSinkTable(), tableProcess.getSinkColumns(), tableProcess.getSinkPk(), tableProcess.getSinkExtend());

                System.out.println("Key:" + key + "," + tableProcess);

                //Publish the configuration to all parallel instances
                broadcastState.put(key, tableProcess);
            }

            /**
             * Creates the Phoenix table if it does not exist.
             *
             * @param sinkTable   table name, e.g. test
             * @param sinkColumns comma-separated columns, e.g. id,name,sex
             * @param sinkPk      primary-key column, defaults to "id" when null
             * @param sinkExtend  extra DDL clause appended verbatim, defaults to "" when null
             *                    create table if not exists mydb.test(id varchar primary key,name varchar,sex varchar) ...
             */
            private void checkTable(String sinkTable, String sinkColumns, String sinkPk, String sinkExtend) {

                //Apply defaults for primary key and the extension clause
                if (sinkPk == null) {
                    sinkPk = "id";
                }
                if (sinkExtend == null) {
                    sinkExtend = "";
                }

                //Build the DDL; identifiers come from the trusted config table
                StringBuilder createSql = new StringBuilder("create table if not exists ").append(GmallConfig.HBASE_SCHEMA).append(".").append(sinkTable).append("(");
                String[] fields = sinkColumns.split(",");
                for (int i = 0; i < fields.length; i++) {

                    String field = fields[i];

                    //All columns are varchar; the configured pk column gets the primary-key clause
                    if (sinkPk.equals(field)) {
                        createSql.append(field).append(" varchar primary key ");
                    } else {
                        createSql.append(field).append(" varchar ");
                    }

                    //Comma between columns, none after the last
                    if (i < fields.length - 1) {
                        createSql.append(",");
                    }
                }

                createSql.append(")");
                createSql.append(sinkExtend);

                System.out.println(createSql);

                //Execute the DDL; try-with-resources guarantees the statement is closed
                try (PreparedStatement preparedStatement = connection.prepareStatement(createSql.toString())) {
                    preparedStatement.execute();
                } catch (SQLException e) {
                    //Preserve the cause so the original stack trace is not lost
                    throw new RuntimeException("创建Phoenix表" + sinkTable + "失败！", e);
                }
            }
        });

        //Debug output; the HBase side output is consumed by a downstream sink (not wired up here)
        result.print("Kafka>>>>>>>>>");
        DataStream<JSONObject> hbaseDS = result.getSideOutput(hbaseTag);

        //10. Launch the job
        env.execute();

    }


}
