package com.atguigu.actual.edu0417.func;

import com.alibaba.fastjson.JSONObject;
import com.atguigu.actual.edu0417.beans.BaseDbTableProcess;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ReadOnlyBroadcastState;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.util.Collector;

import java.sql.*;
import java.util.*;

/**
 * @author: 洛尘
 * @since: 2023-10-20 23:44
 * @description: dwd动态分流合流处理
 **/
public class BaseDbTableProcessFunction extends BroadcastProcessFunction<JSONObject, String, JSONObject> {

    /**
     * Descriptor used to look up the broadcast state that holds the dwd
     * routing configuration (populated by the Flink CDC broadcast stream).
     */
    private MapStateDescriptor<String, BaseDbTableProcess> broadCastDes;

    /**
     * Local snapshot of the config table, pre-loaded in {@link #open(Configuration)}.
     * The main stream can arrive before the first broadcast element does, so
     * {@link #processElement} falls back to this map when the broadcast state
     * has no entry for a key yet.
     */
    private Map<String, BaseDbTableProcess> configMap = new HashMap<>();

    public BaseDbTableProcessFunction(MapStateDescriptor<String, BaseDbTableProcess> broadCastDes) {
        this.broadCastDes = broadCastDes;
    }

    /**
     * Pre-loads the full {@code table_process_dwd} config table into {@link #configMap}
     * so routing works before the broadcast stream delivers its first element.
     *
     * <p>try-with-resources guarantees the JDBC connection, statement and result set
     * are released even if an exception is thrown mid-iteration (the original
     * manual close() calls leaked all three on failure).
     *
     * @throws Exception if the driver cannot be loaded or the query fails
     */
    @Override
    public void open(Configuration parameters) throws Exception {
        // NOTE(review): credentials are hard-coded in the JDBC URL — move to
        // job configuration / environment before production use.
        Class.forName("com.mysql.cj.jdbc.Driver");
        String url = "jdbc:mysql://hadoop101:3306/edu_config?user=root&password=000000&useUnicode=true&characterEncoding=utf8&serverTimeZone=Asia/Shanghai&useSSL=false";
        String sql = "select * from table_process_dwd";
        try (Connection connection = DriverManager.getConnection(url);
             PreparedStatement preparedStatement = connection.prepareStatement(sql);
             ResultSet resultSet = preparedStatement.executeQuery()) {
            ResultSetMetaData metaData = resultSet.getMetaData();
            while (resultSet.next()) {
                // Convert the row to JSON column-by-column, then map it onto the config bean.
                JSONObject jsonObject = new JSONObject();
                for (int i = 1; i <= metaData.getColumnCount(); i++) {
                    jsonObject.put(metaData.getColumnName(i), resultSet.getObject(i));
                }
                BaseDbTableProcess tableProcess = jsonObject.toJavaObject(BaseDbTableProcess.class);
                configMap.put(buildKey(tableProcess.getSourceTable(), tableProcess.getSourceType()), tableProcess);
            }
        }
    }

    /**
     * Processes a main-stream change record: looks up its routing config by
     * {@code table:type}, filters the payload down to the configured sink
     * columns, attaches the target Kafka topic and event timestamp, and emits it.
     * Records with no matching config are dropped silently.
     */
    @Override
    public void processElement(JSONObject jsonObj, ReadOnlyContext ctx, Collector<JSONObject> out) throws Exception {
        String key = buildKey(jsonObj.getString("table"), jsonObj.getString("type"));

        ReadOnlyBroadcastState<String, BaseDbTableProcess> broadcastState = ctx.getBroadcastState(broadCastDes);

        // Single lookup (the original queried the broadcast state twice);
        // fall back to the pre-loaded snapshot when broadcast state lags behind.
        BaseDbTableProcess baseDbTableProcess = broadcastState.get(key);
        if (baseDbTableProcess == null) {
            baseDbTableProcess = configMap.get(key);
        }

        if (Objects.nonNull(baseDbTableProcess)) {
            JSONObject dataObj = jsonObj.getJSONObject("data");
            // Drop columns the sink does not want before forwarding.
            filterColumns(dataObj, baseDbTableProcess.getSinkColumns());
            // Attach the target Kafka topic.
            dataObj.put("sink_table", baseDbTableProcess.getSinkTable());
            // Attach the event timestamp.
            dataObj.put("ts", jsonObj.getLong("ts"));
            out.collect(dataObj);
        }
    }

    /**
     * Removes from {@code dataObj} every entry whose key is not listed in the
     * comma-separated {@code sinkCol} column list. Mutates {@code dataObj} in place.
     */
    public void filterColumns(JSONObject dataObj, String sinkCol) {
        // HashSet gives O(1) membership checks; the original used List.contains (O(n) per entry).
        Set<String> colSet = new HashSet<>(Arrays.asList(sinkCol.split(",")));
        dataObj.entrySet().removeIf(entry -> !colSet.contains(entry.getKey()));
    }

    /**
     * Applies a CDC change on the config table to both the broadcast state and
     * the local snapshot. Delete ops ({@code op == "d"}) carry the row in
     * {@code before}; every other op carries it in {@code after}.
     */
    @Override
    public void processBroadcastElement(String jsonStr, Context ctx, Collector<JSONObject> out) throws Exception {
        BroadcastState<String, BaseDbTableProcess> broadcastState = ctx.getBroadcastState(broadCastDes);
        JSONObject jsonObject = JSONObject.parseObject(jsonStr);
        if ("d".equals(jsonObject.getString("op"))) {
            // Delete: Flink CDC only populates "before" for deletes.
            BaseDbTableProcess before = jsonObject.getObject("before", BaseDbTableProcess.class);
            String key = buildKey(before.getSourceTable(), before.getSourceType());
            broadcastState.remove(key);
            configMap.remove(key);
        } else {
            // Insert/update/snapshot read: row is in "after"; store the whole bean.
            BaseDbTableProcess after = jsonObject.getObject("after", BaseDbTableProcess.class);
            String key = buildKey(after.getSourceTable(), after.getSourceType());
            broadcastState.put(key, after);
            configMap.put(key, after);
        }
    }

    /** Builds the {@code sourceTable:sourceType} lookup key used by all three maps/states. */
    private String buildKey(String sourceTable, String sourceType) {
        return sourceTable + ":" + sourceType;
    }
}