package com.atguigu.realtime.app.func;

import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.pool.DruidPooledConnection;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.realtime.bean.CDCJavaBean;
import com.atguigu.realtime.common.EDUConfig;
import com.atguigu.realtime.utils.DruidPhoenixDSUtil;
import com.atguigu.realtime.utils.PhoenixUtil;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ReadOnlyBroadcastState;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.util.Collector;

import java.sql.Array;
import java.sql.SQLException;
import java.util.*;

public class MyBroadcastProcessFunction extends BroadcastProcessFunction<JSONObject, String, JSONObject> {

    /** Descriptor for the broadcast state mapping source-table name -> its sink configuration. */
    private final MapStateDescriptor<String, CDCJavaBean> mapState;
    /** Phoenix connection pool; created in open(), so excluded from Flink's function serialization. */
    private transient DruidDataSource druidDataSource;

    public MyBroadcastProcessFunction(MapStateDescriptor<String, CDCJavaBean> mapState) {
        this.mapState = mapState;
    }

    @Override
    public void open(Configuration parameters) throws Exception {
        druidDataSource = DruidPhoenixDSUtil.getDataSource();
    }

    /**
     * Main-stream side: looks up the sink configuration broadcast for this record's source table.
     * Records from unconfigured tables are dropped; configured ones have their payload trimmed to
     * the configured columns and are emitted tagged with the target sink table and operation type.
     */
    @Override
    public void processElement(JSONObject value, ReadOnlyContext ctx, Collector<JSONObject> out) throws Exception {
        ReadOnlyBroadcastState<String, CDCJavaBean> broadcastState = ctx.getBroadcastState(mapState);
        String table = value.getString("table");
        CDCJavaBean broadcastTable = broadcastState.get(table);
        if (broadcastTable != null) {
            JSONObject data = value.getJSONObject("data");
            filterColumns(data, broadcastTable.getSinkColumns());
            data.put("sink_table", broadcastTable.getSinkTable());
            data.put("type", value.getString("type"));
            out.collect(data);
        }
    }

    /** Removes from {@code data} every key not listed in the comma-separated {@code sinkColumns}. */
    private void filterColumns(JSONObject data, String sinkColumns) {
        // HashSet gives O(1) membership checks instead of List.contains' linear scan per key.
        Set<String> cols = new HashSet<>(Arrays.asList(sinkColumns.split(",")));
        data.entrySet().removeIf(entry -> !cols.contains(entry.getKey()));
    }

    /**
     * Broadcast side: keeps the per-table sink configuration in sync with the CDC stream of the
     * config table. Deletes remove the entry; inserts/updates pre-create the Phoenix sink table
     * and register the configuration so processElement can route matching records.
     */
    @Override
    public void processBroadcastElement(String value, Context ctx, Collector<JSONObject> out) throws Exception {
        BroadcastState<String, CDCJavaBean> broadcastState = ctx.getBroadcastState(mapState);
        JSONObject jsonObject = JSONObject.parseObject(value);
        if ("d".equals(jsonObject.getString("op"))) {
            String sourceTable = jsonObject.getJSONObject("before").getString("source_table");
            broadcastState.remove(sourceTable);
        } else {
            CDCJavaBean table = JSONObject.parseObject(jsonObject.getString("after"), CDCJavaBean.class);
            checkTable(table.getSinkTable(), table.getSinkColumns(), table.getSinkPk(), table.getSinkExtend());
            // BUGFIX: the original never stored the config, so the broadcast state stayed empty
            // and processElement dropped every record. Key by source_table to match both the
            // lookup key in processElement (value's "table" field) and the delete path above.
            String sourceTable = jsonObject.getJSONObject("after").getString("source_table");
            broadcastState.put(sourceTable, table);
        }
    }

    /**
     * Creates the Phoenix sink table if it does not already exist. All columns are varchar; the
     * column matching {@code sinkPk} (default "id") becomes the primary key, and
     * {@code sinkExtend} is appended verbatim as extra DDL options.
     *
     * NOTE(review): if sinkPk never appears in sinkColumns the generated DDL has no primary key
     * and Phoenix will reject it — same as the original behavior; confirm upstream config
     * guarantees the pk column is always listed.
     */
    private void checkTable(String sinkTable, String sinkColumns, String sinkPk, String sinkExtend) {
        if (sinkPk == null) {
            sinkPk = "id";
        }
        if (sinkExtend == null) {
            sinkExtend = "";
        }
        StringBuilder sql = new StringBuilder("create table if not exists ")
                .append(EDUConfig.HBASE_SCHEMA)
                .append(".")
                .append(sinkTable)
                .append(" (");
        String[] split = sinkColumns.split(",");
        for (int i = 0; i < split.length; i++) {
            if (split[i].equals(sinkPk)) {
                sql.append(split[i]).append(" varchar primary key ");
            } else {
                sql.append(split[i]).append(" varchar ");
            }
            if (i < split.length - 1) {
                sql.append(",");
            }
        }
        sql.append(") ").append(sinkExtend);

        // try-with-resources: the original leaked the pooled connection on every call,
        // eventually exhausting the Druid pool. close() returns it to the pool.
        try (DruidPooledConnection connection = druidDataSource.getConnection()) {
            PhoenixUtil.executeSql(sql.toString(), connection);
        } catch (SQLException e) {
            e.printStackTrace();
            System.out.println("连接池获取失败");
        }
    }
}
