package com.atguigu.app.func;

import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.pool.DruidPooledConnection;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.bean.TableProcess;
import com.atguigu.common.GmallConfig;
import com.atguigu.util.DruidPhoenixDSUtil;
import com.atguigu.util.PhoenixUtil;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ReadOnlyBroadcastState;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.util.Collector;

import java.sql.SQLException;
import java.util.*;

/**
 * Broadcast-join function that routes change-log records to Phoenix/HBase sink tables.
 *
 * <p>The broadcast side carries JSON config records (one per source table) describing the
 * target sink table, its columns, primary key and DDL extension. Config is cached in
 * broadcast state keyed by source table name. The data side looks up its table's config,
 * keeps only the configured columns, tags the record with {@code sink_table} and emits it.
 */
public class MyBroadcastFunction extends BroadcastProcessFunction<JSONObject, String, JSONObject> {

    /** Descriptor of the broadcast state: source table name -> its sink configuration. */
    private final MapStateDescriptor<String, TableProcess> mapStateDescriptor;

    /**
     * Phoenix connection pool. Created in {@link #open}, released in {@link #close}.
     * transient: the function object is serialized when the job is deployed and a
     * connection pool is neither serializable nor meaningful across JVMs.
     */
    private transient DruidDataSource druidDataSource;

    public MyBroadcastFunction(MapStateDescriptor<String, TableProcess> mapStateDescriptor) {
        this.mapStateDescriptor = mapStateDescriptor;
    }

    @Override
    public void open(Configuration parameters) throws Exception {
        druidDataSource = DruidPhoenixDSUtil.getDataSource();
    }

    @Override
    public void close() throws Exception {
        // Release the pooled Phoenix connections when the task shuts down;
        // without this the pool (and its sockets) leaks on every restart.
        if (druidDataSource != null) {
            druidDataSource.close();
        }
    }

    /**
     * Handles one config-stream record (Debezium/Maxwell-style envelope with
     * {@code op}, {@code before}, {@code after} fields) and updates broadcast state.
     *
     * <p>Deliberately best-effort: a malformed config record is reported and skipped
     * instead of failing the whole job.
     */
    @Override
    public void processBroadcastElement(String value, Context ctx, Collector<JSONObject> out) throws Exception {
        BroadcastState<String, TableProcess> broadcastState = ctx.getBroadcastState(mapStateDescriptor);
        try {
            JSONObject jsonObject = JSON.parseObject(value);
            String op = jsonObject.getString("op");
            if ("d".equals(op)) {
                // Config row deleted: only drop the state entry so no further data is
                // written to HBase for this source table. Existing HBase data is kept.
                JSONObject before = jsonObject.getJSONObject("before");
                String sourceTable = before.getString("source_table");
                broadcastState.remove(sourceTable);
            } else {
                // Insert/update: ensure the Phoenix table exists, then cache the config.
                String after = jsonObject.getString("after");
                TableProcess tableProcess = JSON.parseObject(after, TableProcess.class);
                checkTable(tableProcess.getSinkPk(),
                        tableProcess.getSinkTable(),
                        tableProcess.getSinkColumns(),
                        tableProcess.getSinkExtend());
                broadcastState.put(tableProcess.getSourceTable(), tableProcess);
            }
        } catch (Exception e) {
            // Best-effort by design: log and skip the bad config record rather than
            // failing the job. TODO(review): switch to SLF4J once available here.
            System.err.println("Failed to process broadcast config record: " + value);
            e.printStackTrace();
        }
    }

    /**
     * Creates the Phoenix sink table if it does not exist yet.
     *
     * <p>The DDL is assembled from the internal config table (trusted input, not user
     * data); Phoenix DDL cannot be parameterized, hence the string building.
     *
     * @param sinkPk      primary-key column name; defaults to {@code "id"} when null
     * @param sinkTable   target table name (created under {@link GmallConfig#HBASE_SCHEMA})
     * @param sinkColumns comma-separated column list
     * @param sinkExtend  extra DDL clause appended after the column list; null -> none
     * @throws RuntimeException wrapping any {@link SQLException} from Phoenix
     */
    private void checkTable(String sinkPk, String sinkTable, String sinkColumns, String sinkExtend) {
        if (sinkPk == null) {
            sinkPk = "id";
        }
        if (sinkExtend == null) {
            sinkExtend = "";
        }
        String[] split = sinkColumns.split(",");
        DruidPooledConnection connection = null;
        try {
            connection = druidDataSource.getConnection();
            StringBuilder stringBuilder = new StringBuilder();
            stringBuilder.append("create table if not exists ")
                    .append(GmallConfig.HBASE_SCHEMA)
                    .append(".")
                    .append(sinkTable)
                    .append("(");
            for (int i = 0; i < split.length; i++) {
                if (split[i].equals(sinkPk)) {
                    stringBuilder.append(split[i])
                            .append(" varchar not null primary key");
                } else {
                    stringBuilder.append(split[i])
                            .append(" varchar ");
                }
                if (i < split.length - 1) {
                    stringBuilder.append(",");
                }
            }
            stringBuilder.append(")").append(sinkExtend);
            PhoenixUtil.sqlExecute(connection, stringBuilder.toString());
        } catch (SQLException e) {
            throw new RuntimeException("Failed to create Phoenix table " + sinkTable, e);
        } finally {
            // Return the pooled connection; close() on a DruidPooledConnection recycles it.
            if (connection != null) {
                try {
                    connection.close();
                } catch (SQLException e) {
                    throw new RuntimeException(e);
                }
            }
        }
    }

    /**
     * Handles one data-stream record: looks up its table's sink config, prunes columns
     * not configured for the sink, tags the record with the target table and emits it.
     * Records for tables without a config entry are silently dropped.
     */
    @Override
    public void processElement(JSONObject value, ReadOnlyContext ctx, Collector<JSONObject> out) throws Exception {
        ReadOnlyBroadcastState<String, TableProcess> broadcastState = ctx.getBroadcastState(mapStateDescriptor);
        String tableName = value.getString("table");
        TableProcess tableProcess = broadcastState.get(tableName);
        if (tableProcess != null) {
            JSONObject data = value.getJSONObject("data");
            filterColumns(data, tableProcess.getSinkColumns());
            // Downstream sinks route on this field.
            data.put("sink_table", tableProcess.getSinkTable());
            out.collect(data);
        }
    }

    /**
     * Removes from {@code data} every key that is not listed in {@code sinkColumns}.
     *
     * @param data        record payload, mutated in place (JSONObject implements Map)
     * @param sinkColumns comma-separated whitelist of columns to keep
     */
    private void filterColumns(JSONObject data, String sinkColumns) {
        // HashSet gives O(1) membership checks vs. List.contains per key.
        Set<String> columns = new HashSet<>(Arrays.asList(sinkColumns.split(",")));
        data.keySet().removeIf(key -> !columns.contains(key));
    }
}

