package com.dtwave.cfstreaming.update.function;

import com.alibaba.fastjson.JSONObject;
import com.dtwave.cfstreaming.connection.HbaseConnector;
import com.dtwave.cfstreaming.update.StreamDimUpdate;
import com.dtwave.cfstreaming.update.dimtableupdate.DimTableUpdate;
import com.dtwave.utils.JsonUtils;
import org.apache.flink.api.common.functions.RichFilterFunction;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.configuration.Configuration;
import org.apache.hadoop.hbase.client.Connection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Flink filter that intercepts change records for a watched dimension table,
 * writes them into HBase, and removes them from the stream; all other records
 * pass through unchanged.
 */
public class UpdateFunction extends RichFilterFunction<JSONObject> {

    // Fixed: the original logged under StreamDimUpdate.class, misattributing log lines.
    private static final Logger LOGGER = LoggerFactory.getLogger(UpdateFunction.class);

    // transient: Flink serializes function instances to ship them to task managers;
    // the connection is created per-worker in open() and must not be serialized.
    transient Connection hbaseClient;

    /**
     * Opens the HBase connection using settings from the global job parameters.
     *
     * @param parameters Flink task configuration (unused; settings come from the
     *                   global job parameters)
     * @throws Exception if the connection cannot be established
     */
    @Override
    public void open(Configuration parameters) throws Exception {
        ParameterTool params =
                (ParameterTool) getRuntimeContext().getExecutionConfig().getGlobalJobParameters();
        // TODO(review): the parameter keys below are empty-string placeholders —
        // fill in the real configuration key names (host/port/etc.) before deploying.
        hbaseClient = HbaseConnector.getConnection(params.get("", ""),
                params.get("", ""),
                params.getInt("", 10));
        LOGGER.info("初始化hbaseClient");
    }

    /**
     * Routes one record: if it belongs to the watched dimension table, updates
     * HBase and drops the record; otherwise keeps it in the stream.
     *
     * @param jsonObject change record from Kafka, expected to carry a
     *                   "tableName" field and an "after" object with the changed row
     * @return {@code false} when the record was consumed as a dimension-table
     *         update, {@code true} to keep it in the stream
     * @throws Exception propagated from JSON extraction or the HBase update
     */
    @Override
    public boolean filter(JSONObject jsonObject) throws Exception {
        // Name of the source table this record came from (required field).
        String tableName = JsonUtils.getString(jsonObject, true, "tableName");
        // Post-change row image; null means there is nothing to write (e.g. a delete).
        JSONObject after = JsonUtils.getJSONObject(jsonObject, "after");
        // TODO(review): "需要监听的表名" is a placeholder — replace with the actual
        // dimension table name, or make it configurable via job parameters.
        if (after != null && "需要监听的表名".equals(tableName)) {
            LOGGER.debug("开始更新维表数据");
            DimTableUpdate.update(hbaseClient, after);
            // Consumed as a dimension update — filter it out of the stream.
            return false;
        }
        return true;
    }

    /** Releases the HBase connection opened in {@link #open(Configuration)}. */
    @Override
    public void close() throws Exception {
        if (hbaseClient != null) {
            hbaseClient.close();
        }
    }
}
