package MicroTime;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.util.ExecutorThreadFactory;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;

/**
 * Flink {@link RichSinkFunction} that writes JSON-string records into the HBase
 * table {@code micro_on_time} through a {@link BufferedMutator}.
 *
 * <p>Buffered mutations are flushed when either of two thresholds is reached:
 * the number of pending mutations ({@code bufferFlushMaxMutations}) or a fixed
 * time interval ({@code bufferFlushIntervalMillis}) driven by a single-threaded
 * scheduler started in {@link #open(Configuration)}.
 *
 * @author Spring_Hu
 * @date 2022/6/7 23:09
 */
public class HbaseSink extends RichSinkFunction<String> {
    private static final Logger logger = LoggerFactory.getLogger(HbaseSink.class);

    /** Target HBase table name. */
    private String hTableName = "micro_on_time";
    /** Client-side write buffer size handed to the BufferedMutator. */
    private Long bufferFlushMaxSizeInBytes = 1024L;
    /** Flush once this many mutations are pending; <= 0 disables count-based flushing. */
    private Long bufferFlushMaxMutations = 10L;
    /** Periodic flush interval; <= 0 disables the background flusher. */
    private Long bufferFlushIntervalMillis = 1000L;

    private Connection connection = null;
    private BufferedMutator mutator = null;
    private ScheduledExecutorService executor = null;
    // FIX: was a raw ScheduledFuture; parameterized to avoid the raw-type warning.
    private ScheduledFuture<?> scheduledFuture = null;
    private AtomicLong numPendingRequests = null;
    // FIX: was a non-volatile Boolean. The flag is written by the task thread in
    // close() and read by the scheduled flusher thread, so it must be volatile
    // for the write to be guaranteed visible.
    private volatile boolean closed = false;

    /**
     * Opens the HBase connection, creates the BufferedMutator and, when a
     * positive flush interval is configured, starts a background flusher.
     *
     * @param parameters Flink configuration (unused; HBase config is loaded from the classpath)
     * @throws RuntimeException if the table is missing or the connection cannot be created
     */
    @Override
    public void open(Configuration parameters) {
        logger.info("start open ...");
        org.apache.hadoop.conf.Configuration config = HBaseConfiguration.create();
        this.numPendingRequests = new AtomicLong(0);
        try {
            if (null == connection) {
                this.connection = ConnectionFactory.createConnection(config);
            }
            // create a parameter instance, set the table name and write buffer size.
            BufferedMutatorParams params =
                    new BufferedMutatorParams(TableName.valueOf(hTableName))
                            .writeBufferSize(bufferFlushMaxSizeInBytes);
            this.mutator = connection.getBufferedMutator(params);
            if (bufferFlushIntervalMillis > 0) {
                this.executor = Executors.newScheduledThreadPool(
                        1, new ExecutorThreadFactory("hbase-upsert-sink-flusher"));
                this.scheduledFuture = this.executor.scheduleWithFixedDelay(() -> {
                    try {
                        if (!closed) {
                            flush();
                        }
                    } catch (Exception e) {
                        // Best effort: the next interval (or the count threshold) retries.
                        logger.error("write data to hbase error, skip..", e);
                    }
                }, bufferFlushIntervalMillis, bufferFlushIntervalMillis, TimeUnit.MILLISECONDS);
            }
        } catch (TableNotFoundException e) {
            logger.error("The table " + hTableName + " not found ", e);
            throw new RuntimeException("HBase table '" + hTableName + "' not found.", e);
        } catch (IOException io) {
            logger.error("Exception while creating connection to HBase.", io);
            throw new RuntimeException("Cannot create connection to HBase.", io);
        }
        logger.info("end open ...");
    }

    /**
     * Converts one JSON record into a {@link Put} (row key = first configured
     * column, remaining columns written as cell values) and buffers it, flushing
     * when the pending-mutation threshold is reached.
     *
     * @param value   JSON object string; keys are expected to match
     *                {@code Constants.MICRO_ON_TIME_COLOUMNS}
     * @param context Flink sink context
     * @throws RuntimeException if the mutation or flush fails, so Flink restarts the task
     */
    @Override
    public void invoke(String value, Context context) {
        JSONObject jsonObject = JSON.parseObject(value);
        // FIX: no need to copy the column array into an ArrayList first.
        String[] columns = Constants.MICRO_ON_TIME_COLOUMNS;

        String key = jsonObject.getString(columns[0]);
        Put put = new Put(Bytes.toBytes(key));

        for (int i = 1; i < columns.length; i++) {
            String column = columns[i];
            String columnValue = jsonObject.getString(column);
            // FIX: skip absent fields instead of NPE-ing on null.getBytes(), and use
            // Bytes.toBytes (always UTF-8) instead of platform-charset String.getBytes().
            if (columnValue != null) {
                put.addColumn(
                        Bytes.toBytes(Constants.MICRO_ON_TIME_HBASE_FAMILY),
                        Bytes.toBytes(column),
                        Bytes.toBytes(columnValue));
            }
        }

        try {
            mutator.mutate(put);
            // flush when the buffered number of mutations reaches the configured max.
            if (bufferFlushMaxMutations > 0
                    && numPendingRequests.incrementAndGet() >= bufferFlushMaxMutations) {
                flush();
            }
        } catch (IOException e) {
            // FIX: was e.printStackTrace(), which silently dropped the failure and
            // lost data. Fail the task so Flink can restart it.
            logger.error("Exception while writing to HBase.", e);
            throw new RuntimeException("Cannot write to HBase table '" + hTableName + "'.", e);
        }
    }

    /**
     * Flushes all buffered mutations and resets the pending counter.
     * BufferedMutator is thread-safe, so this may be called from both the task
     * thread and the background flusher.
     *
     * @throws IOException if the flush fails; the counter is NOT reset in that case
     *                     (FIX: previously the exception was swallowed with
     *                     printStackTrace and the counter reset regardless)
     */
    private void flush() throws IOException {
        mutator.flush();
        numPendingRequests.set(0);
    }

    /**
     * Stops the background flusher first, then closes the mutator (which flushes
     * any remaining buffered mutations) and the connection. Idempotent.
     */
    @Override
    public void close() {
        if (closed) {
            return;
        }
        closed = true;
        // FIX: cancel the periodic flusher BEFORE closing the mutator, so the
        // scheduler thread cannot race a flush() against mutator.close(). Also
        // shut the executor down unconditionally (previously it was only reached
        // when scheduledFuture was non-null).
        if (scheduledFuture != null) {
            scheduledFuture.cancel(false);
            this.scheduledFuture = null;
        }
        if (executor != null) {
            executor.shutdown();
            this.executor = null;
        }
        if (mutator != null) {
            try {
                // BufferedMutator.close() flushes remaining mutations before releasing resources.
                mutator.close();
            } catch (IOException e) {
                logger.warn("Exception occurs while closing HBase BufferedMutator.", e);
            }
            this.mutator = null;
        }
        if (connection != null) {
            try {
                connection.close();
            } catch (IOException e) {
                logger.warn("Exception occurs while closing HBase Connection.", e);
            }
            this.connection = null;
        }
    }
}