package org.shuzhou.h_sink;

import java.io.IOException;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.flink.annotation.Internal;
import org.apache.flink.connector.hbase.sink.HBaseMutationConverter;
import org.apache.flink.connector.hbase.util.HBaseConfigurationUtil;
import org.apache.flink.runtime.state.FunctionInitializationContext;
import org.apache.flink.runtime.state.FunctionSnapshotContext;
// import org.apache.flink.runtime.util.ExecutorThreadFactory;
import org.apache.flink.streaming.api.checkpoint.CheckpointedFunction;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.flink.streaming.api.functions.sink.SinkFunction.Context;
import org.apache.flink.util.StringUtils;
import org.apache.flink.util.concurrent.ExecutorThreadFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.client.BufferedMutator;
import org.apache.hadoop.hbase.client.BufferedMutatorParams;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException;
import org.apache.hadoop.hbase.client.BufferedMutator.ExceptionListener;


@Internal
/**
 * A Flink sink that writes records of type {@code T} to a single HBase table through a
 * {@link BufferedMutator}, batching mutations and flushing them on three independent triggers:
 * buffered byte size, pending mutation count, and a periodic timer.
 *
 * <p>Failures raised asynchronously (by the timer thread or by HBase's retry machinery via
 * {@link ExceptionListener#onException}) are captured in {@link #failureThrowable} and rethrown
 * on the task thread on the next {@link #invoke} or {@link #flush}.
 *
 * <p>{@link CheckpointedFunction#snapshotState} flushes all pending mutations, giving
 * at-least-once semantics.
 */
@Internal
public class HBaseSinkFunction<T> extends RichSinkFunction<T>
        implements CheckpointedFunction, ExceptionListener {

    private static final long serialVersionUID = 1L;

    /** Name of the target HBase table. */
    private final String hTableName;
    /** HBase config serialized to bytes because {@link Configuration} is not Serializable. */
    private final byte[] serializedConfig;
    /** Flush when buffered mutations exceed this many bytes; {@code <= 0} disables the trigger. */
    private final long bufferFlushMaxSizeInBytes;
    /** Flush after this many pending mutations; {@code <= 0} disables the trigger. */
    private final long bufferFlushMaxMutations;
    /** Periodic flush interval in milliseconds; {@code <= 0} disables the timer trigger. */
    private final long bufferFlushIntervalMillis;
    /** Converts incoming records into HBase mutations. */
    private final HBaseMutationConverter<T> mutationConverter;

    private transient Connection connection;
    private transient BufferedMutator mutator;
    private transient ScheduledExecutorService executor;
    private transient ScheduledFuture<?> scheduledFuture;
    /** Number of mutations buffered since the last flush; drives the count-based trigger. */
    private transient AtomicLong numPendingRequests;
    private transient volatile boolean closed = false;
    /** Holds the FIRST asynchronous failure; rethrown on the task thread by checkErrorAndRethrow(). */
    private final AtomicReference<Throwable> failureThrowable = new AtomicReference<>();

    /**
     * Creates the sink.
     *
     * @param hTableName                fully qualified HBase table name
     * @param conf                      HBase client configuration (serialized internally)
     * @param mutationConverter         converts records to HBase mutations
     * @param bufferFlushMaxSizeInBytes size-based flush threshold, {@code <= 0} to disable
     * @param bufferFlushMaxMutations   count-based flush threshold, {@code <= 0} to disable
     * @param bufferFlushIntervalMillis timer-based flush interval, {@code <= 0} to disable
     */
    public HBaseSinkFunction(String hTableName, Configuration conf,
            HBaseMutationConverter<T> mutationConverter, long bufferFlushMaxSizeInBytes,
            long bufferFlushMaxMutations, long bufferFlushIntervalMillis) {
        this.hTableName = hTableName;
        this.serializedConfig = HBaseConfigurationUtil.serializeConfiguration(conf);
        this.mutationConverter = mutationConverter;
        this.bufferFlushMaxSizeInBytes = bufferFlushMaxSizeInBytes;
        this.bufferFlushMaxMutations = bufferFlushMaxMutations;
        this.bufferFlushIntervalMillis = bufferFlushIntervalMillis;
    }

    /**
     * Opens the HBase connection and {@link BufferedMutator}, and — when batching is enabled —
     * starts the periodic flusher thread.
     *
     * @throws RuntimeException if the table does not exist or the connection cannot be created
     */
    @Override
    public void open(org.apache.flink.configuration.Configuration parameters) throws Exception {
        Configuration config = prepareRuntimeConfiguration();

        try {
            this.mutationConverter.open();
            this.numPendingRequests = new AtomicLong(0L);

            if (this.connection == null) {
                this.connection = ConnectionFactory.createConnection(config);
            }

            BufferedMutatorParams params =
                    new BufferedMutatorParams(TableName.valueOf(this.hTableName)).listener(this);
            if (this.bufferFlushMaxSizeInBytes > 0L) {
                params.writeBufferSize(this.bufferFlushMaxSizeInBytes);
            }
            this.mutator = this.connection.getBufferedMutator(params);

            // A periodic flusher only helps when batching is possible: with
            // bufferFlushMaxMutations == 1 every record is flushed synchronously in invoke().
            if (this.bufferFlushIntervalMillis > 0L && this.bufferFlushMaxMutations != 1L) {
                this.executor = Executors.newScheduledThreadPool(
                        1, new ExecutorThreadFactory("hbase-upsert-sink-flusher"));
                this.scheduledFuture = this.executor.scheduleWithFixedDelay(
                        () -> {
                            if (!closed) {
                                try {
                                    flush();
                                } catch (Exception e) {
                                    // Remember only the first async failure; it is surfaced on
                                    // the task thread via checkErrorAndRethrow().
                                    failureThrowable.compareAndSet(null, e);
                                }
                            }
                        },
                        this.bufferFlushIntervalMillis,
                        this.bufferFlushIntervalMillis,
                        TimeUnit.MILLISECONDS);
            }
        } catch (TableNotFoundException e) {
            throw new RuntimeException("HBase table '" + this.hTableName + "' not found.", e);
        } catch (IOException e) {
            throw new RuntimeException("Cannot create connection to HBase.", e);
        }
    }

    /**
     * Deserializes the runtime HBase configuration and validates that a ZooKeeper quorum is set.
     *
     * @return the merged runtime configuration
     * @throws IOException if {@code hbase.zookeeper.quorum} is missing or blank
     */
    private Configuration prepareRuntimeConfiguration() throws IOException {
        Configuration runtimeConfig = HBaseConfigurationUtil.deserializeConfiguration(
                this.serializedConfig, HBaseConfigurationUtil.getHBaseConfiguration());
        if (StringUtils.isNullOrWhitespaceOnly(runtimeConfig.get("hbase.zookeeper.quorum"))) {
            throw new IOException("Check HBase configuration failed, lost: 'hbase.zookeeper.quorum'!");
        }
        return runtimeConfig;
    }

    /** Rethrows (wrapped) the first recorded asynchronous failure, if any. */
    private void checkErrorAndRethrow() {
        Throwable cause = this.failureThrowable.get();
        if (cause != null) {
            throw new RuntimeException("An error occurred in HBaseSink.", cause);
        }
    }

    /**
     * Buffers one record as an HBase mutation and flushes when the count-based threshold is hit.
     */
    @Override
    public void invoke(T value, Context context) throws Exception {
        checkErrorAndRethrow();
        this.mutator.mutate(this.mutationConverter.convertToMutation(value));
        if (this.bufferFlushMaxMutations > 0L
                && this.numPendingRequests.incrementAndGet() >= this.bufferFlushMaxMutations) {
            flush();
        }
    }

    /** Flushes all buffered mutations, resets the pending counter, and surfaces async errors. */
    private void flush() throws IOException {
        this.mutator.flush();
        this.numPendingRequests.set(0L);
        this.checkErrorAndRethrow();
    }

    /**
     * Stops the periodic flusher, then closes the mutator (which flushes remaining mutations)
     * and the connection.
     */
    @Override
    public void close() throws Exception {
        this.closed = true;

        // Stop the periodic flusher BEFORE closing the mutator, so a scheduled flush cannot
        // race with — or run against — an already-closed BufferedMutator.
        if (this.scheduledFuture != null) {
            this.scheduledFuture.cancel(false);
            this.scheduledFuture = null;
        }
        if (this.executor != null) {
            this.executor.shutdownNow();
            this.executor = null;
        }

        if (this.mutator != null) {
            try {
                // BufferedMutator.close() flushes any mutations still buffered.
                this.mutator.close();
            } catch (IOException ignored) {
                // Best-effort shutdown; nothing sensible to do with a close failure here.
            }
            this.mutator = null;
        }

        if (this.connection != null) {
            try {
                this.connection.close();
            } catch (IOException ignored) {
                // Best-effort shutdown.
            }
            this.connection = null;
        }
    }

    /**
     * Flushes until no mutations are pending, so the checkpoint reflects everything received
     * so far (at-least-once). Async failures are surfaced by {@link #flush()}.
     */
    @Override
    public void snapshotState(FunctionSnapshotContext context) throws Exception {
        while (this.numPendingRequests.get() != 0L) {
            flush();
        }
    }

    /** No state to restore: pending mutations are always flushed before a checkpoint completes. */
    @Override
    public void initializeState(FunctionInitializationContext context) throws Exception {
    }

    /**
     * Callback from HBase when buffered mutations exhaust their retries; records the first
     * failure so the task thread can rethrow it.
     */
    @Override
    public void onException(RetriesExhaustedWithDetailsException exception, BufferedMutator mutator)
            throws RetriesExhaustedWithDetailsException {
        this.failureThrowable.compareAndSet(null, exception);
    }
}
