package com.arch.bigdata.hbase;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionObserver;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.TimeUnit;

/**
 * Test-only region coprocessor that injects an artificial {@value HbaseTimeoutParamCoprocessor#SLEEP_TIME}-ms
 * delay before get / put / increment / delete operations, so that client-side
 * timeout parameters (rpc timeout, operation timeout, retries) can be exercised
 * against a live cluster.
 *
 * @author pizhihui
 * @date 2023-08-16 14:36
 */
public class HbaseTimeoutParamCoprocessor implements RegionCoprocessor, RegionObserver {

    /** Artificial delay, in milliseconds, injected before each hooked operation. */
    public static final int SLEEP_TIME = 2000;

    private static final Logger LOG = LoggerFactory.getLogger(HbaseTimeoutParamCoprocessor.class);

    /**
     * Registers this instance as the region observer.
     *
     * <p>Without this override, HBase 2.x falls back to the interface default of
     * {@code Optional.empty()} and never invokes the {@code pre*} hooks below —
     * the coprocessor would load but silently do nothing.
     *
     * @return this instance wrapped in an {@link Optional}
     */
    @Override
    public Optional<RegionObserver> getRegionObserver() {
        return Optional.of(this);
    }

    /** Delays every Get by {@link #SLEEP_TIME} ms; logs the request for traceability. */
    @Override
    public void preGetOp(ObserverContext<RegionCoprocessorEnvironment> c, Get get, List<Cell> result) throws IOException {
        LOG.warn("HbaseTimeoutParamCoprocessor get {}", get.toJSON());
        Threads.sleep(SLEEP_TIME);
    }

    /** Delays every Put by {@link #SLEEP_TIME} ms. */
    @Override
    public void prePut(final ObserverContext<RegionCoprocessorEnvironment> e, final Put put,
                       final WALEdit edit, final Durability durability) throws IOException {
        Threads.sleep(SLEEP_TIME);
    }

    /**
     * Delays every Increment by {@link #SLEEP_TIME} ms.
     *
     * @return {@code null} so HBase proceeds with the normal increment path
     */
    @Override
    public Result preIncrement(final ObserverContext<RegionCoprocessorEnvironment> e,
                               final Increment increment) throws IOException {
        Threads.sleep(SLEEP_TIME);
        return null;
    }

    /** Delays every Delete by {@link #SLEEP_TIME} ms. */
    @Override
    public void preDelete(final ObserverContext<RegionCoprocessorEnvironment> e,
                          final Delete delete, final WALEdit edit, final Durability durability) throws IOException {
        Threads.sleep(SLEEP_TIME);
    }
}
