package com.xiaojie.hadoop.hbase.coprocessor;

import lombok.extern.slf4j.Slf4j;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.wal.WALEdit;

import java.io.IOException;

/**
 * @author 熟透的蜗牛
 * @version 1.0
 * @description: Region coprocessor that mirrors every write into a second table.
 * Deployment steps:
 * 1. Package this coprocessor as a jar and upload it to /usr/local/hbase-2.6.1/lib/
 * 2. Create a table "staff_ns:staff" and attach this coprocessor to it
 * 3. Create a table "staff" with the same structure
 * 4. Restart HBase
 * 5. Insert test data
 * @date 2025/1/6 17:56
 */
@Slf4j
@Slf4j
public class MyCoprocessor extends BaseRegionObserver {

    // NOTE(review): BaseRegionObserver was removed in HBase 2.0 (HBASE-17732),
    // yet the WALEdit import (org.apache.hadoop.hbase.wal) and the deployment
    // notes reference HBase 2.6.1. Against a 2.x server this class should
    // implement RegionCoprocessor + RegionObserver instead — confirm the
    // actual compile-time HBase version before deploying.

    /** Table every Put is mirrored into; must exist with the same schema. */
    private static final TableName TARGET_TABLE = TableName.valueOf("staff");

    /**
     * Invoked before a Put is applied to the region. Logging only; the Put is
     * not inspected or modified.
     *
     * @param c    coprocessor environment for the region handling the Put
     * @param put  the mutation about to be applied
     * @param edit WAL entry for the mutation
     * @throws IOException never thrown here, required by the hook signature
     */
    @Override
    public void prePut(ObserverContext<RegionCoprocessorEnvironment> c, Put put, WALEdit edit) throws IOException {
        log.info(">>>>>>>>>>>>>>>>>>>写数据之前");
    }

    /**
     * Invoked after a Put has been applied; mirrors the identical Put into the
     * "staff" table via the server-side shared connection.
     *
     * @param c    coprocessor environment for the region handling the Put
     * @param put  the mutation that was just applied, re-used verbatim
     * @param edit WAL entry for the mutation
     * @throws IOException if obtaining the target table or writing to it fails
     */
    @Override
    public void postPut(ObserverContext<RegionCoprocessorEnvironment> c, Put put, WALEdit edit) throws IOException {
        log.info(">>>>>>>>>>>>>>>>>>>写数据之后");
        // Guard against infinite recursion: if this coprocessor is ever
        // (mis)attached to the target table itself, mirroring would re-trigger
        // postPut on every mirrored write.
        if (TARGET_TABLE.equals(c.getEnvironment().getRegionInfo().getTable())) {
            return;
        }
        // try-with-resources: the original leaked the Table when put() threw
        // before close(). The Connection is the shared server-side connection
        // owned by the environment and must NOT be closed here.
        try (Table table = c.getEnvironment().getConnection().getTable(TARGET_TABLE)) {
            table.put(put);
        }
    }
}
