package cop;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionObserver;
import org.apache.hadoop.hbase.wal.WALEdit;

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.UUID;

/**
 * @date: 2019/7/10
 * @site: www.doitedu.cn
 * @author: hunter.d 涛哥
 * @qq: 657270652
 * @description:
 *
 * alter 'user_p','coprocessor'=>'hdfs://spark01:8020/cop/b.jar|cop.MyCoprocessor|1001|'
 * alter 'user',METHOD => 'table_att_unset', NAME=> 'coprocessor$1'
 *
 */
/**
 * Region coprocessor that maintains a reverse index: whenever a row is put into
 * the host table with a colon-separated id list in column {@code f:ids}, one row
 * per id is written to the {@code ids_index} table, column {@code f:gid} holding
 * the original row key.
 *
 * <p>The index-table {@link Connection} is created once in {@link #start} and
 * closed in {@link #stop} — opening a new ZooKeeper-backed connection on every
 * {@code prePut} (as a naive implementation would) is prohibitively expensive.
 */
public class MyCoprocessor implements RegionObserver, RegionCoprocessor {

    private static final Log LOG = LogFactory.getLog(MyCoprocessor.class);

    // Column family / qualifiers used by both the source column and the index rows.
    private static final byte[] FAMILY = "f".getBytes(StandardCharsets.UTF_8);
    private static final byte[] IDS_QUALIFIER = "ids".getBytes(StandardCharsets.UTF_8);
    private static final byte[] GID_QUALIFIER = "gid".getBytes(StandardCharsets.UTF_8);
    private static final TableName INDEX_TABLE = TableName.valueOf("ids_index");

    // Shared, long-lived connection to the cluster; created in start(), closed in stop().
    private Connection connection;

    /**
     * Coprocessor lifecycle hook: establish the cluster connection once.
     *
     * @param env coprocessor environment provided by the region server
     * @throws IOException if the connection cannot be created
     */
    @Override
    public void start(CoprocessorEnvironment env) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "spark01:2181,spark02:2181,spark03:2181");
        connection = ConnectionFactory.createConnection(conf);
        LOG.warn("coprocessor started; hbase connection established");
    }

    /**
     * Coprocessor lifecycle hook: release the shared connection.
     *
     * @param env coprocessor environment provided by the region server
     * @throws IOException if closing the connection fails
     */
    @Override
    public void stop(CoprocessorEnvironment env) throws IOException {
        if (connection != null) {
            connection.close();
        }
    }

    /**
     * Before each Put is applied, mirror the ids found in {@code f:ids} into the
     * {@code ids_index} table (row key = id, {@code f:gid} = original row key).
     *
     * @param c          observer context for the region handling the Put
     * @param put        the incoming Put being intercepted
     * @param edit       the WAL edit for this mutation (unused)
     * @param durability requested durability (unused)
     * @throws IOException if writing to the index table fails
     */
    @Override
    public void prePut(ObserverContext<RegionCoprocessorEnvironment> c, Put put, WALEdit edit, Durability durability) throws IOException {
        LOG.warn("enter ther coprocessor....");

        // Only act on puts that actually carry the f:ids column.
        List<Cell> cells = put.get(FAMILY, IDS_QUALIFIER);
        if (cells == null || cells.isEmpty()) {
            return;
        }

        // try-with-resources guarantees the Table is released even if put() throws.
        try (Table table = connection.getTable(INDEX_TABLE)) {
            for (Cell cell : cells) {
                byte[] rowBytes = CellUtil.cloneRow(cell);
                String ids = new String(CellUtil.cloneValue(cell), StandardCharsets.UTF_8);

                List<Put> puts = new ArrayList<>();
                for (String id : ids.split(":")) {
                    if (id.isEmpty()) {
                        continue; // skip empty tokens from leading/trailing/double colons
                    }
                    Put indexPut = new Put(id.getBytes(StandardCharsets.UTF_8));
                    indexPut.addColumn(FAMILY, GID_QUALIFIER, rowBytes);
                    puts.add(indexPut);
                }
                if (!puts.isEmpty()) {
                    table.put(puts);
                }
            }
        }
    }

    /** Expose this instance as the RegionObserver for the framework to invoke. */
    @Override
    public Optional<RegionObserver> getRegionObserver() {
        return Optional.of(this);
    }
}
