package com.atguigu;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;

public class Hbase_DML {
    // Shared HBase connection, created once by the Hbase_Connect_Single singleton.
    public static Connection connection = Hbase_Connect_Single.connection;

    /**
     * Inserts (or overwrites) a single cell value.
     *
     * @param namespace HBase namespace of the target table
     * @param tableName table name within the namespace
     * @param rowKey    row key to write to
     * @param family    column family
     * @param column    column qualifier
     * @param value     cell value to store
     * @throws IOException if the RPC to the region server fails
     */
    public static void putCell(String namespace, String tableName, String rowKey, String family, String column, String value) throws IOException {
        // try-with-resources guarantees the Table is released even if put() throws.
        try (Table table = connection.getTable(TableName.valueOf(namespace, tableName))) {
            Put put = new Put(Bytes.toBytes(rowKey));

            // Target family/qualifier and the value to write.
            put.addColumn(Bytes.toBytes(family), Bytes.toBytes(column), Bytes.toBytes(value));

            table.put(put);
        }
    }

    /**
     * Reads one row with a Get and prints every returned cell.
     * As written it fetches all versions of the given column family;
     * the commented-out addColumn call shows how to narrow to one column.
     *
     * @param namespace HBase namespace of the target table
     * @param tableName table name within the namespace
     * @param rowKey    row key to read
     * @param family    column family to fetch
     * @param column    column qualifier (only used by the commented-out variant)
     * @throws IOException if the RPC to the region server fails
     */
    public static void getCells(String namespace, String tableName, String rowKey, String family, String column) throws IOException {
        try (Table table = connection.getTable(TableName.valueOf(namespace, tableName))) {
            Get get = new Get(Bytes.toBytes(rowKey));

            // Return every stored version, not just the newest.
            get.readAllVersions();

            // Restrict the read to one column family.
            get.addFamily(Bytes.toBytes(family));

            // Alternative: restrict to a single column of that family.
//            get.addColumn(Bytes.toBytes(family), Bytes.toBytes(column));

            Result result = table.get(get);
            printCells(result);

            // Alternative: fetch only the newest value of the first cell.
//            byte[] value = result.value();
//            System.out.println(Bytes.toString(value));
        }
    }

    /**
     * Deletes data from a single row. The active call removes only the
     * newest version of the given column; the commented-out addColumns
     * variant would remove all versions.
     *
     * @param namespace HBase namespace of the target table
     * @param tableName table name within the namespace
     * @param rowKey    row key to delete from
     * @param family    column family
     * @param column    column qualifier
     * @throws IOException if the RPC to the region server fails
     */
    public static void deleteCells(String namespace, String tableName, String rowKey, String family, String column) throws IOException {
        try (Table table = connection.getTable(TableName.valueOf(namespace, tableName))) {
            Delete delete = new Delete(Bytes.toBytes(rowKey));

            // Delete only the latest version of this single column.
            delete.addColumn(Bytes.toBytes(family), Bytes.toBytes(column));

            // Alternative: delete every version of this column.
//            delete.addColumns(Bytes.toBytes(family), Bytes.toBytes(column));

            table.delete(delete);
        }
    }

    /**
     * Scans a row-key range and prints every cell of every matching row.
     *
     * @param namespace HBase namespace of the target table
     * @param tableName table name within the namespace
     * @param startRow  first row key of the range (inclusive)
     * @param stopRow   last row key of the range (inclusive — see withStopRow)
     * @throws IOException if the RPC to the region server fails
     */
    public static void scanTable(String namespace, String tableName, String startRow, String stopRow) throws IOException {
        try (Table table = connection.getTable(TableName.valueOf(namespace, tableName))) {
            Scan scan = new Scan();

            // Alternative: cap the number of versions returned per cell.
//            scan.readVersions(3);

            // Raw scan: also returns cells beyond the family's maintained
            // version count (and delete markers not yet compacted away).
            scan.setRaw(true);

            // Return every stored version of each cell.
            scan.readAllVersions();

            // Bound the scan: start row inclusive, stop row inclusive (second arg = true).
            scan.withStartRow(Bytes.toBytes(startRow));
            scan.withStopRow(Bytes.toBytes(stopRow), true);

            // ResultScanner is AutoCloseable too — close it with the table.
            try (ResultScanner resultScanner = table.getScanner(scan)) {
                for (Result result : resultScanner) {
                    printCells(result);
                }
            }
        }
    }

    /**
     * Prints every cell of a Result as "row-family-qualifier-value".
     *
     * @param result the Result whose raw cells are printed
     */
    private static void printCells(Result result) {
        for (Cell cell : result.rawCells()) {
            System.out.println(Bytes.toString(CellUtil.cloneRow(cell))
                    + "-" + Bytes.toString(CellUtil.cloneFamily(cell))
                    + "-" + Bytes.toString(CellUtil.cloneQualifier(cell))
                    + "-" + Bytes.toString(CellUtil.cloneValue(cell)));
        }
    }

    public static void main(String[] args) throws IOException {
        // Sample data used by the read/scan demos above; uncomment to (re)load it.
//        putCell("bigdata", "student", "1001", "info", "name", "zs");
//        putCell("bigdata", "student", "1001", "info", "age", "18");
//        putCell("bigdata", "student", "1001", "info", "age", "17");
//        putCell("bigdata", "student", "1001", "info", "age", "19");
//        putCell("bigdata", "student", "1001", "info", "age", "20");
//        putCell("bigdata", "student", "1001", "info", "age", "21");
//        putCell("bigdata", "student", "1001", "info", "age", "22");
//        putCell("bigdata", "student", "1001", "info", "age", "23");
//        putCell("bigdata", "student", "1002", "info", "name", "ls");
//        putCell("bigdata", "student", "1002", "info", "age", "18");
//        putCell("bigdata", "student", "1002", "info", "age", "19");
//        putCell("bigdata", "student", "1002", "info", "age", "20");
//        putCell("bigdata", "student", "1003", "info", "name", "ww");
//        putCell("bigdata", "student", "1003", "info", "age", "17");
//        putCell("bigdata", "student", "1003", "info", "age", "21");
//        putCell("bigdata", "student", "1004", "info", "name", "jinlian");
//        putCell("bigdata", "student", "1004", "info", "age", "16");
//        putCell("bigdata", "student", "1005", "info", "name", "dalang");
//        putCell("bigdata", "student", "1005", "info", "age", "30");

//        deleteCells("bigdata", "student", "1001", "info", "age");
//        getCells("bigdata", "student", "1001", "info", "age");
        scanTable("bigdata", "student", "1001", "1002");

        // Release the shared HBase connection before exiting.
        Hbase_Connect_Single.closeConnect();
    }
}
