package com.chis.jmdatatimer.main;

import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.util.Base64;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import scala.Tuple2;

import java.io.IOException;
import java.util.List;

/**
 * Spark job that scans the HBase table {@code ll:test} (column {@code info:name})
 * through {@code TableInputFormat} and prints the number of rows found.
 *
 * @author wlj
 * @version 2018年12月18日
 * @Description Reads HBase rows into a Spark pair RDD and counts them.
 * @Company zwx
 */
public class HbaseTest {

    /**
     * Entry point: submits a Spark job (master {@code yarn-cluster}) that scans
     * the HBase table {@code ll:test}, restricted to column {@code info:name},
     * and prints the resulting row count to stdout.
     *
     * @param args command-line arguments (unused)
     */
    public static void main(String[] args) {

        SparkConf sparkConf = new SparkConf();
        sparkConf.setAppName("hive_convert_task");
        sparkConf.setMaster("yarn-cluster");
        sparkConf.set("spark.yarn.dist.files", "hdfs://192.168.150.90:9000/home/hadoop/yarn-site.xml");
        sparkConf.set("spark.yarn.jars", "hdfs://192.168.150.90:9000/home/hadoop/jars/*.jar");

        // JavaSparkContext is Closeable; try-with-resources guarantees the
        // context is stopped even on failure (the original leaked it).
        try (JavaSparkContext sc = new JavaSparkContext(sparkConf)) {
            Configuration conf = HBaseConfiguration.create();
            conf.set("hbase.rootdir", "hdfs://192.168.150.90:9000/hbase");

            // Scan restricted to family "info", column "name" only.
            Scan scan = new Scan();
            scan.addFamily(Bytes.toBytes("info"));
            scan.addColumn(Bytes.toBytes("info"), Bytes.toBytes("name"));

            conf.set(TableInputFormat.INPUT_TABLE, "ll:test");

            // TableInputFormat expects the Scan serialized as a Base64-encoded
            // protobuf string under the TableInputFormat.SCAN key.
            ClientProtos.Scan proto = ProtobufUtil.toScan(scan);
            String scanToString = Base64.encodeBytes(proto.toByteArray());
            conf.set(TableInputFormat.SCAN, scanToString);

            JavaPairRDD<ImmutableBytesWritable, Result> myRDD = sc.newAPIHadoopRDD(conf, TableInputFormat.class,
                    ImmutableBytesWritable.class, Result.class);

            // count() triggers the actual scan and reports the matching rows.
            System.out.println("共查询到数量: " + myRDD.count());

        } catch (IOException e) {
            // NOTE(review): consider an SLF4J logger here; printStackTrace is
            // kept to avoid introducing a new dependency in this test driver.
            e.printStackTrace();
        }
    }
}
