/*
package com.culturalCenter.dataCenter.SparkOnHbaseTest;

import com.twitter.chill.Base64;
import lombok.extern.slf4j.Slf4j;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import scala.Tuple2;

import java.util.List;


*/
/**
 * Must be serialized for use (closures shipped to Spark executors require serializable state).
 *//*

//@Component
@Slf4j
public class SparkOnHbaseTest {

    // Demo/spike: connects to a standalone Spark master, scans the HBase "users"
    // table through TableInputFormat, and materializes the rows as RDDs.
    // NOTE(review): results are only collected into local variables and never
    // returned or persisted — this method is exploratory test code.
    public void getHbase() {
        //        SparkSession spark = SparkSession.builder().master("local[*]").appName("HBASEDATA")
        //                .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
        //                .getOrCreate();
        //        SparkSession spark = SparkSession.builder().master("spark://192.168.0.124:7077").appName("HBASEDATA")
        //                // host name the driver advertises to executors
        //                .config("spark.driver.host", "192.168.0.137")
        //                // driver RPC port
        //                .config("spark.driver.port", "9092")
        //                .config("spark.driver.blockManager.port", "9093")
        ////                .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
        //                .config("spark.jars", "E:\\work\\GuangzhouCulturalCenter-DataCenter\\code\\dataCenterServer\\target\\dataCenter-1.0.0.jar")
        //                .getOrCreate();
        // Driver host/ports are hard-coded to a developer machine; the application
        // jar is shipped to the cluster so executors can deserialize the closures.
        SparkConf conf = new SparkConf().setAppName("wordCount_111")
                .setMaster("spark://192.168.0.124:7077")
                .set("spark.driver.host", "192.168.0.137")
                // driver RPC port
                .set("spark.driver.port", "9092")
                .set("spark.driver.blockManager.port", "9093")
                .setJars(new String[]{"E:\\work\\GuangzhouCulturalCenter-DataCenter\\code\\dataCenterServer\\target\\dataCenter-1.0.0.jar"});

        JavaSparkContext sc = new JavaSparkContext(conf);


        // Table/column coordinates of the scan target.
        String tableName = "users";
        String FAMILY = "personal";
        String COLUM_ID = "id";
        String COLUM_NAME = "name";
        String COLUM_PHONE = "phone";

        // HBase connection configuration: ZooKeeper quorum plus the input table
        // consumed by TableInputFormat.
        Configuration hconf = HBaseConfiguration.create();
        hconf.set("hbase.zookeeper.quorum", "192.168.0.124");
        hconf.set("hbase.zookeeper.property.clientPort", "9095");
        hconf.set(TableInputFormat.INPUT_TABLE, tableName);
        // Restrict the scan to the three columns of interest.
        Scan scan = new Scan();
        scan.addFamily(Bytes.toBytes(FAMILY));
        scan.addColumn(Bytes.toBytes(FAMILY), Bytes.toBytes(COLUM_ID));
        scan.addColumn(Bytes.toBytes(FAMILY), Bytes.toBytes(COLUM_NAME));
        scan.addColumn(Bytes.toBytes(FAMILY), Bytes.toBytes(COLUM_PHONE));
        try {

            // Serialize the Scan (protobuf + Base64) and hand it to
            // TableInputFormat through the job configuration.
            ClientProtos.Scan proto = ProtobufUtil.toScan(scan);
            String ScanToString = Base64.encodeBytes(proto.toByteArray());
            hconf.set(TableInputFormat.SCAN, ScanToString);

            // Read the HBase table as an RDD of (rowkey, Result) pairs.
            JavaPairRDD<ImmutableBytesWritable, Result> hbaseRDD = sc.newAPIHadoopRDD(hconf, TableInputFormat.class, ImmutableBytesWritable.class, Result.class);
            hbaseRDD.cache();// cached because the RDD is traversed more than once below
            long count = hbaseRDD.count(); // NOTE(review): count is never used

            // Project each Result into a Row of (rowkey, id, name).
            // NOTE(review): COLUM_PHONE is scanned but never extracted here.
            JavaRDD<Row> rrd = hbaseRDD.map((Function<Tuple2<ImmutableBytesWritable, Result>, Row>) tuple2 -> {
                Result result = tuple2._2();
                String rowKey = Bytes.toString(result.getRow());
                String id = Bytes.toString(result.getValue(Bytes.toBytes(FAMILY), Bytes.toBytes(COLUM_ID)));
                String name = Bytes.toString(result.getValue(Bytes.toBytes(FAMILY), Bytes.toBytes(COLUM_NAME)));
                return RowFactory.create(rowKey, id, name);
            });

            // Pull everything back to the driver (only viable for small tables).
            List<Row> rows = rrd.collect(); // NOTE(review): rows is never used
            List<Result> list = hbaseRDD.map(Tuple2::_2).collect();
            for (Result result : list) {
                List<Cell> cells = result.listCells();
                for (Cell cell : cells) {
                    // NOTE(review): loop body is empty — cells are iterated but never read
                }
            }
            String ds = ":"; // NOTE(review): unused local
            //// write the results to HDFS
            ////            rrd.saveAsTextFile("hdfs://********:8020/tmp/test");
            ////
            ////            hbaseRDD.unpersist();
        } catch (Exception e) {
            // NOTE(review): exception is logged and swallowed; callers cannot
            // distinguish success from failure.
            log.error(e.getMessage(), e);
        }
    }

}
*/
