package com.rainsoft.center.isec.frame.handler.impl;

import com.rainsoft.center.isec.common.entity.Constants;
import com.rainsoft.center.isec.common.utils.ISecHBaseUtils;
import com.rainsoft.center.isec.frame.handler.AJavaRDDHandler;
import com.rainsoft.center.isec.stream.library.entity.hbase.BaseLibEntity;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapred.TableOutputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.spark.api.java.JavaRDD;
import scala.Tuple2;

import java.io.IOException;

/**
 * @Name com.rainsoft.center.isec.frame.SaveToHbaseJavaRDDHander
 * @Description
 * @Author Elwyn
 * @Version 2017/12/8
 * @Copyright 上海云辰信息科技有限公司
 **/
/**
 * Persists a {@link JavaRDD} of {@link BaseLibEntity} records into an HBase
 * table by converting each element to a {@link Put} and writing through the
 * new-API (mapreduce) {@code TableOutputFormat}.
 *
 * @param <T> entity type that supplies its own row key via {@code getRowKey()}
 */
public class SaveToHbaseHandler<T extends BaseLibEntity> extends AJavaRDDHandler<T> {
	/** Target HBase table name. */
	private final String hTableName;
	/** Column family every generated Put writes into. */
	private final String family;

	/**
	 * @param hTableName name of the HBase table to write into
	 * @param family     column family used for all entity columns
	 */
	public SaveToHbaseHandler(String hTableName, String family) {
		this.hTableName = hTableName;
		this.family = family;
	}

	/**
	 * Maps every RDD element to an {@code (ImmutableBytesWritable, Put)} pair
	 * and saves the pair RDD to HBase via {@code saveAsNewAPIHadoopDataset}.
	 *
	 * @param javaRDD records to persist; the save is the action that triggers
	 *                evaluation of the RDD
	 * @return {@code true} when the job was configured and the save submitted,
	 *         {@code false} if the Hadoop job could not be created
	 */
	@Override
	protected boolean resolve(JavaRDD<T> javaRDD) {
		Configuration conf = HBaseConfiguration.create();
		conf.set("hbase.zookeeper.quorum", Constants.ZK_HOSTS);
		// NOTE: this constant comes from the legacy mapred.TableOutputFormat
		// import, while the output format class set below is the new-API
		// mapreduce one. Both define OUTPUT_TABLE with the same key string
		// ("hbase.mapred.outputtable"), so the configuration is compatible.
		conf.set(TableOutputFormat.OUTPUT_TABLE, hTableName);

		try {
			Job job = Job.getInstance(conf);
			job.setOutputFormatClass(org.apache.hadoop.hbase.mapreduce.TableOutputFormat.class);

			// Convert each entity to (key, Put); TableOutputFormat ignores the
			// ImmutableBytesWritable key, so an empty instance is sufficient —
			// the actual row key is carried by the Put itself.
			javaRDD.mapToPair(t -> {
				Put put = ISecHBaseUtils.generatePut(t, t.getRowKey(), family);
				return new Tuple2<>(new ImmutableBytesWritable(), put);
			}).saveAsNewAPIHadoopDataset(job.getConfiguration());
			return true;
		} catch (IOException e) {
			// Surface the failure to the caller instead of continuing with a
			// half-configured job (the original swallowed this and relied on a
			// null-check). No logger is available in this class, so the stack
			// trace is printed before reporting failure.
			e.printStackTrace();
			return false;
		}
	}
}
