package com.bocommlife.mi;

import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.Arrays;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapred.TableOutputFormat;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapred.JobConf;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

import scala.Tuple2;

/**
 * Spark word-count job that persists its results to HBase.
 *
 * <p>Reads a text file (path given as {@code args[0]}), counts the occurrences
 * of each whitespace-separated word, and writes one row per word into the
 * HBase table {@code users}, column {@code info:occurance} (row key = word,
 * value = count as a 4-byte int).
 */
public class WordCountToHBase {

	// Column family / qualifier for the occurrence count. Precomputed once;
	// static fields are resolved on the executors, not captured by the lambda.
	private static final byte[] CF_INFO = Bytes.toBytes("info");
	private static final byte[] COL_OCCURANCE = Bytes.toBytes("occurance");

	/**
	 * Entry point.
	 *
	 * @param args {@code args[0]} = input text file path (e.g. an HDFS URI)
	 * @throws Exception if the Spark job fails or the HBase writes fail
	 */
	public static void main(String[] args) throws Exception {
		// Fail fast with a usage message instead of ArrayIndexOutOfBoundsException.
		if (args.length < 1) {
			System.err.println("Usage: WordCountToHBase <input-path>");
			System.exit(1);
		}

		SparkConf conf = new SparkConf()
				.setAppName("Jimmy's first spark app")
				.set("spark.yarn.jars", "hdfs://129.1.9.47:9000/spark_jars/*");

		// JavaSparkContext is AutoCloseable: close it so the app shuts down cleanly.
		try (JavaSparkContext sc = new JavaSparkContext(conf)) {

			JavaRDD<String> lines = sc.textFile(args[0]);

			JavaPairRDD<String, Integer> counts = lines
					.flatMap(line -> Arrays.asList(line.split(" ")).iterator())
					// split(" ") produces "" tokens for leading/repeated spaces; drop them.
					.filter(word -> !word.isEmpty())
					.mapToPair(word -> new Tuple2<>(word, 1))
					.reduceByKey((a, b) -> a + b);

			// Debug output: printed on the executors' stdout, not the driver's.
			counts.foreach(x -> System.out.println(x));

			// One HBase connection per partition: this lambda runs on the executors,
			// so the connection must be created there (it is not serializable).
			counts.foreachPartition(wordOccurancePair -> {
				Configuration hbaseConf = HBaseConfiguration.create();
				hbaseConf.set("hbase.zookeeper.property.clientPort", "2181");
				hbaseConf.set("hbase.zookeeper.quorum", "129.1.9.38,129.1.9.39");
				hbaseConf.set("hbase.defaults.for.version.skip", "true");

				// try-with-resources: the original leaked both the Connection and the
				// Table handle on every partition.
				try (Connection hbaseConn = ConnectionFactory.createConnection(hbaseConf);
						Table table = hbaseConn.getTable(TableName.valueOf("users"))) {
					wordOccurancePair.forEachRemaining(pair -> {
						Put put = new Put(Bytes.toBytes(pair._1));
						put.addColumn(CF_INFO, COL_OCCURANCE, Bytes.toBytes(pair._2));
						try {
							table.put(put);
						} catch (IOException e) {
							// Fail the task so Spark retries the partition instead of
							// silently dropping this word's count (original swallowed it).
							throw new UncheckedIOException(
									"HBase put failed for word: " + pair._1, e);
						}
					});
				}
			});
		}
	}
}
