package com.hbase.mapreduce;

import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;

/**
 * MapReduce word count over HBase: reads lines from the source table
 * ({@code InitData.hWorldTable}), counts word occurrences, and writes one row
 * per word into the result table ({@code InitData.hResultTable}) with the
 * count stored as a string in column {@code num} of family
 * {@code InitData.hResultFamily}.
 */
public class WordCount {

	public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
		Configuration conf = HBaseConfiguration.create();
		Job jobConf = Job.getInstance(conf);
		jobConf.setJarByClass(WordCount.class);
		jobConf.setJobName("word count");

		Scan scan = new Scan();
		// Restrict the scan to the source family; the source data is stored
		// under the empty/null qualifier, so addColumn(family, null) matches it.
		scan.addColumn(InitData.hWorldTableFamily, null);

		// Mapper reads from the "word" source table.
		TableMapReduceUtil.initTableMapperJob(InitData.hWorldTable, scan, MyMapper.class, Text.class, IntWritable.class, jobConf);

		// Reducer writes into the "result" table.
		TableMapReduceUtil.initTableReducerJob(InitData.hResultTable.getNameAsString(), MyReduce.class, jobConf);
		System.exit(jobConf.waitForCompletion(true) ? 0 : 1);
	}

	/**
	 * Mapper: one HBase row in, (word, 1) pairs out.
	 * TableMapper&lt;Text, IntWritable&gt; — Text is the output key type,
	 * IntWritable the output value type.
	 */
	public static class MyMapper extends TableMapper<Text, IntWritable> {
		// Reused across map() calls to avoid per-record allocation; a mapper
		// instance is single-threaded, so this is safe.
		private final IntWritable one = new IntWritable(1);
		private final Text word = new Text();

		@Override
		protected void map(ImmutableBytesWritable key, Result value, Context context) throws IOException, InterruptedException {
			// getValue(family, qualifier) copies exactly the cell's value bytes.
			// (The previous Cell.getValueArray() returned the whole backing
			// array — row key, family, qualifier and all — producing garbage.)
			byte[] line = value.getValue(InitData.hWorldTableFamily, null);
			if (line == null) {
				// Row has no cell in this family/qualifier; nothing to count.
				return;
			}
			// Tokenize on whitespace and emit (word, 1) per token.
			StringTokenizer st = new StringTokenizer(Bytes.toString(line));
			while (st.hasMoreTokens()) {
				word.set(st.nextToken());
				context.write(word, one);
			}
		}
	}

	/**
	 * Reducer: sums the counts for each word and writes one HBase row per
	 * word (row key = the word itself).
	 * TableReducer&lt;Text, IntWritable, ImmutableBytesWritable&gt; —
	 * Text: input key type; IntWritable: input value type;
	 * ImmutableBytesWritable: output key type.
	 */
	public static class MyReduce extends TableReducer<Text, IntWritable, ImmutableBytesWritable> {

		@Override
		protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
			int sum = 0;
			for (IntWritable value : values) {
				sum += value.get();
			}
			// One row per word: the word is the row key.
			byte[] rowKey = Bytes.toBytes(key.toString());
			Put put = new Put(rowKey);
			// Column "num" in the result family holds the count, encoded as a
			// decimal string (kept as-is for compatibility with readers that
			// expect the string form).
			put.addColumn(InitData.hResultFamily, Bytes.toBytes("num"), Bytes.toBytes(String.valueOf(sum)));

			context.write(new ImmutableBytesWritable(rowKey), put);
		}
	}
}
