package com.pxene.hbase2hbase;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;

import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

/**
 * Reducer that copies rows into an HBase table, capping output at
 * {@link #MAX_PER_CATEGORY} rows per category. The category is the first
 * four characters of the row key; each input value has the form
 * {@code "<qualifier>|<cellValue>"} and becomes one cell in the
 * {@code article} column family of the output row.
 */
public class Hbase2HbaseReducer extends TableReducer<Text, Text, Text> {

    /** Maximum number of rows emitted per 4-character category prefix. */
    private static final int MAX_PER_CATEGORY = 100000;

    /** Column family every cell is written to. */
    private static final byte[] FAMILY = Bytes.toBytes("article");

    /** Rows seen so far per category prefix (reducer-local, dumped in cleanup). */
    private final Map<String, Integer> countMap = new HashMap<>();

    /**
     * Builds one {@link Put} per input key and writes it once, unless the
     * key's category has already reached {@link #MAX_PER_CATEGORY} rows.
     *
     * @param key     row key; its first 4 characters identify the category
     * @param values  strings of the form {@code "qualifier|cellValue"}
     * @param context MapReduce context the Put is written to
     */
    @Override
    protected void reduce(Text key, Iterable<Text> values, Context context)
            throws IOException, InterruptedException {
        String rowKey = key.toString();
        // Keys shorter than the 4-char category prefix cannot be classified;
        // skip them instead of throwing StringIndexOutOfBoundsException.
        if (rowKey.length() < 4) {
            return;
        }
        // Take at most MAX_PER_CATEGORY articles per category.
        String category = rowKey.substring(0, 4);
        Integer previous = countMap.get(category);
        int seen = (previous == null) ? 1 : previous + 1;
        countMap.put(category, seen);
        if (seen > MAX_PER_CATEGORY) {
            return;
        }
        // Bytes.toBytes uses UTF-8 consistently; String.getBytes() would
        // depend on the platform default charset.
        Put put = new Put(Bytes.toBytes(rowKey));
        for (Text value : values) {
            // Split on the first '|' only: the cell value may itself contain '|'.
            String[] parts = value.toString().split("\\|", 2);
            if (parts.length < 2) {
                continue; // malformed value with no delimiter — skip, don't crash
            }
            put.addColumn(FAMILY, Bytes.toBytes(parts[0]), Bytes.toBytes(parts[1]));
        }
        // BUG FIX: write the Put exactly once, after all cells are added.
        // The original wrote inside the loop, emitting the partially-filled
        // Put once per value and duplicating the row.
        if (!put.isEmpty()) {
            context.write(key, put);
        }
    }

    /** Dumps the per-category row counts at the end of the task for debugging. */
    @Override
    protected void cleanup(Context context) throws IOException, InterruptedException {
        for (Entry<String, Integer> entry : countMap.entrySet()) {
            System.out.println("======count=========" + entry.getKey() + ":" + entry.getValue());
        }
    }
}