package com.chb.weibo1;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.lib.partition.HashPartitioner;
/**
 * Custom partitioner that sends the term-frequency (TF) records and the
 * total-weibo-count record to different reduce partitions, so the two
 * statistics end up in separate output files.
 * @author 12285
 */
public class FirstPartitioner extends HashPartitioner<Text, IntWritable>{
	/**
	 * Routes the special "count" key (the total weibo count) to the LAST
	 * reduce partition and hashes every other key (the TF records) over the
	 * remaining partitions, so the two statistics land in separate files.
	 *
	 * @param key            mapper output key; the literal "count" marks the total-count record
	 * @param value          mapper output value (unused for routing)
	 * @param numReduceTasks total number of reduce tasks configured for the job
	 * @return partition index in the range [0, numReduceTasks)
	 */
	@Override
	public int getPartition(Text key, IntWritable value, int numReduceTasks) {
		// With a single reducer there is nothing to separate; also avoids a
		// division by zero in super.getPartition(..., numReduceTasks - 1).
		if (numReduceTasks < 2) {
			return 0;
		}
		if (key.toString().equals("count")) {
			// BUG FIX: was a hard-coded `return 3`, which is only correct when
			// the job runs exactly 4 reducers (and throws "Illegal partition"
			// with fewer). Use the last partition regardless of reducer count.
			return numReduceTasks - 1;
		}else {
			// Hash all other keys over the first numReduceTasks - 1 partitions,
			// keeping the last partition exclusive to the "count" record.
			return super.getPartition(key, value, numReduceTasks - 1);
		}
	}

}
