package com.alan.mr.weibo;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.lib.partition.HashPartitioner;

/**
 * Decides the partition for each key. The plan is four partitions:
 * the total count (n) gets one of its own, and the tf keys share the
 * other three.
 * Created by Alan on 2017/10/2.
 */
public class FirstPartition extends HashPartitioner<Text, IntWritable> {

    // Marker key for the total-count record; hoisted to a constant so the
    // comparison does not allocate a new Text for every record.
    private static final Text COUNT_KEY = new Text("count");

    @Override
    public int getPartition(Text key, IntWritable value, int numReduceTasks) {
        if (key.equals(COUNT_KEY)) {
            // The total number of weibo posts goes to its own reducer:
            // partition 3, the fourth and last one.
            return 3;
        } else {
            // tf keys are hashed over the remaining three partitions (0-2)
            // by delegating to HashPartitioner with numReduceTasks - 1.
            return super.getPartition(key, value, numReduceTasks - 1);
        }
    }
}
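
/*
 * Usage sketch (assumed driver wiring, not part of this file; the job name
 * "weibo-tf" is hypothetical): the hard-coded partition index 3 above is
 * only valid when the job runs with exactly four reduce tasks.
 *
 *   Job job = Job.getInstance(new Configuration(), "weibo-tf");
 *   job.setPartitionerClass(FirstPartition.class);
 *   job.setNumReduceTasks(4); // 0-2 for tf, 3 for the "count" total
 */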
