package UniqValueCountTest;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.aggregate.UniqValueCount;

import java.io.IOException;

public class MapTest extends Mapper<LongWritable, Text, Text, Text> {
    // Shared aggregator: accumulates distinct values across ALL map() calls
    // on this mapper instance — it is never reset per record, so the printed
    // count is a running total for the whole input split.
    UniqValueCount uniqValueCount = new UniqValueCount();

    /**
     * Splits each input line on single spaces, feeds every token into the
     * shared {@link UniqValueCount} aggregator, and prints the running number
     * of distinct values seen so far.
     *
     * <p>NOTE(review): nothing is written to {@code context}, so this mapper
     * emits no MapReduce output — confirm the println-only behavior is
     * intentional (it only appears in the task's stdout log).
     *
     * @param key     byte offset of the line within the input split (unused)
     * @param value   one line of input text
     * @param context task context (currently unused)
     * @throws IOException          declared by the Mapper contract
     * @throws InterruptedException declared by the Mapper contract
     */
    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // Java-style array declaration (was C-style "String str[]").
        // Splitting on a single space: consecutive spaces yield empty tokens,
        // which are counted as a distinct value too.
        String[] tokens = value.toString().split(" ");
        for (String token : tokens) {
            uniqValueCount.addNextValue(token);
        }
        // getReport() returns the distinct-value count as a decimal string.
        int count = Integer.parseInt(uniqValueCount.getReport());
        System.out.println("去重后的元素个数为:" + count);
    }
}
