import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import java.io.IOException;

public class SortMapper extends Mapper<LongWritable, Text, DescendingIntWritable, Text> {

    // Reused across map() calls to avoid per-record allocation (standard Hadoop idiom).
    private final DescendingIntWritable count = new DescendingIntWritable();
    private final Text word = new Text();

    /**
     * Inverts each input line of the form {@code "word count"} into a
     * {@code (count, word)} pair so the shuffle phase sorts records by count
     * (descending ordering presumably supplied by {@code DescendingIntWritable}'s
     * {@code compareTo} — confirm against that class).
     *
     * <p>Malformed lines — wrong number of fields or a non-numeric count — are
     * skipped rather than failing the task.
     *
     * @param key     byte offset of the line in the input split (unused)
     * @param value   one input line, expected format {@code "word count"}
     * @param context Hadoop context used to emit the inverted pair
     * @throws IOException          if emitting the record fails
     * @throws InterruptedException if the task is interrupted while writing
     */
    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // Trim first: leading whitespace would otherwise make split("\\s+")
        // produce an empty leading token and silently drop a valid record.
        String[] parts = value.toString().trim().split("\\s+");
        if (parts.length != 2) {
            return; // skip blank or malformed lines
        }
        try {
            count.set(Integer.parseInt(parts[1]));
        } catch (NumberFormatException e) {
            return; // skip records whose count field is not a valid integer
        }
        word.set(parts[0]);
        context.write(count, word); // emit (count, word)
    }
}