package com.mapreduce.combinerwc;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;

// Map phase of the word-count job.
// k1: map input key   — LongWritable (byte offset of the line in the split)
// v1: map input value — Text (the line content)
// k2: map output key  — Text (a single word)
// v2: map output value — IntWritable (always 1; summed by combiner/reducer)
public class WordCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

    // Output objects are reused across map() calls to avoid per-record
    // allocation — the standard Hadoop idiom. Hadoop serializes them on
    // write(), so mutation between calls is safe.
    private final Text k2 = new Text();
    private final IntWritable v2 = new IntWritable(1);

    /**
     * Emits (word, 1) for every whitespace-separated token in the input line.
     *
     * @param key     byte offset of the line within the input split (unused)
     * @param value   the line of text to tokenize
     * @param context MapReduce context used to emit (k2, v2) pairs
     * @throws IOException          if the framework fails to write output
     * @throws InterruptedException if the task is interrupted
     */
    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        // 1. Convert the Text value to a Java String.
        String line = value.toString();
        // 2. Tokenize on runs of whitespace. split(" ") would yield empty
        //    tokens for consecutive spaces and would not split on tabs.
        String[] words = line.split("\\s+");
        // 3. Emit each non-empty word with a count of 1. A line with leading
        //    whitespace still produces one empty leading token — skip it.
        for (String word : words) {
            if (word.isEmpty()) {
                continue;
            }
            k2.set(word);
            context.write(k2, v2);
        }
    }
}
