package com.qst.mapreduce.wordcount.mywc;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;

/**
 * WordCount mapper: tokenizes each input line and emits a ({@code word}, 1)
 * pair for every token.
 *
 * <p>Input key is the byte offset of the line ({@link LongWritable}), input
 * value is the line text ({@link Text}). Output is ({@link Text},
 * {@link IntWritable}) for the reducer to sum.
 */
public class MyWordCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

    // Reused across map() calls to avoid allocating two objects per word —
    // the standard Hadoop writable-reuse idiom (context.write serializes
    // the values immediately, so reuse is safe).
    private final Text outKey = new Text();
    private static final IntWritable ONE = new IntWritable(1);

    /**
     * Splits the line on whitespace and writes one (word, 1) pair per token.
     *
     * @param key     byte offset of this line in the input split (unused)
     * @param value   the line of text to tokenize
     * @param context MapReduce context used to emit output pairs
     * @throws IOException          if the framework fails to write output
     * @throws InterruptedException if the task is interrupted
     */
    @Override
    protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, IntWritable>.Context context) throws IOException, InterruptedException {
        String line = value.toString();
        // \\s+ collapses runs of whitespace; plain " " would yield empty
        // tokens on consecutive spaces and emit bogus ("", 1) pairs.
        String[] words = line.split("\\s+");
        for (String word : words) {
            if (word.isEmpty()) {
                // A leading space still produces one empty first token.
                continue;
            }
            outKey.set(word);
            context.write(outKey, ONE);
        }
    }
}
