package com.v.compression;


import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;


/*
 * Input key:   LongWritable — byte offset of the line within the input split.
 * Input value: Text         — one line of the input file.
 * Output:      (Text word, IntWritable 1) for every token on the line.
 */
public class WordCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

    // Reused output key — Hadoop idiom to avoid allocating a new Text per token.
    private final Text word = new Text();

    // Constant count of 1 emitted with every token; reused for the same reason.
    private final IntWritable one = new IntWritable(1);

    /**
     * Tokenizes one input line and emits a {@code (word, 1)} pair per token.
     *
     * @param key     byte offset of this line within the input split (unused)
     * @param value   the line of text to tokenize
     * @param context sink for the emitted (word, 1) pairs
     * @throws IOException          if the framework fails to write a pair
     * @throws InterruptedException if the task is interrupted while writing
     */
    @Override
    protected void map(LongWritable key, Text value,
                       Mapper<LongWritable, Text, Text, IntWritable>.Context context)
            throws IOException, InterruptedException {

        // Step 1: convert the Hadoop Text value to a Java String for tokenizing.
        String line = value.toString();

        // Step 2: split on runs of whitespace. The previous split(" ") produced
        // empty tokens for consecutive spaces, emitting bogus ("", 1) records.
        for (String token : line.split("\\s+")) {
            // split("\\s+") still yields one leading "" when the line starts
            // with whitespace — skip empty tokens entirely.
            if (token.isEmpty()) {
                continue;
            }
            // Step 3: wrap the token and emit (word, 1).
            word.set(token);
            context.write(word, one);
        }
    }
}
