package com.qst.mapreduce.wordcount.jiangjieban.lianxi2;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;

/**
 * Mapper that scans CSV weather records and emits {@code (temperature, 1)}
 * for every record whose maximum-temperature field exceeds
 * {@link #TEMP_THRESHOLD}. The downstream reducer can then count how many
 * days reached each hot temperature.
 *
 * <p>Input record format (comma-separated): the third field (index 2) is
 * expected to hold an integer maximum temperature.
 */
public class Temp2WordCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

    /** Only temperatures strictly greater than this threshold are emitted. */
    private static final int TEMP_THRESHOLD = 30;

    /** Constant count value; immutable across calls, safe to share. */
    private static final IntWritable ONE = new IntWritable(1);

    // Reused across map() calls — standard Hadoop practice to avoid
    // allocating a fresh Writable for every input record.
    private final Text outKey = new Text();

    /**
     * Parses one CSV line and, when its max-temperature field is above
     * {@link #TEMP_THRESHOLD}, writes the temperature string as the key
     * with a count of 1. Malformed records (too few fields, or a
     * non-numeric temperature) are skipped rather than failing the task.
     *
     * @param key     byte offset of the line within the input split (unused)
     * @param value   one comma-separated input line
     * @param context Hadoop context used to emit the output pair
     * @throws IOException          if the framework write fails
     * @throws InterruptedException if the task is interrupted
     */
    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        String[] fields = value.toString().split(",");
        if (fields.length < 3) {
            return; // malformed record: not enough fields
        }

        final int maxTemp;
        try {
            // trim() tolerates stray whitespace around the number
            maxTemp = Integer.parseInt(fields[2].trim());
        } catch (NumberFormatException e) {
            return; // malformed record: temperature is not an integer
        }

        if (maxTemp > TEMP_THRESHOLD) {
            outKey.set(fields[2]); // emit the raw field, as the original did
            context.write(outKey, ONE);
        }
    }
}
