package twitterhadoop.app.test;

import java.io.IOException;
import java.util.regex.Pattern;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import twitterhadoop.hadoop.io.TextArrayWritable;

/**
 * Mapper for a word-count job over tweet text.
 *
 * <p>For each input record (an array of raw tokens) it strips every
 * non-ASCII-letter character from each token and emits a
 * {@code (word, 1)} pair per surviving word. Tokens that contain no
 * letters at all (pure punctuation, digits, etc.) are skipped rather
 * than counted as the empty string.
 */
public class TwitterWordCountMapper extends Mapper<LongWritable, TextArrayWritable, Text, IntWritable> {

	// Compile the pattern once; String.replaceAll would recompile it
	// for every token of every record.
	private static final Pattern NON_LETTERS = Pattern.compile("[^a-zA-Z]");

	// Shared constant output value — every emitted pair carries count 1.
	private static final IntWritable ONE = new IntWritable(1);

	// Hadoop idiom: reuse the key Writable across map() calls instead of
	// allocating fresh objects per record.
	private final Text outKey = new Text();

	/**
	 * Emits one {@code (word, 1)} pair for each letters-only word derived
	 * from the tokens of {@code value}.
	 *
	 * @param key     byte offset of the record (unused)
	 * @param value   array of raw tokens for this record
	 * @param context Hadoop context used to emit output pairs
	 * @throws IOException          if the framework fails to write output
	 * @throws InterruptedException if the task is interrupted
	 */
	@Override
	protected void map(LongWritable key, TextArrayWritable value, Context context)
			throws IOException, InterruptedException {
		for (String raw : value.toArray()) {
			String word = NON_LETTERS.matcher(raw).replaceAll("");
			// Fix: a token with no letters reduces to "" — skip it so the
			// job does not count empty-string "words".
			if (word.isEmpty()) {
				continue;
			}
			outKey.set(word);
			context.write(outKey, ONE);
		}
	}

}
