package map;

import java.io.BufferedReader;
import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import plagiarismChecker.MapReduceSkeleton;

public class MapJob1 extends Mapper<Text, Text, Text, Text> {

	/** Number of consecutive words grouped into each emitted phrase. */
	private int phraseLength = 3;

	/**
	 * Reads the file whose path is given by {@code key}, splits each line into
	 * non-overlapping phrases of up to {@code phraseLength} lower-cased words,
	 * and emits one ({@code key + separator + phrase}, {@code "1"}) pair per phrase.
	 *
	 * @param key     path of the input file; also used as the prefix of every emitted key
	 * @param value   unused
	 * @param context Hadoop context the (phrase, "1") pairs are written to
	 * @throws IOException          if the file cannot be opened or read
	 * @throws InterruptedException if writing to the context is interrupted
	 */
	@Override
	protected void map(Text key, Text value, Context context) throws IOException, InterruptedException {
		// try-with-resources guarantees the stream is closed even on failure
		// (the original leaked it and swallowed every exception, hiding task
		// failures from the framework). Charset is pinned so decoding does not
		// depend on the platform default.
		try (BufferedReader br = new BufferedReader(
				new InputStreamReader(new FileInputStream(key.toString()), StandardCharsets.UTF_8))) {
			String line;
			// Read the file line by line; readLine() already strips the newline,
			// so no "\n" replacement is needed.
			while ((line = br.readLine()) != null) {
				// Split on runs of whitespace so repeated spaces/tabs do not
				// produce empty "words"; trim first so a leading gap does not
				// yield an empty leading token.
				String[] words = line.toLowerCase().trim().split("\\s+");
				for (int i = 0; i < words.length; i += phraseLength) {
					// Join up to phraseLength consecutive words into one phrase.
					StringBuilder phrase = new StringBuilder(words[i]);
					for (int j = 1; j < phraseLength && i + j < words.length; j++) {
						phrase.append(' ').append(words[i + j]);
					}
					// A blank line splits to a single empty token; skip it.
					if (phrase.length() != 0) {
						context.write(new Text(key + MapReduceSkeleton.separator + phrase), new Text("1"));
					}
				}
			}
		}
	}
}