package mapReduce;

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.*;

import measures.DistanceSet;
import measures.Fuzzy_Match_Score;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;



/**
 * Translation Memories for patent translation: MapReduce implementation
 * of the Fuzzy Match Score (FMS) processing.
 * <p>
 * Arguments: input path, output path, location of the test corpus
 * (optional), lower bound of the FMS (optional, default: 0.7).
 *
 * @author Thomas Wangler, wangler_thomas@hotmail.com
 */
public class MapReduce {
	
	public static Double fmThreshold;
	public static String testcorp = "/home/public/pattr.testset.fmstest.source";
	/**
	 * MR implementation for the FMS processing
	 * @author wangler
	 */
	
	
	
	public static class Map extends Mapper<LongWritable,Text,Text,Text>{
		
		// Sentence pair (source test sentence, target corpus sentence) currently being scored.
		DistanceSet currSent;
		// Test sentences in "linenumber|||content" format, loaded once in setup().
		private ArrayList<String> sentList;
		
		/*
		 * Setup reads the file with the test sentences once and provides its content
		 * in form of an ArrayList to the mappers.
		 * Uses the configurable testcorp path so the command-line argument (args[2]
		 * in main) is honoured; the previous version hard-coded the file name.
		 * NOTE(review): static fields (testcorp, fmThreshold) only reach the mapper
		 * when the job runs in local mode; on a distributed cluster they should be
		 * passed through the job Configuration instead — confirm deployment mode.
		 */
		public void setup(Context context) throws IOException{
			sentList = readFile(testcorp);
		}
		
		/*
		 * map function, reads from a hdfs file in the format "linenumber|||linecontent"
		 * and compares it to the test sentences, which are in the same format.
		 * Output is a triple of the line of the source, the line of the match in the
		 * hdfs corpus and the FMS between the two.
		 */
		public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException{
			
			// Split the input record once instead of once per field.
			String[] record = value.toString().split("\\|\\|\\|");
			String line = record[0];
			String target = record[1];
			double curr_fms;
			
			for(String sourceSent: sentList){
				
				String[] src = sourceSent.split("\\|\\|\\|");
				String pos = src[0];
				String source = src[1];
				
				currSent = new DistanceSet(source, target);
				
				// Cheap upper bound first: only compute the full FMS when it can
				// possibly reach the threshold.
				if(currSent.getMaxFMS() >= fmThreshold){
					
					curr_fms = Fuzzy_Match_Score.computeFMS(currSent);
					
					if(curr_fms >= fmThreshold){
						context.write(new Text(pos), new Text(line + "\t" + curr_fms));
					}
				}
			}
		}
	}
	
	public static class Reduce extends Reducer<Text,Text,Text,DoubleWritable>{
		
		/*
		 * reduce function, takes the (ordered) output of the mappers and iterates
		 * over the value set of each key. For each key, only the first best FMS
		 * score is kept (strict '>' comparison, so ties keep the earliest value).
		 * Output is a triple of the line of the source sentence, the line of the
		 * target sentence and the corresponding FMS.
		 */
		public void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
			
			// Running best match for this key.
			double bestScore = 0;
			String bestLine = null;
			
			// Each value is "targetLine\tfms"; keep the highest-scoring one.
			for(Text entry: values){
				String[] fields = entry.toString().split("\t");
				String candidateLine = fields[0];
				double candidateScore = Double.parseDouble(fields[1]);
				
				if(candidateScore > bestScore){
					bestScore = candidateScore;
					bestLine = candidateLine;
				}
			}
			
			context.write(new Text(key.toString() + "\t" + bestLine), new DoubleWritable(bestScore));
		}
	}
	
	/*
	 * @args: string containing the path to the file
	 * reads the testsentences from a file into memory as an ArrayList.
	 */
	/*
	 * Reads the test sentences from a file into memory as an ArrayList,
	 * one entry per line, in file order.
	 * Uses try-with-resources so the stream is closed even when readLine()
	 * throws; the previous version leaked the stream on a read error.
	 * NOTE(review): decodes with the platform default charset, as before —
	 * confirm the corpus encoding and pass an explicit charset if needed.
	 *
	 * @param file path to the file containing the test sentences
	 * @return all lines of the file
	 * @throws IOException if the file cannot be opened or read
	 */
	public static ArrayList<String> readFile(String file) throws IOException{
		
		// Pre-size for the expected corpus size (same hint as the old ensureCapacity).
		ArrayList<String> testS = new ArrayList<String>(2500);
		
		try (BufferedReader reader = new BufferedReader(
				new InputStreamReader(new FileInputStream(file)))) {
			
			String line;
			while((line = reader.readLine()) != null){
				testS.add(line);
			}
		}
		
		return testS;
	}
	
	
	/**
	 * @param args
	 * @throws IOException 
	 * @throws InterruptedException 
	 * @throws ClassNotFoundException 
	 */
	/**
	 * Configures and submits the FMS MapReduce job.
	 *
	 * @param args args[0] = HDFS input path, args[1] = HDFS output path,
	 *             args[2] = test corpus path (optional),
	 *             args[3] = FMS lower bound (optional, default 0.7)
	 * @throws IOException 
	 * @throws InterruptedException 
	 * @throws ClassNotFoundException 
	 */
	public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
		
		// The first two arguments are mandatory; fail fast with a usage hint.
		if(args.length < 2){
			System.err.println("usage: MapReduce <input path> <output path> [testcorpus] [fms lower bound]");
			return;
		}
		
		// commandline input: guard with args.length — the previous version indexed
		// args[2]/args[3] unconditionally and threw ArrayIndexOutOfBoundsException
		// whenever the optional arguments were omitted.
		if(args.length > 2 && args[2] != null){
			testcorp = args[2];
		}else{
			testcorp = "/home/public/pattr.testset.fmstest.source";
		}
		if(args.length > 3 && args[3] != null){
			fmThreshold = Double.parseDouble(args[3]);
		}else{
			fmThreshold = 0.7;
		}
		
		//hadoop config
		Configuration conf = new Configuration();
		
		// Job.getInstance replaces the deprecated Job(Configuration, String) constructor.
		Job job = Job.getInstance(conf, "computeFMS");
		
		//job output format
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(DoubleWritable.class);
		
		//map output format
		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(Text.class);
		
		//designate jar, mapper and reducer class
		job.setJarByClass(mapReduce.MapReduce.class);
		job.setMapperClass(mapReduce.MapReduce.Map.class);
		job.setReducerClass(mapReduce.MapReduce.Reduce.class);
		
		//set input format
		job.setInputFormatClass(TextInputFormat.class);
		job.setOutputFormatClass(TextOutputFormat.class);
		
		//file handling
		FileInputFormat.addInputPath(job, new Path(args[0]));
		FileOutputFormat.setOutputPath(job, new Path(args[1]));
		
		job.waitForCompletion(true);
	}

}
