package finalhadoop;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.filecache.DistributedCache;



public class FinalStringMatching {
	
	  /**
	   * Builds the Boyer-Moore bad-character table for {@code pat}.
	   *
	   * @param pat the pattern to preprocess (treated as extended-ASCII, radix 256)
	   * @return a 256-entry table where entry c holds the index of the rightmost
	   *         occurrence of character c in the pattern, or -1 if c never occurs
	   */
	  public static int[] BoyerMoorePreProcess(String pat) {
	      final int RADIX = 256;
	      int[] rightmost = new int[RADIX];
	      java.util.Arrays.fill(rightmost, -1);
	      // Later occurrences overwrite earlier ones, leaving the rightmost index.
	      for (int j = 0; j < pat.length(); j++) {
	          rightmost[pat.charAt(j)] = j;
	      }
	      return rightmost;
	  }
	
  public static class Map extends Mapper<LongWritable, Text, WordKey, Text> {

     private Text location = new Text();
     // Query-word list shipped to each task via the DistributedCache.
     private File stringListFile;
    // private Path[] filepath;

     @Override
     public void setup(Context context) throws IOException, InterruptedException{

    	 //   filepath = DistributedCache.getLocalCacheFiles(context.getConfiguration());
    	  //stringListFile = new File(filepath[0].toUri()); 	//normal case
    	    // The cache file is symlinked into the task working directory, so a
    	    // relative path sidesteps the "URI is not absolute" failure above.
    	    stringListFile = new File("./query.dat"); 			//fix URI is not absolute

    	}

     /**
      * Scans one input split for every query word using the Boyer-Moore
      * bad-character rule and emits ((file,word,offset) -> offset) pairs.
      */
     public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {

         String currentFile = ((FileSplit) context.getInputSplit()).getPath().getName();
         BufferedReader linereader = new BufferedReader(new InputStreamReader(new FileInputStream(stringListFile)));
         try {
             String KeyWord;
             int keyOffset = (int) key.get();

             byte[] longByte = value.getBytes();
             // Text.getBytes() returns the reused backing array, which may be
             // longer than the logical content; getLength() is the true bound.
             int N = value.getLength();

             while ((KeyWord = linereader.readLine()) != null) {

                 int M = KeyWord.length();
                 if (M == 0 || N == 0) {
                     continue; // empty pattern would "match" everywhere; skip it
                 }
                 int[] right = BoyerMoorePreProcess(KeyWord);
                 int x; // trailing bytes excluded from the search

                 // NOTE(review): stopping M bytes early when longByte[0]==0 (last
                 // split) vs. a fixed 99 bytes otherwise presumably accounts for
                 // overlap appended by CustomFileInputFormat — confirm there.
                 if ((int) longByte[0] == 0)
                     x = M; // for last split
                 else
                     x = 99;

                 int skip;
                 // NOTE(review): scan starts at i = 1 and reports keyOffset+(i-1),
                 // which looks like a one-byte header from the custom input
                 // format — confirm before changing.
                 for (int i = 1; i <= N - 1 - x; i += skip) {
                     skip = 0;
                     for (int j = M - 1; j >= 0; j--) {
                         // Mask to 0..255: bytes >= 0x80 are negative in Java and
                         // would index right[] out of bounds.
                         int textChar = longByte[i + j] & 0xFF;
                         if (KeyWord.charAt(j) != textChar) {
                             skip = Math.max(1, j - right[textChar]);
                             break;
                         }
                     }
                     if (skip == 0) // match is found
                     {
                         int Loc = keyOffset + (i - 1);
                         location.set(Integer.toString(Loc));
                         WordKey wordKey = new WordKey(currentFile + "," + KeyWord, Long.valueOf(Loc));
                         context.write(wordKey, location);
                         skip = 1; // advance one byte so overlapping matches are found
                     }
                 }

             }
         } finally {
             // Close even if context.write throws, so the fd is never leaked.
             linereader.close();
         }

     }
  }
  
  public static class Reduce extends Reducer<WordKey, Text, Text, Text> {

	  private Text keys = new Text();

	  /**
	   * Writes every match location for one (file,word) group. The key text is
	   * emitted only for the first row; later rows carry a null key — presumably
	   * so CustomTextOutputFormat renders the group under a single heading
	   * (null-key handling depends on that class).
	   */
	  public void reduce(WordKey key, Iterable<Text> values, Context context) throws IOException, InterruptedException {

		 keys.set(key.toString());
		 Text outKey = keys;
		 for (Text value : values) {
			context.write(outKey, value);
			outKey = null; // all subsequent rows in this group get no key
		 }
	  }
}
  
  /**
   * Configures and submits the string-matching job.
   * Usage: FinalStringMatching &lt;input path&gt; &lt;output path&gt;
   */
  public static void main(String[] args) throws Exception {

	 if (args.length < 2) {
		System.err.println("Usage: FinalStringMatching <input path> <output path>");
		System.exit(2);
	 }

	 Configuration conf = new Configuration();
	 conf.set("mapred.max.split.size","67108864");//64MB (32MB would be 33554432)
     Job job = new Job(conf, "StringMatching");
     job.setJarByClass(FinalStringMatching.class);

     // Secondary sort: partition and group on the natural (file,word) key while
     // the composite key orders offsets, so each reducer sees sorted locations.
     job.setPartitionerClass(NaturalKeyPartitioner.class);
	 job.setGroupingComparatorClass(NaturalKeyGroupingComparator.class);
	 job.setSortComparatorClass(CompositeKeyComparator.class);

	 job.setMapOutputKeyClass(WordKey.class);
	 job.setMapOutputValueClass(Text.class);

     job.setOutputKeyClass(Text.class);
     job.setOutputValueClass(Text.class);

     job.setMapperClass(Map.class);
     job.setReducerClass(Reduce.class);

     job.setInputFormatClass(CustomFileInputFormat.class);
     job.setOutputFormatClass(CustomTextOutputFormat.class);

     // Ship the query-word list to every task; Map.setup() reads it back as
     // ./query.dat from the task working directory.
     DistributedCache.addCacheFile(new URI("./wordlist/query.dat"), job.getConfiguration());
     FileInputFormat.addInputPath(job, new Path(args[0]));
     FileOutputFormat.setOutputPath(job, new Path(args[1]));

     // Propagate job success/failure as the process exit code instead of
     // silently discarding waitForCompletion's result.
     System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}