package docsim;

import java.io.*;
import java.util.*;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.mapred.jobcontrol.*;
import org.apache.hadoop.mapreduce.Mapper.Context;
import org.apache.hadoop.util.*;

/**
 * Hadoop driver (old {@code mapred} API) that runs the TF-IDF build job.
 *
 * <p>Wires {@code Map3}/{@code Reduce3} (declared elsewhere in this package)
 * into a job named "vocab", ships auxiliary files to the tasks through the
 * {@link DistributedCache}, and submits the job via {@link JobClient#runJob}.
 *
 * <p>Command line (see {@link #main}): {@code buildTFIDF <input> <docmap> <output>}.
 */
public class buildTFIDF extends Configured implements Tool {

       /**
        * Configures and submits the TF-IDF job.
        *
        * @param args positional {@code <input> <output>} paths, optionally
        *             interleaved with {@code -skip <file>} and/or
        *             {@code -docmap <file>} flags whose file arguments are
        *             added to the distributed cache
        * @return 0 on successful job completion
        * @throws IllegalArgumentException if a flag is missing its file
        *         argument or fewer than two positional paths are given
        * @throws Exception on job-submission or I/O failure
        */
       public int run(String[] args) throws Exception {
         JobConf conf = new JobConf(getConf(), buildTFIDF.class);
         conf.setJobName("vocab");

         // Both the map and reduce phases emit Text/Text pairs.
         conf.setOutputKeyClass(Text.class);
         conf.setOutputValueClass(Text.class);
         conf.setMapOutputKeyClass(Text.class);
         conf.setMapOutputValueClass(Text.class);

         // Map3/Reduce3 live elsewhere in this package; the raw casts mirror
         // the old mapred API's non-generic setter signatures.
         conf.setMapperClass((Class<? extends Mapper>) Map3.class);
         conf.setReducerClass((Class<? extends Reducer>) Reduce3.class);

         conf.setInputFormat(TextInputFormat.class);
         conf.setOutputFormat(TextOutputFormat.class);
         conf.setNumReduceTasks(10);

         // Separate option flags from positional arguments (input/output paths).
         List<String> other_args = new ArrayList<String>();
         for (int i = 0; i < args.length; ++i) {
           if ("-skip".equals(args[i])) {
             if (++i >= args.length) {
               throw new IllegalArgumentException("-skip requires a file argument");
             }
             DistributedCache.addCacheFile(new Path(args[i]).toUri(), conf);
             conf.setBoolean("docsim.skip.patterns", true);
           } else if ("-docmap".equals(args[i])) {
             if (++i >= args.length) {
               throw new IllegalArgumentException("-docmap requires a file argument");
             }
             DistributedCache.addCacheFile(new Path(args[i]).toUri(), conf);
             conf.setBoolean("docsim.docids.map", true);
           } else {
             other_args.add(args[i]);
           }
         }
         if (other_args.size() < 2) {
           throw new IllegalArgumentException(
               "expected <input> and <output> paths, got " + other_args);
         }

         // IDF table shipped to every task via the distributed cache.
         // NOTE(review): hard-coded local path — consider making this configurable.
         DistributedCache.addCacheFile(new Path("/home/oraut/idf.txt").toUri(), conf);

         FileInputFormat.setInputPaths(conf, new Path(other_args.get(0)));
         FileOutputFormat.setOutputPath(conf, new Path(other_args.get(1)));

         JobClient.runJob(conf);
         return 0;
       }

       /**
        * Entry point. Expects exactly three arguments:
        * {@code <input> <docmap> <output>}; rearranges them into the
        * {@code <input> <output> -docmap <docmap>} form consumed by {@link #run}.
        */
       public static void main(String[] args) throws Exception {
         if (args.length < 3) {
           System.err.println("Usage: buildTFIDF <input> <docmap> <output>");
           System.exit(2);
         }
         String inpath = args[0];
         String docmap = args[1];
         String outpath = args[2];
         String[] paths = { inpath, outpath, "-docmap", docmap };

         int res = ToolRunner.run(new Configuration(), new buildTFIDF(), paths);
         System.exit(res);
       }
}

