package org.unigram.oluolu.recommend;

import java.io.IOException;
import java.util.Random;
import java.util.Vector;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
// import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
// import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import org.unigram.oluolu.common.FsUtil;
import org.unigram.oluolu.common.OluoluConstants;
import org.unigram.oluolu.common.OluoluLogger;
import org.unigram.oluolu.common.Query;
import org.unigram.oluolu.recommend.function.MinWiseFunction;

/**
 * Driver for the query recommendation pipeline.
 *
 * <p>Runs a chain of MapReduce jobs: (1) convert the raw input into a
 * feature representation, (2) repeatedly extract user clusters with
 * min-wise hashing (LSH), (3) collect users that share a cluster,
 * (4) add the queries of those related users as features, and
 * (5) emit the final recommendations. Intermediate cluster output is
 * removed when the pipeline finishes.</p>
 */
public class QueryRecommendation extends 
    Configured implements Tool {

    /** logger. */
    private final OluoluLogger logger = OluoluLogger.getLogger();

    /** random generator used to draw a fresh min-wise hash seed per pass. */
    private final Random rand = new Random();

    static {
        // register oluolu resources so every Configuration picks them up
        Configuration.addDefaultResource("conf/oluolu-default.xml");
        Configuration.addDefaultResource("conf/oluolu-site.xml");
    }

    /** 
     * run job for query recommendation with the configuration
     * supplied by {@link ToolRunner}.
     * 
     * @param args arguments containing input and output files
     * 
     * @throws Exception when a job cannot be submitted or executed
     * 
     * @return 0 success otherwise return 1
     */
    public final int run(final String[] args) throws Exception {
        return this.run(args, getConf());
    }

    /** 
     * run job for query recommendation.
     * 
     * <p>Recognized flags: {@code -input}, {@code -output}, {@code -depth},
     * {@code -iterate}, {@code -maxCluster}, {@code -minCluster},
     * {@code -maxRecommend}.</p>
     * 
     * @param args arguments containing input and output files
     * @param conf contains configuration
     * 
     * @throws Exception when a job cannot be submitted or executed
     * 
     * @return 0 success otherwise return 1
     */    
    public int run(final String[] args, 
            final Configuration conf) 
    throws Exception {
        FileSystem fs = FileSystem.get(conf);

        String inputFile = "";
        String changedInput = "";
        String outputPrefix = "";
        String clusterDir = "";
        String relatedUsersFile = "";
        String addedFeatureFile = "";

        // number of clustering passes (one LSH seed per pass)
        int iterate = 1;

        for (int i = 0; i < args.length; ++i) {
            if ("-input".equals(args[i])) {
                inputFile = args[++i];
                changedInput = inputFile + ".mod";
                clusterDir = inputFile + ".clusters";
            } else if ("-output".equals(args[i])) {
                outputPrefix = args[++i];
                relatedUsersFile = outputPrefix + ".rr";
                addedFeatureFile = outputPrefix + ".ff";
            } else if ("-depth".equals(args[i])) {
                conf.setInt(RecommendConstants.FEATURE_DEPTH,
                        Integer.parseInt(args[++i]));
            } else if ("-iterate".equals(args[i])) {
                iterate = Integer.parseInt(args[++i]);
            } else if ("-maxCluster".equals(args[i])) {
                conf.setLong(RecommendConstants.MAX_CLUSTER_SIZE,
                        Long.parseLong(args[++i]));
            } else if ("-minCluster".equals(args[i])) {
                conf.setLong(RecommendConstants.MIN_CLUSTER_SIZE,
                        Long.parseLong(args[++i]));
            } else if ("-maxRecommend".equals(args[i])) {
                conf.setLong(RecommendConstants.MAX_OUTPUT_SIZE,
                        Long.parseLong(args[++i]));
            }
        }

        this.logger.logInfo("Changing input format");
        if (!this.changeInputFormat(inputFile, changedInput, conf)) {
            // abort early: every later stage consumes this job's output
            this.logger.logInfo("Failed: changing input format");
            return 1;
        }

        FsUtil.checkPath(new Path(clusterDir), FileSystem.get(conf));
        for (int i = 0; i < iterate; i++) {
            String clusterOutputFile = clusterDir + "/iter" + i;
            this.logger.logInfo("Extracting clusters: " + clusterOutputFile);
            // a fresh seed each pass makes the LSH buckets differ per iteration
            conf.setLong(MinWiseFunction.MINWISE_HASH_SEED,
                    this.rand.nextLong());
            this.extractClusters(changedInput, clusterOutputFile, conf);
        }

        this.logger.logInfo("Extracting related users");
        boolean success = this.extractRelatedUsers(clusterDir + "/*",
                relatedUsersFile, conf, fs);

        if (success) {
            this.logger.logInfo("Adding queries of related users");
            success = this.addFeatures(relatedUsersFile,
                    changedInput, addedFeatureFile, conf, fs);
        }

        if (success) {
            this.logger.logInfo("Extracting recommendations");
            success = this.extractRecommendations(addedFeatureFile,
                    outputPrefix, conf, fs);
        }

        // remove intermediate cluster output regardless of job outcome
        FsUtil.clean(FileSystem.get(conf), clusterDir);

        return success ? 0 : 1;
    }

    /**
     * Set the number of reduce tasks from the configuration
     * (shared by every job in the pipeline).
     *
     * @param job job to configure
     * @param conf configuration holding the reducer count
     */
    private static void setReduceTasks(final Job job,
            final Configuration conf) {
        job.setNumReduceTasks(conf.getInt(OluoluConstants.NUMBER_OF_REDUCES,
                OluoluConstants.DEFAULT_NUMBER_OF_REDUCES));
    }

    /**
     * extract recommendation queries.
     * 
     * @param inputDir input dir
     * @param outputDir output dir
     * @param conf configuration 
     * @param fs FileSystem instance (local or HDFS)
     * @return true when success otherwise false
     * @throws IOException -
     * @throws InterruptedException -
     * @throws ClassNotFoundException -
     */
    private boolean extractRecommendations(final String inputDir, 
            final String outputDir, final Configuration conf,  
            final FileSystem fs) 
    throws IOException, InterruptedException, ClassNotFoundException {
        Path outputPath = new Path(outputDir);
        FsUtil.checkPath(outputPath, FileSystem.get(conf));

        Job job = new Job(conf);
        job.setJarByClass(QueryRecommendation.class);
        FileInputFormat.addInputPath(job, new Path(inputDir));
        FileOutputFormat.setOutputPath(job, outputPath);
        job.setMapperClass(ExtractRecommendationMapper.class);
        job.setReducerClass(ExtractRecommendationReducer.class);
        job.setMapOutputKeyClass(LongWritable.class);
        job.setMapOutputValueClass(Text.class);
        job.setOutputKeyClass(LongWritable.class);
        job.setOutputValueClass(Text.class);
        setReduceTasks(job, conf);

        return job.waitForCompletion(true);
    }

    /**
     * add features to pair of related users.
     * 
     * @param recommendDir input dir holding related-user pairs
     * @param featureDir input dir holding the feature representation
     * @param outputFile output dir
     * @param conf configuration
     * @param fs FileSystem
     * @return true when it succeeded otherwise return false
     * @throws IOException -
     * @throws InterruptedException -
     * @throws ClassNotFoundException -
     */
    private boolean addFeatures(final String recommendDir, 
            final String featureDir, final String outputFile,
            final Configuration conf, final FileSystem fs) 
    throws IOException, InterruptedException, 
    ClassNotFoundException {
        Path outputPath = new Path(outputFile);
        FsUtil.checkPath(outputPath, FileSystem.get(conf));

        Job job = new Job(conf);
        job.setJarByClass(QueryRecommendation.class);
        // the mapper consumes both the pair output and the feature file
        FileInputFormat.addInputPath(job, new Path(recommendDir));
        FileInputFormat.addInputPath(job, new Path(featureDir));
        FileOutputFormat.setOutputPath(job, outputPath);
        job.setMapperClass(AddFeaturesMapper.class); 
        job.setReducerClass(AddFeaturesReducer.class);
        job.setMapOutputKeyClass(LongWritable.class);
        job.setMapOutputValueClass(Text.class);
        job.setOutputKeyClass(LongWritable.class);
        job.setOutputValueClass(LongWritable.class);
        setReduceTasks(job, conf);

        return job.waitForCompletion(true);
    }

    /**
     * extract users in a same cluster.
     *  
     * @param clusterDir input dir (glob over all iteration outputs)
     * @param outputDir output dir
     * @param conf configuration
     * @param fs FileSystem
     * @return true when it succeeded otherwise false
     * @throws IOException -
     * @throws ClassNotFoundException -
     * @throws InterruptedException -
     */
    private boolean extractRelatedUsers(
            final String clusterDir, final String outputDir, 
            final Configuration conf, final FileSystem fs) 
    throws IOException, ClassNotFoundException, InterruptedException {
        Path outputPath = new Path(outputDir);
        FsUtil.checkPath(outputPath, FileSystem.get(conf));

        Job job = new Job(conf);
        job.setJarByClass(QueryRecommendation.class);
        FileInputFormat.addInputPath(job, new Path(clusterDir));
        FileOutputFormat.setOutputPath(job, outputPath);
        job.setMapperClass(ExtractRelatedUsersMapper.class);
        job.setReducerClass(ExtractRelatedUsersReducer.class);
        job.setMapOutputKeyClass(LongWritable.class);
        job.setMapOutputValueClass(LongWritable.class);
        job.setOutputKeyClass(LongWritable.class);
        job.setOutputValueClass(LongWritable.class);
        setReduceTasks(job, conf);

        return job.waitForCompletion(true);
    }

    /**
     * Extract clusters with LSH.
     * 
     * @param inputFile input file
     * @param clusterFile output file 
     * @param conf configuration object; must carry the min-wise hash seed
     * @return counter instance of the completed job
     * @throws IOException -
     * @throws InterruptedException -
     * @throws ClassNotFoundException -
     */
    private Counters extractClusters(final String inputFile, 
            final String clusterFile,
            final Configuration conf) throws IOException, 
            InterruptedException, ClassNotFoundException {
        Path outputPath = new Path(clusterFile);
        FsUtil.checkPath(outputPath, FileSystem.get(conf));

        Job job = new Job(conf);
        job.setJarByClass(QueryRecommendation.class);
        FileInputFormat.addInputPath(job, new Path(inputFile));
        FileOutputFormat.setOutputPath(job, outputPath);
        job.setMapperClass(SelectClustersMapper.class);
        job.setReducerClass(SelectClustersReducer.class);
        job.setMapOutputKeyClass(LongWritable.class);
        job.setMapOutputValueClass(Text.class);
        job.setOutputKeyClass(LongWritable.class);
        job.setOutputValueClass(Text.class);
        setReduceTasks(job, conf);

        job.waitForCompletion(true);
        return job.getCounters();
    }

    /**
     * Change input format into feature representation.
     * 
     * @param inputFile input file name
     * @param outputFile output file name
     * @param conf configuration
     * @return true when the job succeeded otherwise false
     * @throws IOException -
     * @throws ClassNotFoundException -
     * @throws InterruptedException -
     */
    private boolean changeInputFormat(final String inputFile, 
            final String outputFile, final Configuration conf) 
    throws IOException, ClassNotFoundException,
            InterruptedException  {
        Path outputPath = new Path(outputFile);
        FsUtil.checkPath(outputPath, FileSystem.get(conf));

        Job job = new Job(conf);
        job.setJarByClass(QueryRecommendation.class);
        FileInputFormat.addInputPath(job, new Path(inputFile));
        FileOutputFormat.setOutputPath(job, outputPath);
        job.setMapperClass(ChangeFormatMapper.class);
        job.setReducerClass(ChangeFormatReducer.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Query.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        setReduceTasks(job, conf);

        return job.waitForCompletion(true);
    }

    /**
     * Main method.
     *
     * @param args argument strings which contain input and output files.
     * @throws Exception -
     */
    public static void main(final String[] args)
    throws Exception {
        int exitCode = ToolRunner.run(
                new QueryRecommendation(), args);
        System.exit(exitCode);
    }    

}
