/**
 * This file is part of Sonedyan.
 * 
 * Sonedyan is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public
 * License as published by the Free Software Foundation;
 * either version 3 of the License, or (at your option) any
 * later version.
 *
 * Sonedyan is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied
 * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
 * PURPOSE.  See the GNU General Public License for more
 * details.
 *
 * You should have received a copy of the GNU General Public
 * License along with Sonedyan; see the file COPYING.  If not
 * see <http://www.gnu.org/licenses/>.
 * 
 * Copyright (C) 2009-2013 Jimmy Dubuisson <jimmy.dubuisson@gmail.com>
 */

package org.unige.mpej.eckmann.sonedyan.mr;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.KeyValueTextInputFormat;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.log4j.Logger;

public class ColinkGraphExtractor extends Configured  implements Tool
{
	private static final Logger log = Logger.getLogger(org.unige.mpej.eckmann.sonedyan.mr.ColinkGraphExtractor.class);
	
	/**
	 * Linear-merge intersection of two ascending-sorted lists of integers.
	 * 
	 * Adapted from:
	 * http://highlyscalable.wordpress.com/2012/06/05/fast-intersection-sorted-lists-sse/
	 * 
	 * @param A first operand, sorted in ascending order
	 * @param B second operand, sorted in ascending order
	 * @return a new list with the common elements in ascending order,
	 *         or null when either operand is null (preserved behaviour)
	 */
	public static ArrayList<Integer> intersection(ArrayList<Integer> A, ArrayList<Integer> B)
	{
		if (A == null || B == null)
		{
			return null;
		}
		
		int s_a = A.size(), s_b = B.size();
		int i_a = 0, i_b = 0;
		
		ArrayList<Integer> C = new ArrayList<Integer>();
		
		while (i_a < s_a && i_b < s_b)
		{
			int a = A.get(i_a), b = B.get(i_b);
			
			if (a < b)
			{
				i_a++;
			}
			else if (b < a)
			{
				i_b++;
			}
			else
			{
				C.add(a);
				i_a++; i_b++;
			}
		}
		return C;
	}
	
	/**
	 * Mapper: re-keys each directed edge by its smaller endpoint and tags the
	 * value with the edge direction.
	 * 
	 * Input:  &lt;u; v&gt; (one directed edge per record)
	 * Output: &lt;u; "v,1"&gt;  when v &gt; u ("child" relation)
	 *         &lt;v; "u,-1"&gt; otherwise    ("parent" relation; also covers u == v)
	 */
	public static class Map extends MapReduceBase implements Mapper<Text, Text, Text, Text>
	{
		@Override
		public void map(Text key, Text value, OutputCollector<Text, Text> output, Reporter reporter) throws IOException
		{
			// parseInt avoids the deprecated Integer(String) constructor and
			// the needless box/unbox round trip of the original code
			int u = Integer.parseInt(key.toString());
			int v = Integer.parseInt(value.toString());
			
			if (v > u)
			{
				// child relation
				output.collect(new Text(Integer.toString(u)), new Text(v + ",1"));
			}
			else
			{
				// parent relation
				output.collect(new Text(Integer.toString(v)), new Text(u + ",-1"));
			}
		}
	}

	/**
	 * Reducer: emits &lt;u; v&gt; whenever v is linked to u in both directions,
	 * i.e. v appears among u's values tagged both "1" (child) and "-1" (parent).
	 */
	public static class Reduce extends MapReduceBase implements Reducer<Text, Text, Text, Text>
	{
		@Override
		public void reduce(Text key, Iterator<Text> values, OutputCollector<Text, Text> output, Reporter reporter) throws IOException
		{	
			// children (u -> v) and parents (v -> u) of the key vertex u
			ArrayList<Integer> cu = new ArrayList<Integer>();
			ArrayList<Integer> pu = new ArrayList<Integer>();
			
			while (values.hasNext())
			{
				String[] vs = values.next().toString().split(",");
				
				int v = Integer.parseInt(vs[0]);
				int t = Integer.parseInt(vs[1]);
				
				// u is always the least vertex of the pair (see the mapper)
				if (t == 1)
					cu.add(v);
				else
					pu.add(v);
			}
			
			// BUGFIX: intersection() requires BOTH operands sorted, and Hadoop
			// gives no ordering guarantee on the values iterator. The original
			// code sorted only pu, so an unsorted cu could silently drop
			// colinks.
			Collections.sort(cu);
			Collections.sort(pu);
			
			ArrayList<Integer> inter = intersection(cu, pu);
			
			if (inter != null && !inter.isEmpty())
			{	
				for (Integer w : inter)
				{
					output.collect(key, new Text(w.toString()));
				}
			}
		}
	}
	
	/** 
	 * Configures and runs the colink-extraction job.
	 * 
	 * Hadoop tutorial: http://developer.yahoo.com/hadoop/tutorial/
	 * 
	 * @param args args[0] is the input path (one "u&lt;TAB&gt;v" edge per line);
	 *             output is written to the fixed path "colink"
	 * @return 0 on success, -1 when the input path argument is missing
	 */
	public final int run(final String[] args) throws Exception
	{
		if (args.length < 1)
		{
			// fail with a usage message instead of an ArrayIndexOutOfBoundsException
			System.err.println("Usage: ColinkGraphExtractor <input path>");
			return -1;
		}
		
		JobConf conf1 = new JobConf(getConf(), ColinkGraphExtractor.class);
		
		conf1.setJobName("CG");
		
		// child-JVM heap sizes are expected to be set in mapred-site.xml via
		// mapred.map.child.java.opts / mapred.reduce.child.java.opts
		// (e.g. <value>-Xmx1024m</value>)
		conf1.setNumReduceTasks(4); 
		
		conf1.setMapperClass(Map.class);
		conf1.setReducerClass(Reduce.class);

		conf1.setInputFormat(KeyValueTextInputFormat.class);
		conf1.setOutputFormat(TextOutputFormat.class);
		
		conf1.setOutputKeyClass(Text.class);
		conf1.setOutputValueClass(Text.class);

		FileInputFormat.setInputPaths(conf1, new Path(args[0]));
		FileOutputFormat.setOutputPath(conf1, new Path("colink"));
		
		JobClient.runJob(conf1);
			
		return 0;
	}
	
	/**
	 * Entry point; delegates to ToolRunner so generic Hadoop options
	 * (-D key=value, -conf, ...) are parsed before run() is invoked.
	 */
	public static void main(final String[] args) throws Exception
	{
		Configuration conf = new Configuration();
		
		// it is possible to complete the job arguments here, e.g.
		// String[] args2 = new String[] { "-D", "mapred.reduce.tasks=3" };
		int res = ToolRunner.run(conf, new org.unige.mpej.eckmann.sonedyan.mr.ColinkGraphExtractor(), args);
		System.exit(res);
	}

}
