/**
 * This file is part of Sonedyan.
 * 
 * Sonedyan is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public
 * License as published by the Free Software Foundation;
 * either version 3 of the License, or (at your option) any
 * later version.
 *
 * Sonedyan is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied
 * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
 * PURPOSE.  See the GNU General Public License for more
 * details.
 *
 * You should have received a copy of the GNU General Public
 * License along with Sonedyan; see the file COPYING.  If not
 * see <http://www.gnu.org/licenses/>.
 * 
 * Copyright (C) 2009-2013 Jimmy Dubuisson <jimmy.dubuisson@gmail.com>
 */

package org.unige.mpej.eckmann.sonedyan.mr;

import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.Set;
import java.util.TreeSet;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.KeyValueTextInputFormat;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.mapred.jobcontrol.Job;
import org.apache.hadoop.mapred.jobcontrol.JobControl;
import org.apache.hadoop.mapred.lib.HashPartitioner;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.log4j.Logger;

/**
 * count the number of triangles in a huge graph
 */
public class DirectedTrianglesFinder extends Configured implements Tool
{
	private static Logger log = Logger.getLogger(org.unige.mpej.eckmann.sonedyan.mr.DirectedTrianglesFinder.class);
	
	/**
	 * mapper 1
	 */
	/**
	 * mapper 1
	 *
	 * Input: an edge <u; v> (two vertex ids).
	 * Re-keys each edge on its smaller endpoint and tags the larger one with
	 * the direction of the relation:
	 *   v > u  -> <u; "v,1">  (child relation)
	 *   v <= u -> <v; "u,-1"> (parent relation)
	 */
	public static class Map1 extends MapReduceBase implements Mapper<Text, Text, Text, Text>
	{
		@Override
		public void map(Text key, Text value, OutputCollector<Text, Text> output, Reporter reporter) throws IOException
		{
			// Integer.parseInt avoids the deprecated new Integer(String) boxing
			int u = Integer.parseInt(key.toString());
			int v = Integer.parseInt(value.toString());
			
			if (v > u)
			{
				// child relation: v is a greater neighbour of u
				output.collect(new Text(Integer.toString(u)), new Text(v + ",1"));
			}
			else
			{
				// parent relation: u is a greater (or equal) neighbour of v
				output.collect(new Text(Integer.toString(v)), new Text(u + ",-1"));
			}
		}
	}

	/**
	 * reducer 1 (v2)
	 */
	/**
	 * reducer 1 (v2)
	 *
	 * Input: <u; (v,1)> and <u; (v,-1)> records produced by Map1.
	 * For every child v of u, looks up the common children of v and the sorted
	 * parents of u in the HDFS-backed graph, and emits one <u; (v,w)> record
	 * per common vertex w found.
	 */
	public static class Reduce1b extends MapReduceBase implements Reducer<Text, Text, Text, Text>
	{
		// shared across reduce tasks running in the same JVM so the
		// (expensive) graph load happens only once
		private static HdfsGraphLoader loader;
		
		@Override
		public void configure(JobConf job)
		{
			log.info("Setting up reducer");
			
			// lazy, once-per-JVM initialisation of the graph data
			if (loader == null)
			{
				log.info("Loading graph...");
				loader = new HdfsGraphLoader(job);
			}
		}
		
		@Override
		public void reduce(Text key, Iterator<Text> values, OutputCollector<Text, Text> output, Reporter reporter) throws IOException
		{	
			// split the neighbours of u into children (tag 1) and parents (tag -1)
			ArrayList<Integer> cu = new ArrayList<Integer>();
			ArrayList<Integer> pu = new ArrayList<Integer>();
			
			while (values.hasNext())
			{
				String[] vs = values.next().toString().split(",");
				
				// Integer.parseInt avoids the deprecated new Integer(String) boxing;
				// autoboxing handles the list adds
				int v = Integer.parseInt(vs[0]);
				int t = Integer.parseInt(vs[1]);
				
				// u must be the least vertex in the directed triangle
				if (t == 1)
					cu.add(v);
				// (t == -1 && v > u)
				else
					pu.add(v);
			}
			
			// sort the parents before the intersection lookup
			// (presumably required by getCommonChildren — original behaviour kept)
			Collections.sort(pu);
			
			for (int v : cu)
			{
				ArrayList<Integer> inter = loader.getCommonChildren(v, pu);
				
				if (inter != null && !inter.isEmpty())
				{
					for (int w : inter)
					{
						output.collect(key, new Text(v + "," + w));
					}
				}
			}
		}
	}
	
	/**
	 * reducer 1 (v1)
	 */
	/**
	 * reducer 1 (v1)
	 *
	 * Input: <u; (v,1)> and <u; (v,-1)> records produced by Map1.
	 * Emits every (child, parent) combination of u as a candidate triangle
	 * pair <u; (v,w)>; a second job (Map2/Reduce2) keeps only the pairs that
	 * are actual edges.
	 */
	public static class Reduce1a extends MapReduceBase implements Reducer<Text, Text, Text, Text>
	{
		@Override
		public void reduce(Text key, Iterator<Text> values, OutputCollector<Text, Text> output, Reporter reporter) throws IOException
		{	
			// neighbours of u split by tag, deduplicated and kept sorted
			Set<Integer> cu = new TreeSet<Integer>();
			Set<Integer> pu = new TreeSet<Integer>();
			
			while (values.hasNext())
			{
				String[] vs = values.next().toString().split(",");
				
				// Integer.parseInt avoids the deprecated new Integer(String) boxing;
				// the unused local 'u' of the original was dropped
				int v = Integer.parseInt(vs[0]);
				int t = Integer.parseInt(vs[1]);
				
				// u must be the least vertex in the directed triangle
				if (t == 1)
					cu.add(v);
				// (t == -1 && v > u)
				else
					pu.add(v);
			}
			
			// cross product: each child paired with each parent
			for (int v : cu)
			{
				for (int w : pu)
				{
					output.collect(key, new Text(v + "," + w));
				}
			}
		}
	}
	
	/**
	 * mapper 2
	 * 
	 * path of input file: /user/jimmy/twitter/twitter_children.net
	 */
	public static class Map2 extends MapReduceBase implements Mapper<Text, Text, Text, Text>
	{
		// Input: <u; (v,w)> or <u; v> 
		// if <u; (v,w)> then emit <(v,w); u>
		// if <u; v> then emit <(u,v); 0)
		public void map(Text key, Text value, OutputCollector<Text, Text> output, Reporter reporter) throws IOException
		{
			String u = key.toString();
			String v = value.toString();
			
			if (v.contains(","))
			{
				output.collect(new Text(v), new Text(u));
			}
			else
			{
				output.collect(new Text(u + "," + v), new Text("0"));
			}
		}
	}
	
	/**
	 * reducer 2
	 */
	/**
	 * reducer 2
	 *
	 * Input: <(v,w); S> where S contains the sentinel "0" iff (v,w) is a real
	 * edge, plus every least vertex u that proposed (v,w) as a closure.
	 * If the sentinel is present, emits one <u; (v,w)> per proposing vertex u,
	 * i.e. one record per confirmed directed triangle (u, v, w).
	 */
	public static class Reduce2 extends MapReduceBase implements Reducer<Text, Text, Text, Text>
	{
		@Override
		public void reduce(Text key, Iterator<Text> values, OutputCollector<Text, Text> output, Reporter reporter) throws IOException
		{	
			// least vertices that proposed this (v,w) pair, deduplicated
			Set<String> lu = new TreeSet<String>();
			boolean isAnEdge = false;
			
			while (values.hasNext())
			{
				String value = values.next().toString();
				
				// equals() is the idiomatic equality check (was compareTo(...) == 0)
				if ("0".equals(value))
					isAnEdge = true;
				else
					lu.add(value);
			}
			
			if (isAnEdge)
			{
				for (String u : lu)
				{
					output.collect(new Text(u), key);
				}
			}
		}
	}

	/** 
	 * Hadoop tutorial: http://developer.yahoo.com/hadoop/tutorial/
	 */
	/** 
	 * Runs the single-job triangle count (Map1 + Reduce1b, which checks
	 * candidate edges directly against the HDFS-cached graph).
	 * 
	 * Hadoop tutorial: http://developer.yahoo.com/hadoop/tutorial/
	 * Job chaining: http://blogs.msdn.com/b/avkashchauhan/archive/2012/03/29/how-to-chain-multiple-mapreduce-jobs-in-hadoop.aspx
	 * 
	 * @param args args[0] is the HDFS path of the input edge list
	 * @return 0 on success (JobClient.runJob throws if the job fails),
	 *         -1 when no input path was given
	 */
	public final int run(final String[] args) throws Exception
	{
		// fail fast with a usage message instead of an
		// ArrayIndexOutOfBoundsException deep inside the job setup
		if (args.length < 1)
		{
			System.err.println("Usage: DirectedTrianglesFinder <input path>");
			return -1;
		}
		
		/*
		 * configuring job 1
		 */
		JobConf conf1 = new JobConf(getConf(), DirectedTrianglesFinder.class);
		conf1.setJobName("TC1");
		
		// per-task heap overrides; the cluster-wide defaults live in
		// mapred-site.xml (mapred.map.child.java.opts / mapred.reduce.child.java.opts)
		conf1.set("mapred.map.child.java.opts", "-Xmx12g");
		conf1.set("mapred.reduce.child.java.opts", "-Xmx12g");
		conf1.setNumReduceTasks(4);
		
		// graph index/data files the reducer loads through the distributed cache
		String indexFile = HdfsGraphLoader.HDFS_SERVER + HdfsGraphLoader.TWITTER_GRAPH_FOLDER + HdfsGraphLoader.INDEX_FILE_NAME;
		String dataFile = HdfsGraphLoader.HDFS_SERVER + HdfsGraphLoader.TWITTER_GRAPH_FOLDER + HdfsGraphLoader.DATA_FILE_NAME;
		
		conf1.setMapperClass(Map1.class);
		conf1.setReducerClass(Reduce1b.class);

		conf1.setInputFormat(KeyValueTextInputFormat.class);
		conf1.setOutputFormat(TextOutputFormat.class);

		conf1.setOutputKeyClass(Text.class);
		conf1.setOutputValueClass(Text.class);

		FileInputFormat.setInputPaths(conf1, new Path(args[0]));
		FileOutputFormat.setOutputPath(conf1, new Path("out1b"));
		
		// Create symlinks in the job's working directory using the link name
		// given after the '#'; the cached files must already exist on HDFS.
		DistributedCache.createSymlink(conf1);
		DistributedCache.addCacheFile(new URI(indexFile + "#graph.index"), conf1);
		DistributedCache.addCacheFile(new URI(dataFile + "#graph.data"), conf1);
		
		// blocks until completion; throws IOException if the job fails
		JobClient.runJob(conf1);
			
		return 0;
	}
	
	/** 
	 * Hadoop tutorial: http://developer.yahoo.com/hadoop/tutorial/
	 */
	/** 
	 * Runs the two-job triangle count: job 1 (Map1 + Reduce1a) emits candidate
	 * (v,w) pairs per least vertex u; job 2 (Map2 + Reduce2) keeps only the
	 * candidates that are actual edges.
	 * 
	 * Hadoop tutorial: http://developer.yahoo.com/hadoop/tutorial/
	 * Job chaining: http://blogs.msdn.com/b/avkashchauhan/archive/2012/03/29/how-to-chain-multiple-mapreduce-jobs-in-hadoop.aspx
	 * 
	 * @param args args[0] is the HDFS path of the input edge list
	 * @return 0 if every job succeeded, 1 if any job failed, -1 on bad usage
	 */
	public final int run2(final String[] args) throws Exception
	{
		// fail fast with a usage message instead of an
		// ArrayIndexOutOfBoundsException deep inside the job setup
		if (args.length < 1)
		{
			System.err.println("Usage: DirectedTrianglesFinder <input path>");
			return -1;
		}
		
		String inputPath = args[0];
		
		/*
		 * configuring job 1
		 */
		JobConf conf1 = new JobConf(getConf(), DirectedTrianglesFinder.class);
		conf1.setJobName("TC1");
		
		conf1.setMapperClass(Map1.class);
		conf1.setReducerClass(Reduce1a.class);

		conf1.setInputFormat(KeyValueTextInputFormat.class);
		conf1.setOutputFormat(TextOutputFormat.class);

		conf1.setOutputKeyClass(Text.class);
		conf1.setOutputValueClass(Text.class);

		FileInputFormat.setInputPaths(conf1, new Path(inputPath));
		FileOutputFormat.setOutputPath(conf1, new Path("out1a"));
		
		/*
		 * configuring job 2: joins the candidate pairs (out1a) with the raw
		 * edge list so Reduce2 can confirm which candidates are real edges
		 */
		JobConf conf2 = new JobConf(getConf(), DirectedTrianglesFinder.class);
		conf2.setJobName("TC2");
		conf2.setNumReduceTasks(4);
		
		conf2.setMapperClass(Map2.class);
		conf2.setReducerClass(Reduce2.class);

		conf2.setInputFormat(KeyValueTextInputFormat.class);
		conf2.setOutputFormat(TextOutputFormat.class);

		conf2.setOutputKeyClass(Text.class);
		conf2.setOutputValueClass(Text.class);
		
		FileInputFormat.setInputPaths(conf2, inputPath + ",out1a");
		FileOutputFormat.setOutputPath(conf2, new Path("out2a"));
		
		/*
		 * chaining jobs
		 */
		Job job1 = new Job(conf1);
		Job job2 = new Job(conf2);
		
		JobControl jctrl = new JobControl("jctrl");
		jctrl.addJob(job1);
		jctrl.addJob(job2);
		job2.addDependingJob(job1);
		
		// JobControl.run() loops until stop() is called, so invoking it on the
		// calling thread (as the original did) never returns even after all
		// jobs finish. Run it on a daemon thread, poll for completion, then stop.
		Thread ctrl = new Thread(jctrl, "jctrl");
		ctrl.setDaemon(true);
		ctrl.start();
		
		while (!jctrl.allFinished())
		{
			Thread.sleep(1000);
		}
		
		jctrl.stop();
		
		// surface job failures in the exit code instead of always returning 0
		return jctrl.getFailedJobs().isEmpty() ? 0 : 1;
	}
	
	/**
	 * 
	 */
	/**
	 * Entry point: delegates to ToolRunner so generic Hadoop options
	 * (-D key=value, -fs, -jt, ...) are parsed before run() is invoked.
	 */
	public static void main(final String[] args) throws Exception
	{
		// it is possible to complete the job arguments here, e.g.
		// String[] args2 = new String[] { "-D", "mapred.reduce.tasks=3"};
		final Configuration conf = new Configuration();
		final int exitCode = ToolRunner.run(conf, new DirectedTrianglesFinder(), args);
		System.exit(exitCode);
	}
	
}