/**
 * This file is part of Sonedyan.
 * 
 * Sonedyan is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public
 * License as published by the Free Software Foundation;
 * either version 3 of the License, or (at your option) any
 * later version.
 *
 * Sonedyan is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied
 * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
 * PURPOSE.  See the GNU General Public License for more
 * details.
 *
 * You should have received a copy of the GNU General Public
 * License along with Sonedyan; see the file COPYING.  If not
 * see <http://www.gnu.org/licenses/>.
 * 
 * Copyright (C) 2009-2013 Jimmy Dubuisson <jimmy.dubuisson@gmail.com>
 */

package org.unige.mpej.eckmann.sonedyan.mr;

import java.io.BufferedInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Set;
import java.util.TreeSet;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.log4j.Logger;

/**
 * 
 */
public class HdfsGraphLoader extends LocalGraphLoader
{
	private static Logger log = Logger.getLogger(org.unige.mpej.eckmann.sonedyan.mr.HdfsGraphLoader.class);

	public static final String HDFS_SERVER = "hdfs://karystos.unige.ch:51070";
	public static final String CORE_SITE_PATH = "/opt/hadoop/conf/core-site.xml";
	
	public static final String TWITTER_GRAPH_FOLDER = "/twitter/children/";
	public static final String FA_GRAPH_FOLDER = "/fa/children/";
	
	/**
	 * constructor
	 */
	public HdfsGraphLoader(Configuration conf)
	{
		try
		{
			Path[] paths = DistributedCache.getLocalCacheFiles(conf);
			
			Path p1,p2;
			boolean isCached = false;
			
			if (paths != null && paths.length >= 2)
			{
				log.info("Getting paths from distributed cache...");
				
				p1 = paths[0];
				p2 = paths[1];
				isCached = true;
			}	
			else
			{
				log.info("Getting paths from HDFS...");
				
				p1 = new Path(HDFS_SERVER + TWITTER_GRAPH_FOLDER + INDEX_FILE_NAME);
				p2 = new Path(HDFS_SERVER + TWITTER_GRAPH_FOLDER + DATA_FILE_NAME);
			}
			
			log.info("Loading files...");
			log.info("path1: " + p1.toUri());
			log.info("path2: " + p2.toUri());
			
			this.loadGraph(conf, p1, p2, isCached);
		}
		catch(IOException e)
		{
			log.error("Unable to create HDFS graph loader: " + e.getMessage());
		}
	}
	
	/**
	 * load binary graph in memory
	 */
	public void loadGraph(Configuration conf, Path p1, Path p2, boolean isCached)
	{
		try
		{
			log.info("Loading graph index");
			
			FileSystem fs;
			
			if (isCached)
			{
				fs = FileSystem.getLocal(conf);
			}
			else
			{
				fs = FileSystem.get(conf);
			}
			
			BufferedInputStream bis1 = new BufferedInputStream(fs.open(p1));
			BufferedInputStream bis2 = new BufferedInputStream(fs.open(p2));
			
			// load index
            byte[] iba = new byte[8];
            int currentV = -1;
            
			while (bis1.read(iba) != -1)
			{
				int v = ((iba[0] & 0xFF) << 24) | ((iba[1] & 0xFF) << 16) | ((iba[2] & 0xFF) << 8) | (iba[3] & 0xFF);
				int pos = ((iba[4] & 0xFF) << 24) | ((iba[5] & 0xFF) << 16) | ((iba[6] & 0xFF) << 8) | (iba[7] & 0xFF);
				
				this.index.put(v, pos);
				
				if (currentV == -1)
				{
					currentV = v;
					this.firstVertex = v;
				}
				else
				{
					this.nextVertex.put(currentV, v);
					currentV = v;
				}
			}
			
			// add the last vertex
			this.nextVertex.put(currentV, null);
			
			log.info("Graph index successfully loaded");
			log.info("Loading graph data");
			
			// load data
			byte[] bytes4 = new byte[4];
			int counter = 0;
			
			while (bis2.read(bytes4) != -1)
			{
				int v = ((bytes4[0] & 0xFF) << 24) | ((bytes4[1] & 0xFF) << 16) | ((bytes4[2] & 0xFF) << 8) | (bytes4[3] & 0xFF);
				this.data[counter] = v;
				counter++;
			}
			
			this.numberOfEdges = counter;
			
			bis1.close();
			bis2.close();
			
			log.info("Graph data successfully loaded");
			
			log.info("# of vertices: " + this.nextVertex.size());
			log.info("# of edges: " + counter);
			log.info("# of indexes: " + this.index.size());
		}
		catch(IOException e)
		{
			log.error("An error occured: " + e.getMessage());
		}
	}
	
	/**
	 * get vertex children
	 */
	public ArrayList<Integer> getCommonChildren(int v, ArrayList<Integer> B)
	{
		try
		{
			ArrayList<Integer> children = new ArrayList<Integer>();
			
			// will launch an exception if vertex v is a sink
			int pos1 = this.index.get(v);
			
			Integer nextV = this.nextVertex.get(v);
			int s_a;
			
			if (nextV != null)
				s_a = this.index.get(nextV) - pos1;
			else
				s_a = this.numberOfEdges - pos1;
			
			int s_b = B.size();
		    int i_a = 0, i_b = 0;
		    
		    while (i_a < s_a && i_b < s_b)
		    {
		    	int va = this.data[pos1 + i_a];
		    	int vb = B.get(i_b);
		    	
		        if (va < vb) 
		        {
		            i_a++;
		        } 
		        else if (vb < va)
		        {
		            i_b++;
		        } 
		        else 
		        {
		            children.add(va);
		            i_a++; i_b++;
		        }
		    }
			
			return children;
		}
		catch(Exception e)
		{
			// log.error("Unable to get children: " + e.getMessage());
			return null;
		}
	}
	
	/**
	 * main method
	 */
	public static void main(String args[])
	{
		// load graph binary data
		Configuration conf = new Configuration();
		conf.addResource(new Path(CORE_SITE_PATH));
		
		HdfsGraphLoader loader = new HdfsGraphLoader(conf);
		
		// get children of node 12
		ArrayList<Integer> children = loader.getChildren(12);
	
		System.out.println("# indexes: " + loader.index.size());
		System.out.println("Index of vertex 140: " + loader.index.get(140));
		
		System.out.println("# children of vertex 12: " + children.size());
		
		Iterator<Integer> it = children.iterator();
		
		while (it.hasNext())
			System.out.println("Vertex child: " + it.next());
	}
	
}
