package org.apache.nutch.scoring.webgraph.algorithms;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.ObjectWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapFileOutputFormat;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.ToolRunner;
import org.apache.nutch.scoring.webgraph.LinkDatum;
import org.apache.nutch.scoring.webgraph.LinkRank;
import org.apache.nutch.scoring.webgraph.Node;
import org.apache.nutch.util.FSUtils;
import org.apache.nutch.util.NutchConfiguration;
import org.apache.nutch.util.NutchJob;

public class BFS extends LinkRank
{
	private final int maxDepth = 2;
	
	private static final String depthConfPath = "link.analyze.bfs.curr_depth";

	private Path neighborDb;
	private FileSystem fs = null;
	
	/**
	 * Combined mapper/reducer that expands the BFS frontier by one hop.
	 *
	 * The mapper reads records from two inputs: the neighborDb
	 * (Text url -> Text neighbor) and the inverted link database
	 * (Text url -> LinkDatum). NeighborDb entries are inverted so that each
	 * source url is grouped with the link records attached to its current
	 * neighbors; LinkDatum values pass through keyed as-is.
	 *
	 * The reducer emits (url, linkUrl) pairs joining every collected source
	 * url with every matching link at the grouping key, alternating between
	 * inlinks and outlinks by depth.
	 */
	private static class NeighborFinder implements
		Mapper<Text, Writable, Text, ObjectWritable>,
		Reducer<Text, ObjectWritable, Text, Text>
	{
		private JobConf conf;
		// Current BFS depth, read from the job configuration.
		private int depth;

		public void configure(JobConf conf)
		{
			this.conf = conf;
			depth = conf.getInt(depthConfPath, 0);
		}

		/**
		 * Wraps each value in an ObjectWritable. NeighborDb records (Text
		 * values) are inverted -- emitted under the neighbor's url with the
		 * original key as value -- so the reducer can group a source url with
		 * the links of its neighbors. Other records pass through unchanged.
		 */
		public void map(Text key, Writable value,
			OutputCollector<Text, ObjectWritable> output, Reporter reporter)
			throws IOException
		{
			ObjectWritable objWrite = new ObjectWritable();

			// Invert neighborDb entries: (url, neighbor) -> (neighbor, url).
			if (value instanceof Text)
			{
				objWrite.set(WritableUtils.clone(key, conf));
				output.collect((Text)value, objWrite);
				return;
			}

			// LinkDatum values pass through under their own key.
			objWrite.set(WritableUtils.clone(value, conf));
			output.collect(key, objWrite);
		}

		/**
		 * Joins the source urls whose neighbor set contains this key with the
		 * links attached to this key, producing the next BFS frontier.
		 */
		public void reduce(Text key, Iterator<ObjectWritable> values,
			OutputCollector<Text, Text> output, Reporter reporter)
			throws IOException
		{
			List<Text> keys = new ArrayList<Text>();
			List<LinkDatum> links = new ArrayList<LinkDatum>();

			while (values.hasNext())
			{
				Object value = values.next().get();

				if (value instanceof LinkDatum)
				{
					LinkDatum link = (LinkDatum)value;

					// Depth is even -- looking for inlinks. Depth is odd -- for outlinks.
					if ((depth % 2 == 0 && link.getLinkType() == LinkDatum.INLINK) ||
						(depth % 2 == 1 && link.getLinkType() == LinkDatum.OUTLINK))
					{
						if (!links.contains(link))
							links.add(link);
					}
				}
				else if (value instanceof Text)
					keys.add((Text)value);
			}

			// Cross product: each collected source url reaches each link.
			// Typed enhanced-for replaces the original raw toArray()/cast loops.
			for (Text url : keys)
			{
				for (LinkDatum link : links)
				{
					Text linkUrl = new Text(link.getUrl());

					// Per-record trace; was logged at WARN, which floods the logs.
					if (LOG.isDebugEnabled())
						LOG.debug(url + " => " + linkUrl);

					output.collect(url, linkUrl);
				}
			}
		}

		public void close() throws IOException
		{
		}
	}

	/**
	 * Combined mapper/reducer that folds neighbor counts into node scores.
	 *
	 * Reads the neighborDb (Text -> Text) and the nodeDb (Text -> Node); for
	 * each url the reducer counts its neighbor entries and adds
	 * factor * count to the node's existing inlink score, where
	 * factor = 0.5^depth decays the contribution of more distant neighbors.
	 */
	private static class Scorer implements
			Mapper<Text, Writable, Text, ObjectWritable>,
			Reducer<Text, ObjectWritable, Text, Node>
	{
		private JobConf conf;
		// Current BFS depth, read from the job configuration.
		private int depth;
		// Per-neighbor score contribution at this depth: 0.5^depth.
		private double factor = 0.0;

		public void configure(JobConf conf)
		{
			this.conf = conf;
			depth = conf.getInt(depthConfPath, 0);
			factor = Math.pow(.5, depth);
		}

		/**
		 * Convert values to ObjectWritable so Node and Text records from the
		 * two inputs can be grouped under the same key.
		 */
		public void map(Text key, Writable value,
			OutputCollector<Text, ObjectWritable> output, Reporter reporter)
			throws IOException
		{
			ObjectWritable objWrite = new ObjectWritable();
			objWrite.set(WritableUtils.clone(value, conf));
			output.collect(key, objWrite);
		}

		/**
		 * Counts this url's neighbor entries and folds them into the node's
		 * inlink score. Keys with no matching Node record are skipped; the
		 * original code dereferenced the null node and threw an NPE.
		 */
		public void reduce(Text key, Iterator<ObjectWritable> values,
				OutputCollector<Text, Node> output, Reporter reporter)
				throws IOException
		{
			Node node = null;
			int count = 0;

			while (values.hasNext())
			{
				Object value = values.next().get();

				if (value instanceof Node)
					node = (Node) value;
				else if (value instanceof Text)
					count++;
			}

			// A key may appear in the neighborDb without a nodeDb record;
			// cloning/dereferencing a null Node would throw, so skip instead.
			if (node == null)
			{
				LOG.warn("No node entry for " + key + ", skipping");
				return;
			}

			// store the score in a temporary NodeDb
			Node outNode = (Node) WritableUtils.clone(node, conf);
			float score = (float) (node.getInlinkScore() + factor * count);
			// Per-record trace; was logged at WARN, which floods the logs.
			if (LOG.isDebugEnabled())
				LOG.debug("Setting new score for " + key + " = " + score);
			outNode.setInlinkScore(score);
			output.collect(key, outNode);
		}

		public void close() throws IOException
		{
		}
	}

	/**
	 * Seeds the neighborDb: every url starts with itself as its only
	 * neighbor, i.e. the BFS frontier at depth 0.
	 */
	private static class NeighborInitializer implements
			Mapper<Text, Node, Text, Text>
	{
		private JobConf conf;

		public void configure(JobConf conf)
		{
			this.conf = conf;
		}

		/**
		 * Emits (url, url) for every node record.
		 */
		public void map(Text key, Node value,
			OutputCollector<Text, Text> output, Reporter reporter)
			throws IOException
		{
			// Per-record trace; was logged at WARN, which floods the logs.
			if (LOG.isDebugEnabled())
				LOG.debug(key + " => " + key);
			output.collect(key, key);
		}

		public void close() throws IOException
		{
		}
	}

	/**
	 * Identity mapper plus a reducer that removes duplicate neighbor entries
	 * for each url, preserving first-seen order.
	 */
	private static class Uniqer implements
			Mapper<Text, Text, Text, Text>,
			Reducer<Text, Text, Text, Text>
	{
		private JobConf conf;

		public void configure(JobConf conf)
		{
			this.conf = conf;
		}

		/**
		 * Identity map.
		 */
		public void map(Text key, Text value,
			OutputCollector<Text, Text> output, Reporter reporter)
			throws IOException
		{
			// Per-record trace; was logged at WARN, which floods the logs.
			if (LOG.isDebugEnabled())
				LOG.debug("Uniq map: ( " + key + ", " + value + ")");
			output.collect(key, value);
		}

		/**
		 * Emits each distinct neighbor of key exactly once. A LinkedHashSet
		 * replaces the original O(n^2) List.contains scan while keeping the
		 * same first-seen emission order.
		 *
		 * NOTE(review): the original carried a commented-out check that would
		 * have excluded key itself from its neighbor set.
		 */
		public void reduce(Text key, Iterator<Text> values,
				OutputCollector<Text, Text> output, Reporter reporter)
				throws IOException
		{
			Set<Text> uniqNeighbors = new LinkedHashSet<Text>();

			while (values.hasNext())
			{
				Text neighbor = values.next();

				if (LOG.isDebugEnabled())
					LOG.debug("Uniq reduce: ( " + key + ", " + neighbor + ")");

				// Copy: Hadoop reuses the Text instance between iterations, so
				// storing the reference itself would alias a single object.
				uniqNeighbors.add(new Text(neighbor));
			}

			for (Text neighbor : uniqNeighbors)
			{
				if (LOG.isDebugEnabled())
					LOG.debug("Neighbor of " + key + ": " + neighbor);
				output.collect(key, neighbor);
			}
		}

		public void close() throws IOException
		{
		}
	}

	/**
	 * Runs the job that seeds the neighborDb from the nodeDb, giving each
	 * url itself as its only starting neighbor.
	 *
	 * @param nodeDb the node database to read urls from
	 * @param neighborDb where the initial neighbor map is written
	 * @throws IOException if the job fails
	 */
	private void runNeighborInitializer(Path nodeDb, Path neighborDb) throws IOException
	{
		JobConf job = new NutchJob(getConf());
		job.setJobName("LinkAnalysis BFS algorithm - NeighborInitializer");

		FileInputFormat.addInputPath(job, nodeDb);
		FileOutputFormat.setOutputPath(job, neighborDb);

		job.setInputFormat(SequenceFileInputFormat.class);
		job.setMapperClass(NeighborInitializer.class);
		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(Text.class);
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(Text.class);
		job.setOutputFormat(MapFileOutputFormat.class);

		LOG.info("Starting BFS-neighbor-initialization job");
		try
		{
			JobClient.runJob(job);
		}
		catch (IOException e)
		{
			LOG.error(StringUtils.stringifyException(e));
			throw e;
		}
		LOG.info("Finished BFS-neighbor-initialization job.");
	}

	/**
	 * Expands the neighborDb by one BFS hop.
	 *
	 * Runs two chained jobs: NeighborFinder joins the current neighborDb
	 * with the inverted link database to find each url's next-hop
	 * neighbors, then Uniqer removes duplicate (url, neighbor) pairs and
	 * writes them to output.
	 *
	 * (Method name keeps the original "Neigbor" typo; renaming would break
	 * the caller.)
	 *
	 * @param depth current BFS depth (even follows inlinks, odd outlinks)
	 * @param linkDb the inverted link database
	 * @param neighborDb the current neighbor database
	 * @param output where the de-duplicated next-hop neighbors are written
	 * @throws IOException if either job fails
	 */
	private void genDepthNeigborDb(int depth, Path linkDb, Path neighborDb,
			Path output) throws IOException
	{
		Path tempNeighborDb = new Path(neighborDb.getParent(), "genDepthNeighborDb.tmp");

		JobConf neighborFinder = new NutchJob(getConf());

		neighborFinder.setInt(depthConfPath, depth);
		// Fixed copy-paste bug: this job is the NeighborFinder, not the Scorer.
		neighborFinder.setJobName("LinkAnalysis BFS algorithm - NeighborFinder,  depth " + depth +
				" of " + maxDepth);
		FileInputFormat.addInputPath(neighborFinder, neighborDb);
		FileInputFormat.addInputPath(neighborFinder, linkDb);
		FileOutputFormat.setOutputPath(neighborFinder, tempNeighborDb);

		neighborFinder.setMapOutputKeyClass(Text.class);
		neighborFinder.setMapOutputValueClass(ObjectWritable.class);
		neighborFinder.setInputFormat(SequenceFileInputFormat.class);
		neighborFinder.setMapperClass(NeighborFinder.class);
		neighborFinder.setReducerClass(NeighborFinder.class);
		neighborFinder.setOutputKeyClass(Text.class);
		neighborFinder.setOutputValueClass(Text.class);
		neighborFinder.setOutputFormat(SequenceFileOutputFormat.class);

		// run the neighbor finder (comment fixed; it said "initializer")
		LOG.info("Starting BFS-neighbor-searching job");
		try {
			JobClient.runJob(neighborFinder);
		}
		catch (IOException e) {
			LOG.error(StringUtils.stringifyException(e));
			throw e;
		}
		LOG.info("Finished BFS-neighbor-searching job.");

		// Remove duplicates from neighborDb.
		JobConf uniqer = new NutchJob(getConf());

		uniqer.setJobName("LinkAnalysis BFS algorithm - Uniqer,  depth " + depth +
				" of " + maxDepth);
		FileInputFormat.addInputPath(uniqer, tempNeighborDb);
		FileOutputFormat.setOutputPath(uniqer, output);

		uniqer.setMapOutputKeyClass(Text.class);
		uniqer.setMapOutputValueClass(Text.class);
		uniqer.setInputFormat(SequenceFileInputFormat.class);
		uniqer.setMapperClass(Uniqer.class);
		uniqer.setReducerClass(Uniqer.class);
		uniqer.setOutputKeyClass(Text.class);
		uniqer.setOutputValueClass(Text.class);
		uniqer.setOutputFormat(MapFileOutputFormat.class);

		// run the uniqer (comment fixed; it said "initializer")
		LOG.info("Starting BFS-neighbor-uniq job");
		try {
			JobClient.runJob(uniqer);
		}
		catch (IOException e) {
			LOG.error(StringUtils.stringifyException(e));
			throw e;
		}
		LOG.info("Finished BFS-neighbor-uniq job.");

		// NOTE(review): this replaces neighborDb with the *pre-Uniqer*
		// intermediate, and the caller then replaces neighborDb again with the
		// Uniqer output -- so the net effect here appears to be only cleanup
		// of tempNeighborDb. Confirm before simplifying to fs.delete(...).
		FSUtils.replace(fs, neighborDb, tempNeighborDb, true);
	}

	/**
	 * Runs the scoring job for one BFS depth: joins the neighborDb with the
	 * nodeDb and writes the rescored Node records to output.
	 *
	 * @param depth current BFS depth (controls the score decay factor)
	 * @param neighborDb the de-duplicated neighbor database
	 * @param nodeDb the node database holding current scores
	 * @param output where the rescored nodes are written
	 * @throws IOException if the job fails
	 */
	private void runScorer(int depth, Path neighborDb, Path nodeDb,
			Path output) throws IOException
	{
		JobConf job = new NutchJob(getConf());

		job.setInt(depthConfPath, depth);
		job.setJobName("LinkAnalysis BFS algorithm - Scorer,  depth " + depth +
				" of " + maxDepth);

		FileInputFormat.addInputPath(job, neighborDb);
		FileInputFormat.addInputPath(job, nodeDb);
		FileOutputFormat.setOutputPath(job, output);

		job.setInputFormat(SequenceFileInputFormat.class);
		job.setMapperClass(Scorer.class);
		job.setReducerClass(Scorer.class);
		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(ObjectWritable.class);
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(Node.class);
		job.setOutputFormat(MapFileOutputFormat.class);

		LOG.info("Starting BFS-scoring job");
		try
		{
			JobClient.runJob(job);
		}
		catch (IOException e)
		{
			LOG.error(StringUtils.stringifyException(e));
			throw e;
		}
		// NOTE: log label intentionally kept as "scorer" to match original output.
		LOG.info("Finished BFS-scorer job.");
	}

  /**
   * Runs the BFS scoring analysis (overrides the LinkRank analysis step).
   *
   * Seeds a neighborDb mapping every url to itself, then for each depth up
   * to maxDepth: expands the neighbor set by one hop (alternating inlinks
   * and outlinks) and rescores the nodes, each neighbor contributing
   * 0.5^depth to its url's score.
   *
   * @param nodeDb The node database from which we are getting previous link
   * rank scores.
   * @param inverted The inverted inlinks
   * @param output The link analysis output (the updated node database).
   * @param iteration The current iteration number (unused here; BFS forces
   * a single iteration in {@link #run(String[])}).
   * @param numIterations The total number of link analysis iterations
   * (unused here).
   * @param rankOne Unused by BFS.
   *
   * @throws IOException If an error occurs during link analysis.
   */
  protected void runAnalysis(Path nodeDb, Path inverted, Path output,
    int iteration, int numIterations, float rankOne)
    throws IOException
  {
		fs = FileSystem.get(getConf());
		neighborDb = new Path(output.getParent(), "bfs.neighborDb");

  	// Depth-0 frontier: every url is its own neighbor.
  	runNeighborInitializer(nodeDb, neighborDb);
  	// NOTE(review): copyToLocalFile copies nodeDb to the *local* file
  	// system, yet output is later read as a job input on fs -- this looks
  	// like it should be an on-cluster copy (e.g. FileUtil.copy); verify.
  	fs.copyToLocalFile(nodeDb, output);
  	for (int depth = 0; depth < maxDepth; depth++)
  	{
    	Path tempNeighborDb = new Path(neighborDb.getParent(), "bfs.neighbors.tmp");
    	Path tempNodeDb = new Path(neighborDb.getParent(), "bfs.nodes.tmp");

    	// Getting new neighborDb.
    	// NOTE(review): genDepthNeigborDb already replaces neighborDb with its
    	// intermediate output before returning, so neighborDb is replaced
    	// twice per depth; only this second replace (the Uniqer output) is
    	// the one that sticks -- confirm the first is intended as cleanup.
  		genDepthNeigborDb(depth, inverted, neighborDb, tempNeighborDb);
  		FSUtils.replace(fs, neighborDb, tempNeighborDb, true);

  		// Rescore nodes against the new frontier, then swap into output.
  		runScorer(depth, neighborDb, output, tempNodeDb);
  		FSUtils.replace(fs, output, tempNodeDb, true);
  	}

  }

	// BFS is a single-pass algorithm: force one iteration and a zero
	// initial score, disable LinkRank's normalization and convergence
	// phases, then delegate to LinkRank's driver.
	public int run(String[] args) throws Exception
	{
		getConf().setInt("link.analyze.num.iterations", 1);
		getConf().set("link.analyze.initial.score", "0");

		// Null hooks mean "skip these phases" -- presumably extension points
		// added to this project's LinkRank; verify against its implementation.
		setNormalizerMapClass(null);
		setNormalizerReduceClass(null);
		setConvergerMapClass(null);
		setConvergerReduceClass(null);

		return super.run(args);
	}

	/**
	 * Command-line entry point: runs the BFS tool through Hadoop's
	 * ToolRunner and exits with its result code.
	 *
	 * @param args command-line arguments, passed through to run()
	 * @throws Exception if the tool fails
	 */
	public static void main(String[] args) throws Exception
	{
		int res = ToolRunner.run(NutchConfiguration.create(), new BFS(), args);
		System.exit(res);
	}

}