package edu.npu.GraphIndex;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.IdentityHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Queue;
import java.util.Scanner;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Partitioner;
import org.apache.hadoop.mapreduce.Reducer;

public class GraphIndexGeneration {
	final static String SEPARATOR = "\t";
	final static String PATH_PREFIX = "hdfs://test118:9000/user/zhao/";
	final public static int HOP = 4;// number of hops up to which shortest paths are recorded
	public static class GraphIndexGenerationMapper extends Mapper<Text, Text, Text, EdgeWritable>{
		/**
		 * Parses one edge record from the input value and re-keys it by its
		 * partition id. The value is expected to carry at least three
		 * tab-separated fields: partition id, source vertex id, destination
		 * vertex id (the incoming key is ignored).
		 */
		@Override
		protected void map(Text key, Text value, Context context)
				throws IOException, InterruptedException {
			String[] fields = value.toString().split(SEPARATOR);
			LongWritable source = new LongWritable(Long.parseLong(fields[1]));
			LongWritable target = new LongWritable(Long.parseLong(fields[2]));
			context.write(new Text(fields[0]), new EdgeWritable(source, target));
		}
	}
	public static class GraphIndexGenerationPartitioner extends Partitioner<Text, EdgeWritable> {
		/**
		 * Routes each edge to the reducer responsible for its partition id
		 * (the map output key).
		 *
		 * <p>Fix: the Partitioner contract requires the result to lie in
		 * {@code [0, num)}. The original returned the raw parsed id, so any
		 * partition id {@code >= num} (or a negative id) made the shuffle
		 * fail with an "Illegal partition" error. Floor-mod keeps the result
		 * in range while preserving the id-to-reducer mapping whenever the
		 * job is configured with at least as many reducers as partitions.
		 */
		@Override
		public int getPartition(Text key, EdgeWritable value, int num) {
			int id = Integer.parseInt(key.toString());
			return (id % num + num) % num;
		}
	}
	public static class GraphIndesGenerationReducer extends Reducer<Text, EdgeWritable, Text, Text> {
		int part = -1;// id of the partition this reducer is currently indexing
		Map<Long, List<Long>> graph = new HashMap<Long, List<Long>>();// adjacency lists of the partition's graph
		Map<Long, String> label = new HashMap<Long, String>();// vertex id -> label
		Map<Long, Integer> distance = new HashMap<Long, Integer>();// vertex id -> hop distance (filled by DistanceInit BFS)
		Map<Long, Map<Integer, Map<String, CntVid>>> histogramIndex = 
			new HashMap<Long, Map<Integer, Map<String, CntVid>>>();// vertex -> distance -> label -> vertices
		Map<String, List<Long>> lookupTable = new HashMap<String, List<Long>>();// label -> vertices carrying it
		Map<Long, Integer> partition = new HashMap<Long, Integer>();// vertex id -> owning partition id
		/**
		 * Builds the full index for one partition and writes it out in four
		 * sections: #boundary, #distance, #lookuptable and #histogram.
		 *
		 * <p>Fixes vs. the original: (1) the instance-level state maps are
		 * cleared at the start of each call — Hadoop reuses the reducer
		 * instance for every key it receives, so leftover state from a
		 * previous partition would corrupt the next one; (2) the label file
		 * is scanned with {@code hasNextLine()} to match {@code nextLine()};
		 * (3) the scanner (and its HDFS stream) is closed in a finally
		 * block; (4) labeled vertices without local edges are skipped in the
		 * histogram pass instead of causing an NPE.
		 */
		@Override
		protected void reduce(Text key, Iterable<EdgeWritable> values, Context context)
				throws IOException, InterruptedException {
			// Discard any state accumulated for a previously processed key.
			graph.clear();
			label.clear();
			distance.clear();
			histogramIndex.clear();
			lookupTable.clear();
			partition.clear();

			Queue<Long> bfsQueue = new LinkedList<Long>();

			part = Integer.parseInt(key.toString());
			// Load the global "partition#vertex<TAB>label" file from HDFS.
			FileSystem fs = FileSystem.get(new Configuration());
			FSDataInputStream fsd = fs.open(new Path(PATH_PREFIX + "label"));
			Scanner scanner = new Scanner(fsd);
			context.write(new Text("#boundary"), null);
			try {
				// Build the label and partition maps; entries whose key still
				// carries a "part#vertex" prefix are echoed as boundary records.
				while(scanner.hasNextLine()) {
					String str = scanner.nextLine();
					String[] splits = str.split(SEPARATOR);
					String[] part_key = splits[0].split("#");
					if(part_key.length == 2)
						context.write(new Text(splits[0]), new Text(part_key[0]));
					long vid = Long.parseLong(part_key[part_key.length - 1]);
					label.put(vid, splits[1]);
					partition.put(vid, Integer.parseInt(part_key[0]));
				}
			} finally {
				scanner.close();// also closes the underlying HDFS stream
			}

			// Build the raw topology and seed the BFS queue with boundary vertices.
			InitiateGraph(graph, values, partition, bfsQueue, part);

			// Compute hop distances via BFS from the boundary seeds.
			DistanceInit distInit = new DistanceInit(bfsQueue, distance, partition, part);
			BasicGraphBFS distBgb = new BasicGraphBFS(bfsQueue);
			distBgb.traverse(graph, distInit);

			// Build the per-vertex histogram index (distance -> label -> vertices).
			BasicGraphBFS histBgb = new BasicGraphBFS();
			HistogramInit histInit = new HistogramInit();
			for(Entry<Long, String> entry : label.entrySet()) {
				List<Long> neighbors = graph.get(entry.getKey());
				if(neighbors == null)
					continue;// labeled vertex with no edges in this partition
				if(!histogramIndex.containsKey(entry.getKey())) {
					histogramIndex.put(entry.getKey(), new HashMap<Integer, Map<String, CntVid>>());
				}
				histInit.histogram = histogramIndex.get(entry.getKey());
				// Seed the traversal queue with the vertex's direct neighbors.
				for(long node : neighbors) {
					histInit.queue.add(node);
				}
				histBgb.queue = histInit.queue;
				histInit.label = label;
				histInit.traversed.add(entry.getKey());
				histBgb.traverse(graph, histInit);
				histInit.clear();
			}

			// Invert label -> vertices into the lookup table.
			for(Entry<Long, String> entry : label.entrySet()) {
				if(!lookupTable.containsKey(entry.getValue()))
					lookupTable.put(entry.getValue(), new ArrayList<Long>());
				lookupTable.get(entry.getValue()).add(entry.getKey());
			}

			// Emit the index sections into this partition's output.
			context.write(new Text("#distance"), null);
			for(Entry<Long, Integer> entry : distance.entrySet()) {
				context.write(new Text(entry.getKey() + ""), new Text(entry.getValue() + ""));
			}
			context.write(new Text("#lookuptable"), null);
			StringBuilder sb = new StringBuilder();
			for(Entry<String, List<Long>> entry : lookupTable.entrySet()) {
				for(long node : entry.getValue()) {
					sb.append(node + "#");
				}
				context.write(new Text(entry.getKey()), new Text(sb.toString()));
				sb.delete(0, sb.length());
			}
			context.write(new Text("#histogram"), null);
			for(Entry<Long, Map<Integer, Map<String, CntVid>>> entry : histogramIndex.entrySet()) {
				for(Entry<Integer, Map<String, CntVid>> distEntry : entry.getValue().entrySet()) {
					for(Entry<String, CntVid> labelEntry : distEntry.getValue().entrySet()) {
						// Only the vertex ids are written; counts are recomputed
						// when the index is restored.
						for(long node : labelEntry.getValue().vid) {
							sb.append(node + SEPARATOR);
						}
						context.write(new Text(entry.getKey() + ""), new Text(distEntry.getKey() + SEPARATOR + 
								labelEntry.getKey() + SEPARATOR + sb.toString()));
						sb.delete(0, sb.length());
					}
				}
			}
		}
	}
	/**
	 * Builds an undirected adjacency-list graph from the reducer's edge
	 * stream and collects the local endpoint of every cross-partition edge
	 * as a BFS seed.
	 *
	 * <p>Fix: the original compared partition ids with {@code !=} on boxed
	 * {@code Integer} values, which tests object identity — for ids outside
	 * the Integer cache (-128..127) two equal ids are distinct objects, so
	 * every edge was wrongly treated as a boundary edge. {@code equals} is
	 * used instead, and edges whose endpoints have no known partition are
	 * skipped rather than risking an NPE on unboxing.
	 *
	 * @param graph     adjacency lists, filled in place (both directions added per edge)
	 * @param edges     the edge records delivered to the reducer
	 * @param partition vertex id -> owning partition id
	 * @param bfsQueue  receives, for each cross-partition edge, the endpoint
	 *                  that does not belong to {@code part}
	 * @param part      id of the partition currently being processed
	 */
	public static void InitiateGraph(Map<Long, List<Long>> graph, Iterable<EdgeWritable> edges, 
			Map<Long, Integer> partition, Queue<Long> bfsQueue, int part) {
		for(EdgeWritable edge : edges) {
			long src = edge.src.get();
			long dist = edge.dist.get();
			// Record the edge in both directions (undirected graph).
			addNeighbor(graph, src, dist);
			addNeighbor(graph, dist, src);
			Integer srcPart = partition.get(src);
			Integer distPart = partition.get(dist);
			if(srcPart != null && distPart != null && !srcPart.equals(distPart)) {
				// Boundary edge: enqueue the endpoint owned by the other partition.
				if(srcPart.intValue() != part)
					bfsQueue.add(src);
				else
					bfsQueue.add(dist);
			}
		}
	}

	// Appends neighbor to node's adjacency list, creating the list on first use.
	private static void addNeighbor(Map<Long, List<Long>> graph, long node, long neighbor) {
		List<Long> list = graph.get(node);
		if(list == null) {
			list = new ArrayList<Long>();
			graph.put(node, list);
		}
		list.add(neighbor);
	}

	/**
	 * Scratch demo of nested-map updates; produces no output.
	 *
	 * <p>Fix: the original used an {@code IdentityHashMap} with autoboxed
	 * {@code Integer} keys, which only behaved like a value-keyed map
	 * because small ints (-128..127) are interned by the Integer cache — a
	 * latent trap for any larger key. A plain {@code HashMap} expresses the
	 * intended value-based keying directly.
	 */
	public static void main(String[] args) {
		Map<Integer, HashMap<Integer, Integer>> map = new HashMap<Integer, HashMap<Integer, Integer>>();
		map.put(1, new HashMap<Integer, Integer>());
		map.get(1).put(1, 1);
		map.put(1, new HashMap<Integer, Integer>());// replaces the first inner map
		map.get(1).put(2, 2);
	}
}









