package mappers;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.HashMap;
import java.util.StringTokenizer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.BufferedFSInputStream;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Mapper.Context;
import org.apache.log4j.Logger;

import types.NodeInfo;

public class ImportNQuadInputFileMapper extends Mapper<LongWritable, Text, Text, NodeInfo> {

	private static final Logger sLogger = Logger.getLogger(ImportNQuadInputFileMapper.class);
	
	Configuration conf = null;
	HashMap<String, Float> activationMap = null;
	HashMap<String, Float> weightMap = null;
	
	/**
	 * Loads the distributed-cache side files (node activations and predicate
	 * weights) into in-memory maps before any map() call.
	 *
	 * Cache files are matched against the configured names
	 * ("files.activations" / "files.weights") by their last path component only.
	 *
	 * @param context task context providing the job {@link Configuration}
	 * @throws IOException if a cache file cannot be read
	 */
	@Override
	public void setup(Context context) throws IOException {
		// Get configuration
		conf = context.getConfiguration();
		
		// Only the file name (last path component) is used to identify the
		// cache files; the configured values may be full paths.
		Path activationsPath = new Path(conf.get("files.activations", "activations.dat"));
		Path weightsPath = new Path(conf.get("files.weights", "weights.dat"));
		
		activationMap = new HashMap<String, Float>();
		weightMap = new HashMap<String, Float>();
		
		// Local paths of files shipped via the distributed cache.
		// getLocalCacheFiles may return null when no files were registered;
		// in that case fall back to the configured defaults only.
		Path[] localFiles = DistributedCache.getLocalCacheFiles(conf);
		if (localFiles == null) {
			sLogger.warn("No distributed cache files found; using default activation/weight only");
			return;
		}
		
		for (Path cachePath : localFiles) {
			String fileName = cachePath.getName();
			
			if (fileName.equals(weightsPath.getName())) {
				loadFloatMap(cachePath, weightMap, "WEIGHT");
			}
			else if (fileName.equals(activationsPath.getName())) {
				loadFloatMap(cachePath, activationMap, "ACTIVATION");
			}
		}
	}
	
	/**
	 * Parses a whitespace-separated two-column file of (URI, float) pairs into
	 * the given map. Lines without exactly two tokens, or whose second token is
	 * not a parseable float, are skipped with a warning.
	 *
	 * @param cachePath local path of the cache file to read
	 * @param map       destination map (URI -> value)
	 * @param label     tag used in log messages ("WEIGHT" or "ACTIVATION")
	 * @throws IOException if the file cannot be read
	 */
	private void loadFloatMap(Path cachePath, HashMap<String, Float> map, String label) throws IOException {
		// NOTE(review): FileReader uses the platform default charset — confirm
		// the cache files are written in a compatible encoding.
		BufferedReader br = new BufferedReader(new FileReader(cachePath.toString()));
		try {
			String line;
			while ((line = br.readLine()) != null) {
				StringTokenizer tk = new StringTokenizer(line);
				// Ignore blank/malformed lines that do not have exactly 2 tokens
				if (tk.countTokens() != 2) continue;
				
				String uri = tk.nextToken().trim();
				Float value;
				try {
					value = Float.valueOf(tk.nextToken().trim());
				}
				catch (NumberFormatException e) {
					// Skip the malformed value instead of failing the whole task
					sLogger.warn("Skipping malformed " + label + " line: " + line);
					continue;
				}
				map.put(uri, value);
				sLogger.info("HASHMAP " + label + " PUT: " + uri + " " + value);
			}
		}
		finally {
			// Original code leaked the reader; always close it.
			br.close();
		}
	}
	
	/**
	 * Emits one (subject, NodeInfo) pair per well-formed input line.
	 *
	 * A line is expected to hold exactly 5 whitespace-separated tokens
	 * (N-Quad: subject, predicate, object, context, terminator); anything else
	 * is silently skipped. The subject's activation and the predicate's weight
	 * come from the cached maps, falling back to the configured defaults.
	 * Edges whose resolved weight is 0.0 are pruned (not emitted).
	 *
	 * @param key     byte offset of the line (unused)
	 * @param value   one N-Quad text line
	 * @param context output collector
	 */
	@Override
	public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
	
		// Tokenize line; reject anything that is not exactly 5 tokens
		StringTokenizer tokenizer = new StringTokenizer(value.toString());
		if (tokenizer.countTokens() != 5) {
			// WRONG LINE
			return;
		}
		
		// Create out value
		NodeInfo outValue = new NodeInfo();
		
		// Get subject
		String subject = tokenizer.nextToken();
		outValue.setNode(subject);
		
		// Single lookup (instead of containsKey + get); fall back to the
		// configured default activation when the subject is not in the cache.
		Float cachedActivation = activationMap.get(subject);
		float activation = (cachedActivation != null)
				? cachedActivation.floatValue()
				: conf.getFloat("algorithm.activation.default", 0.0f);
		outValue.setActivation(activation);
		
		// Get predicate
		String predicate = tokenizer.nextToken();
		
		// Same single-lookup pattern for the predicate weight.
		Float cachedWeight = weightMap.get(predicate);
		float weight = (cachedWeight != null)
				? cachedWeight.floatValue()
				: conf.getFloat("algorithm.weight.default", 0.0f);
		
		// Get object and attach the weighted connection
		String object = tokenizer.nextToken();
		outValue.addConnection(object, weight);
		
		// A zero weight bounds (prunes) this edge: emit nothing for it
		if (weight != 0.0f) {
			context.write(new Text(subject), outValue);
		}
	}
}
