package jobs;

//import input.XmlInputFormat;
//import mappers.ImportXMLFileMapper;
import java.io.BufferedReader;
import java.io.FileReader;
import java.net.URI;

import mappers.ImportNQuadInputFileMapper;
import reducers.ImportNQuadInputFileReducer;
import types.NodeInfo;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.security.UnixUserGroupInformation;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.log4j.Logger;

//import reducers.ImportXMLFileReducer;

/**
 * Hadoop {@link Tool} driver that imports an N-Quad input file.
 *
 * <p>Runs a single MapReduce job ({@link ImportNQuadInputFileMapper} /
 * {@link ImportNQuadInputFileReducer}) over a text input directory, shipping the
 * activations and weights data files to the tasks via the distributed cache.
 *
 * <p>Usage: {@code hadoop jar ... jobs.ImportNQuadInputFile [inputPath [outputPath]]}.
 * When the paths are omitted, the historical defaults ({@code inputnquad} /
 * {@code output0}) are used, so existing invocations keep working.
 */
public class ImportNQuadInputFile extends Configured implements Tool {

	private static final Logger sLogger = Logger.getLogger(ImportNQuadInputFile.class);

	/** Default input directory, used when no command-line argument is given. */
	private static final String DEFAULT_INPUT_PATH = "inputnquad";
	/** Default output directory, used when no command-line argument is given. */
	private static final String DEFAULT_OUTPUT_PATH = "output0";

	/**
	 * Configures and runs the import job.
	 *
	 * @param args optional positional arguments: {@code args[0]} = input path,
	 *             {@code args[1]} = output path; defaults are used when absent
	 * @return 0 if the job completed successfully, 1 otherwise
	 * @throws Exception if job setup or execution fails
	 */
	@Override
	public int run(String[] args) throws Exception {

		Configuration conf = getConf();

		// Merge our custom config variables (files.activations / files.weights etc.)
		conf.addResource("mrspread.xml");

		// Resolve the activations and weights file paths from the merged config,
		// falling back to files in the working directory.
		Path activationsPath = new Path(conf.get("files.activations", "activations.dat"));
		Path weightsPath = new Path(conf.get("files.weights", "weights.dat"));

		// Ship both data files to every task via the distributed cache.
		DistributedCache.addCacheFile(activationsPath.toUri(), conf);
		DistributedCache.addCacheFile(weightsPath.toUri(), conf);

		Job job = new Job(conf, "ImportNQuadInputFile");

		job.setJarByClass(ImportNQuadInputFile.class);
		job.setMapperClass(ImportNQuadInputFileMapper.class);
		job.setReducerClass(ImportNQuadInputFileReducer.class);

		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(NodeInfo.class);

		job.setInputFormatClass(TextInputFormat.class);
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(Text.class);

		// Allow overriding the default paths from the command line; previously
		// the args parameter was ignored and the paths were hard-coded.
		Path inputPath = new Path(args.length > 0 ? args[0] : DEFAULT_INPUT_PATH);
		Path outputPath = new Path(args.length > 1 ? args[1] : DEFAULT_OUTPUT_PATH);

		// Delete the output directory if it exists — Hadoop refuses to start a
		// job whose output directory is already present.
		FileSystem hdfs = FileSystem.get(conf);
		hdfs.delete(outputPath, true);

		FileInputFormat.addInputPath(job, inputPath);
		FileOutputFormat.setOutputPath(job, outputPath);

		long time = System.currentTimeMillis();
		boolean success = job.waitForCompletion(true);
		sLogger.info("Job finished in " + (System.currentTimeMillis() - time) / 1000.0f + " s");

		// Propagate the job outcome; the original always returned 0, so a
		// failed job still exited with a success status.
		return success ? 0 : 1;
	}

	//----------------------------------------------------------------------
	/**
	 * Command-line entry point: delegates to {@link ToolRunner} so generic
	 * Hadoop options (-D, -conf, ...) are parsed, then exits with the job status.
	 */
	public static void main(String[] args) throws Exception {

		int res = ToolRunner.run(new Configuration(), new ImportNQuadInputFile(), args);
		System.exit(res);
	}

}
