package DatabaseManaging;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.log4j.Logger;
import org.apache.tools.ant.types.CommandlineJava.SysProperties;

import Autodesk.ParseLogs;
import DatabaseManaging.DatabaseManager;
import WebserviceChecker.CheckWebServiceProcess;
public class DataLoaderDB extends Configured implements Tool{
	private static Logger logger = Logger.getLogger(DataLoaderDB.class);

// ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
											// Mapper //
// ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	public static class LogMapper extends Mapper<Object, Text, Text, Text> {
		/** Constant emitted as both key and value for every input record. */
		Text keyText = new Text("1");

		/**
		 * Discards the record content and emits the constant pair ("1", "1"),
		 * so the whole input collapses into a single reduce group where the
		 * actual work (the database import) is performed exactly once.
		 */
		public void map(Object key, Text value, Context context)
				throws IOException, InterruptedException {
			context.write(keyText, keyText);
		}
	}
	// ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
											// Reducer //
	// ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	public static class LogReducer extends Reducer<Text, Text, Text, Text> {

		File filetoInsert;
		String table;
		String filePath;
		public void setup(Context context){
			Configuration conf=context.getConfiguration();
			filePath = conf.get("file.path");
			 if (conf.getBoolean("file.patterns",false )){
				  Path[] patternsFiles = new Path[0];
				  try{
			           patternsFiles = DistributedCache.getLocalCacheFiles(conf);
				  }catch(IOException ioe){
			           System.err.println("Caught exception while getting cached files: ");
				  }
				  for (Path patternsFile : patternsFiles) {
					  filetoInsert=new File(patternsFile.getName());
					  logger.info("file uploaded");
				  }
			 }
			 table = conf.get("table");
		}

		public void reduce(Text key, Iterable<Text> values, Context context)
				throws IOException, InterruptedException {
			
			Properties prop=new Properties();
	    	DatabaseManager db = new DatabaseManager();
			prop.load(DatabaseManager.class.getClassLoader().getResourceAsStream("DBproperties"));
			Connection conn=db.connect(prop.getProperty("jdbc.url"), prop.getProperty("jdbc.username"), prop.getProperty("db_password"));
			System.out.println("FILE: "+filePath);
			System.out.println("Paramenter: "+table);

			logger.info("FILE: "+filePath);
			logger.info("Paramenter: "+table);
			db.importDataFile(conn, filePath, table);
		}
	}
// ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
										// Main Method //
// ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

/**
 * Configures and runs the single-reducer import job.
 *
 * <p>Recognized flags: {@code -file <path>} (file to distribute and import)
 * and {@code -table <name>} (target DB table). The remaining two positional
 * arguments are the job's input and output paths.
 *
 * @param args command-line arguments after Hadoop's generic options
 * @return 0 on success, 1 on job failure, 2 on bad usage
 */
public int run(String[] args) throws Exception {
	Configuration conf = getConf();

	Job job = new Job(conf, "DataLoaderDB");
	// Bug fix: locate the jar from THIS class, not the unrelated
	// CheckWebServiceProcess — otherwise the wrong jar may be shipped.
	job.setJarByClass(DataLoaderDB.class);

	job.setMapperClass(LogMapper.class);
	job.setReducerClass(LogReducer.class);

	job.setOutputKeyClass(Text.class);
	job.setOutputValueClass(Text.class);

	List<String> otherArgs = new ArrayList<String>();
	for (int i = 0; i < args.length; ++i) {
		if ("-file".equals(args[i])) {
			Path filePath = new Path(args[++i]);
			// Ship the file via the distributed cache and record its path
			// so the reducer can locate and import it.
			DistributedCache.addCacheFile(filePath.toUri(), job.getConfiguration());
			job.getConfiguration().setBoolean("file.patterns", true);
			job.getConfiguration().set("file.path", filePath.toString());
		} else if ("-table".equals(args[i])) {
			job.getConfiguration().set("table", args[++i]);
		} else {
			otherArgs.add(args[i]);
		}
	}

	if (otherArgs.size() < 2) {
		// Previously a missing positional argument crashed with
		// IndexOutOfBoundsException on other_args.get(...).
		System.err.println(
				"Usage: DataLoaderDB [-file <path>] [-table <name>] <input> <output>");
		return 2;
	}

	FileInputFormat.setInputPaths(job, new Path(otherArgs.get(0)));
	FileOutputFormat.setOutputPath(job, new Path(otherArgs.get(1)));

	// Return the status instead of calling System.exit(): ToolRunner/main
	// propagate it, and exit() here made the trailing return unreachable.
	return job.waitForCompletion(true) ? 0 : 1;
}

/**
 * Entry point: delegates to {@link ToolRunner} so Hadoop's generic options
 * are parsed before {@link #run(String[])} executes, then exits with the
 * job's status code.
 */
public static void main(String[] args) throws Exception {
	System.exit(ToolRunner.run(new Configuration(), new DataLoaderDB(), args));
}

}
