/**
 * Copyright 2010 Sonal Goyal
 * 
 * Licensed under the Apache License, Version 2.0 (the "License"); 
 * you may not use this file except in compliance with the License. 
 * You may obtain a copy of the License at 
 * 
 * http://www.apache.org/licenses/LICENSE-2.0 
 * 
 * Unless required by applicable law or agreed to in writing, software distributed 
 * under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR 
 * CONDITIONS OF ANY KIND, either express or implied. 
 * See the License for the specific language governing permissions and limitations under the License. 
 */

package com.meghsoft.hiho.job;

import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Map.Entry;

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.log4j.Logger;
import org.codehaus.jackson.map.ObjectMapper;

import com.meghsoft.hiho.common.HIHOConf;
import com.meghsoft.hiho.common.OutputStrategyEnum;
import com.meghsoft.hiho.mapreduce.DBInputAvroMapper;
import com.meghsoft.hiho.mapreduce.DBInputDelimMapper;
import com.meghsoft.hiho.mapreduce.lib.db.DBQueryInputFormat;
import com.meghsoft.hiho.mapreduce.lib.output.NoKeyOnlyValueOutputFormat;


/**
 * Hadoop job that imports the results of a database query into HDFS.
 *
 * <p>The query and bounding query are read from {@link DBConfiguration}
 * ({@code INPUT_QUERY} / {@code INPUT_BOUNDING_QUERY}); the output format
 * (DUMP, AVRO, DELIMITED, JSON) is selected via
 * {@link HIHOConf#INPUT_OUTPUT_STRATEGY}. The job is map-only
 * (zero reducers) and writes values-only output to
 * {@link HIHOConf#INPUT_OUTPUT_PATH}.
 */
public class DBQueryInputJob extends Configured implements Tool {

	private final static Logger logger = Logger
			.getLogger(com.meghsoft.hiho.job.DBQueryInputJob.class);

	// Positional parameters substituted into the input query's
	// placeholders by DBQueryInputFormat; must be set before run().
	private ArrayList params;

	/**
	 * Configures and runs the import job.
	 *
	 * @param args unused; all settings come from the Configuration
	 * @return 0 if the job completed successfully, 1 if it failed
	 * @throws IOException if job submission fails, the configured output
	 *         strategy is missing/invalid, or the job is interrupted
	 */
	@Override
	public int run(String[] args) throws IOException {
		Configuration conf = getConf();

		Job job = new Job(conf);
		// Dumping the entire Configuration is expensive; only do the
		// string concatenation when debug logging is actually enabled.
		if (logger.isDebugEnabled()) {
			for (Entry<String, String> entry : conf) {
				logger.debug("key, value " + entry.getKey() + "="
						+ entry.getValue());
			}
		}
		job.getConfiguration().setInt(JobContext.NUM_MAPS,
				conf.getInt(HIHOConf.NUMBER_MAPPERS, 1));
		logger.debug("Number of maps " + conf.getInt(JobContext.NUM_MAPS, 1));

		job.setJobName("Import job");
		job.setJarByClass(DBQueryInputJob.class);

		String strategy = conf.get(HIHOConf.INPUT_OUTPUT_STRATEGY);
		// Enum.valueOf never returns null: it throws NullPointerException
		// for null input and IllegalArgumentException for unknown names,
		// so validate explicitly instead of null-checking its result.
		if (strategy == null) {
			throw new IllegalArgumentException(
					"Output strategy not specified. Please correct");
		}
		OutputStrategyEnum os;
		try {
			os = OutputStrategyEnum.valueOf(strategy);
		} catch (IllegalArgumentException e) {
			throw new IllegalArgumentException(
					"Wrong value of output strategy. Please correct", e);
		}
		switch (os) {

			case DUMP: {
				//job.setMapperClass(DBImportMapper.class);
				break;
			}
			case AVRO: {
				job.setMapperClass(DBInputAvroMapper.class);
				//need avro in cp
				//job.setJarByClass(Schema.class);
				//need jackson which is needed by avro - ugly!
				//job.setJarByClass(ObjectMapper.class);
				job.setMapOutputKeyClass(GenericRecord.class);
				job.setMapOutputValueClass(Text.class);
				break;
			}
			case DELIMITED: {
				job.setMapperClass(DBInputDelimMapper.class);
				job.setMapOutputKeyClass(Text.class);
				job.setMapOutputValueClass(Text.class);
				job.setOutputKeyClass(Text.class);
				job.setOutputValueClass(Text.class);
				break;
			}
			case JSON: {
				//job.setMapperClass(DBImportJsonMapper.class);
				//job.setJarByClass(ObjectMapper.class);
				break;
			}
			default: {
				// Fall back to delimited output for any future enum values.
				job.setMapperClass(DBInputDelimMapper.class);
				job.setMapOutputKeyClass(Text.class);
				job.setMapOutputValueClass(Text.class);
				job.setOutputKeyClass(Text.class);
				job.setOutputValueClass(Text.class);
				break;
			}
		}

		String inputQuery = conf.get(DBConfiguration.INPUT_QUERY);
		String inputBoundingQuery = conf.get(DBConfiguration.INPUT_BOUNDING_QUERY);
		logger.debug("About to set the params");
		DBQueryInputFormat.setInput(job, inputQuery, inputBoundingQuery, params);
		logger.debug("Set the params");

		job.setOutputFormatClass(NoKeyOnlyValueOutputFormat.class);

		FileOutputFormat.setOutputPath(job,
				new Path(getConf().get(HIHOConf.INPUT_OUTPUT_PATH)));

		// Map-only job: the mappers write the imported rows directly.
		job.setNumReduceTasks(0);

		try {
			// Propagate the job's success/failure to the caller instead of
			// swallowing it and always returning 0 (which made main()'s
			// System.exit report success even for failed jobs).
			boolean success = job.waitForCompletion(false);
			return success ? 0 : 1;
		} catch (InterruptedException e) {
			// Restore the interrupt flag before converting to IOException.
			Thread.currentThread().interrupt();
			throw new IOException("Job was interrupted", e);
		} catch (IOException e) {
			throw e;
		} catch (Exception e) {
			throw new IOException("Job failed", e);
		}
	}

	/**
	 * Command-line entry point. Exits with the job's return code.
	 *
	 * @param args forwarded to {@link ToolRunner#run}
	 * @throws Exception if the tool runner fails
	 */
	public static void main(String[] args) throws Exception {
		//setUp();
		DBQueryInputJob job = new DBQueryInputJob();
		ArrayList params = new ArrayList();
		params.add(false);
		job.setParams(params);
		int res = ToolRunner.run(new Configuration(), job,
				args);
		System.exit(res);
	}

	/**
	 * @return the query parameters passed to the input format, may be null
	 */
	public ArrayList getParams() {
		return params;
	}

	/**
	 * Sets the positional parameters substituted into the input query.
	 *
	 * @param params the parameter values, in placeholder order
	 */
	public void setParams(ArrayList params) {
		this.params = params;
	}

	/* this will move to the junit once everything is properly done

	public static void setUp() {
		// set up the database
		String db = "cumulus";
		String root = "root";
		String pwd = "newpwd";

		String user = "tester";
		String password = "password";

		Connection conn;
		String url = "jdbc:mysql://localhost:3306/";
		String driverName = "com.mysql.jdbc.Driver";

		try {
			Class.forName(driverName).newInstance();
			conn = DriverManager.getConnection(url, root, pwd);
			try {
				Statement st = conn.createStatement();

				String dbDrop = "drop database if exists " + db;
				st.executeUpdate(dbDrop);
				logger.debug("Dropped database");

				String dbCreate = "create database " + db;
				st.executeUpdate(dbCreate);
				logger.debug("Created database");

				// Register a new user named tester on the
				// database named cumulus with a password
				// password enabling several different
				// privileges.
				st.executeUpdate("GRANT SELECT,INSERT,UPDATE,DELETE,"
						+ "CREATE,DROP " + "ON " + db + ".* TO '" + user
						+ "'@'localhost' " + "IDENTIFIED BY '" + password
						+ "';");
				logger.debug("Created user tester");
				st.close();

				// now connect to the relevent db and create the schema
				conn = DriverManager.getConnection(url + db, root, pwd);
				st = conn.createStatement();

				String desTable = "Create table if not exists designations(id integer, designation varchar(30));\n";
				st = conn.createStatement();
				st.executeUpdate(desTable);

				logger.debug(desTable);

				String desTableData = "insert into designations(id, designation) values(";
				desTableData += "0, 'Manager');\n";
				st.executeUpdate(desTableData);
				desTableData = "insert into designations(id, designation) values(";
				desTableData += "1, 'Accountant');\n";
				st.executeUpdate(desTableData);
				desTableData = "insert into designations(id, designation) values(";
				desTableData += "2, 'Assistant');\n";
				st.executeUpdate(desTableData);
				desTableData = "insert into designations(id, designation) values(";
				desTableData += "3, 'Sr. Manager');\n";
				logger.debug(desTableData);
				st.executeUpdate(desTableData);
				logger.debug("Data inserted into designations");

				String table = "CREATE TABLE if not exists extractJobEmployee(id integer, name varchar(50), age integer";
				table += ", isMarried boolean, salary double, designationId integer);";
				st = conn.createStatement();
				st.executeUpdate(table);

				logger.debug("Schema creation process successful!");
				logger.debug("Inserting table data");

				for (int i = 0; i < 50; ++i) {
					int designation = i % 4;
					String tableData = "Insert into extractJobEmployee(id, name, age, isMarried, salary, designationId) values(";
					tableData += i + ", 'Employee" + i;
					tableData += "', 25, false, 349.9," + designation + ");\n";
					logger.debug(tableData);
					st = conn.createStatement();
					st.executeUpdate(tableData);
				}
				// conn.commit();
				logger.debug("Table data insertion process successful!");
			} catch (SQLException s) {
				s.printStackTrace();
			}
			conn.close();
		} catch (Exception e) {
			e.printStackTrace();
		}

	}
	*/

}
