import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.Reducer.Context;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.mapreduce.lib.db.DataDrivenDBInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;



/**
 * MapReduce driver that imports a month of MySQL tables (BR0701 .. BR0730,
 * read through {@link DataDrivenDBInputFormat}) into a single HBase table.
 *
 * <p>Each source record produces two HBase rows:
 * <ul>
 *   <li>row {@code SUBLOT_ID:GRADE}, cell {@code att:GRADE} = GRADE</li>
 *   <li>row {@code SUBLOT_ID:CHAR_ID}, cell {@code char_id:VALUE_TBL} = VALUE_TBL</li>
 * </ul>
 *
 * <p>Usage: {@code AdConverter -t <table-name> [-c family:qualifier]
 * [-o path-in-HDFS] [-d]}. The target table is created (families {@code att}
 * and {@code char_id}) if it does not already exist.
 */
public class AdConverter {

  /** Tool name used in usage/help output and error counters. */
  public static final String NAME = "AdConverter";

  /**
   * Parses the command line, printing usage and exiting the JVM on error.
   *
   * @param args remaining arguments after generic Hadoop options are stripped
   * @return the parsed command line (a parse failure exits with status -1)
   * @throws ParseException declared for interface compatibility; parse errors
   *     are actually handled internally by printing help and exiting
   */
  private static CommandLine parseArgs(String[] args) throws ParseException {
    Options options = new Options();
    Option o = new Option("t", "table", true,
      "table to read from (must exist)");
    o.setArgName("table-name");
    o.setRequired(true);
    options.addOption(o);
    o = new Option("c", "column", true,
      "column to read data from (must exist)");
    o.setArgName("family:qualifier");
    options.addOption(o);
    o = new Option("o", "output", true,
      "the directory to write to");
    o.setArgName("path-in-HDFS");
    o.setRequired(false);
    options.addOption(o);
    options.addOption("d", "debug", false, "switch on DEBUG log level");
    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;
    try {
      cmd = parser.parse(options, args);
    } catch (Exception e) {
      System.err.println("ERROR: " + e.getMessage() + "\n");
      HelpFormatter formatter = new HelpFormatter();
      formatter.printHelp(NAME + " ", options, true);
      System.exit(-1);
    }
    if (cmd.hasOption("d")) {
      Logger log = Logger.getLogger("mapreduce");
      log.setLevel(Level.DEBUG);
      System.out.println("DEBUG ON");
    }
    return cmd;
  }

  /**
   * Maps one database record to two HBase {@link Put}s (GRADE row and
   * CHAR_ID row). Failures are counted and logged instead of silently
   * dropped, but do not abort the job.
   */
  static class DBMapper
      extends Mapper<LongWritable, BrRecord, ImmutableBytesWritable, Put> {

    // Column families, created once instead of on every map() call.
    private static final byte[] ATT_FAM = Bytes.toBytes("att");
    private static final byte[] CHAR_FAM = Bytes.toBytes("char_id");

    @Override
    public void map(LongWritable offset, BrRecord brRecord, Context context) {
      try {
        String subId = brRecord.SUBLOT_ID;

        // Row 1: <SUBLOT_ID>:GRADE -> att:GRADE = GRADE
        byte[] gradeRow = Bytes.toBytes(subId + ":" + "GRADE");
        Put gradePut = new Put(gradeRow);
        gradePut.add(ATT_FAM, Bytes.toBytes("GRADE"),
            Bytes.toBytes(brRecord.GRADE));
        context.write(new ImmutableBytesWritable(gradeRow), gradePut);

        // Row 2: <SUBLOT_ID>:<CHAR_ID> -> char_id:VALUE_TBL = VALUE_TBL
        byte[] charRow = Bytes.toBytes(subId + ":" + brRecord.CHAR_ID);
        Put charPut = new Put(charRow);
        charPut.add(CHAR_FAM, Bytes.toBytes("VALUE_TBL"),
            Bytes.toBytes(brRecord.VALUE_TBL));
        // BUG FIX: the original emitted this Put under the GRADE row key,
        // so the char_id data clobbered row 1 and row 2 was never written.
        context.write(new ImmutableBytesWritable(charRow), charPut);

      } catch (Exception e) {
        // Count and report the bad record instead of swallowing it silently;
        // the job keeps running (best-effort semantics preserved).
        context.getCounter(NAME, "map.errors").increment(1);
        System.err.println(NAME + ": failed to convert record: " + e);
      }
    }
  }

  /**
   * Entry point: ensures the target HBase table exists (creating it with
   * families {@code att} and {@code char_id} if necessary), then runs one
   * map-only job per source table BR0701 .. BR0730. Stops at the first
   * failed job.
   *
   * @param args command-line arguments; see class Javadoc for usage
   * @throws IOException            on HBase/HDFS communication failure
   * @throws ParseException         declared by {@link #parseArgs}
   * @throws ClassNotFoundException if a job class cannot be resolved
   * @throws InterruptedException   if job completion waiting is interrupted
   */
  public static void main(String[] args) throws IOException, ParseException,
      ClassNotFoundException, InterruptedException {

    byte[] ATT_FAM = Bytes.toBytes("att");
    byte[] CHAR_FAM = Bytes.toBytes("char_id");

    Configuration conf = HBaseConfiguration.create();
    conf.set("hadoop.job.ugi", "hadoop,supergroup");

    HBaseAdmin admin = new HBaseAdmin(conf);

    String[] otherArgs =
      new GenericOptionsParser(conf, args).getRemainingArgs();
    CommandLine cmd = parseArgs(otherArgs);
    // Check debug flag and other options.
    if (cmd.hasOption("d")) conf.set("conf.debug", "true");
    // Get details.
    String table = cmd.getOptionValue("t");
    String column = cmd.getOptionValue("c");
    String output = cmd.getOptionValue("o");

    HTableDescriptor desc;
    if (admin.tableExists(table)) {
      // Re-enable a disabled table so the jobs can write to it.
      if (admin.isTableDisabled(table)) {
        admin.enableTable(table);
      }
      desc = admin.getTableDescriptor(Bytes.toBytes(table));
    } else {
      desc = new HTableDescriptor(table);
      desc.addFamily(new HColumnDescriptor(ATT_FAM));
      desc.addFamily(new HColumnDescriptor(CHAR_FAM));
      desc.setMaxFileSize(500000000);
      admin.createTable(desc);
    }

    // Source MySQL tables: one per day of July, BR0701 .. BR0730.
    List<String> tables = new ArrayList<String>();
    for (int day = 1; day <= 30; day++) {
      tables.add(String.format("BR07%02d", day));
    }

    for (String dbTable : tables) {
      System.out.println(dbTable);
      // Name each job after its source table so the 30 jobs are
      // distinguishable in the JobTracker UI (the original named them all
      // after the single HBase target table).
      Job job = new Job(conf, "DBConverter " + dbTable);
      job.setJarByClass(AdConverter.class);
      job.setMapperClass(DBMapper.class);
      job.setOutputFormatClass(TableOutputFormat.class);
      job.getConfiguration().set(TableOutputFormat.OUTPUT_TABLE, table);
      job.setInputFormatClass(DataDrivenDBInputFormat.class);
      job.setOutputKeyClass(ImmutableBytesWritable.class);
      // The mapper emits Put values, so declare Put (not the Writable
      // supertype) as the output value class.
      job.setOutputValueClass(Put.class);

      // initTableReducerJob also wires up HBase dependency jars; the job is
      // then made map-only so the mapper's Puts go straight to the table.
      TableMapReduceUtil.initTableReducerJob(table, null, job);
      job.setNumReduceTasks(0);

      // NOTE(review): hard-coded JDBC URL and credentials — move these to
      // the job configuration or a credentials provider.
      DBConfiguration.configureDB(job.getConfiguration(),
          "com.mysql.jdbc.Driver",
          "jdbc:mysql://datamining-node01.cs.fiu.edu:33061/pdp",
          "pdp", "pdp");

      String[] fields =
          {"SUBLOT_ID", "GRADE", "CHAR_ID", "VALUE_TBL", "RESV_FIELD4"};

      // Split the input on SUBLOT_ID; no WHERE clause (full table scan).
      DataDrivenDBInputFormat.setInput(job, BrRecord.class, dbTable, null,
          "SUBLOT_ID", fields);

      // Stop the whole import at the first failed job.
      if (!job.waitForCompletion(true)) {
        break;
      }
    }
  }

}

