package edu.gatech.PI2Hbase;

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintWriter;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Properties;

import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.lang.exception.NestableException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapreduce.Job;



public class PI2HbaseAdapter {
	
	public  static final String Tags_TABLE_NAME = "PIData";
	
	public  static final String PIReduce_TABLE_NAME = "PiReduce";
	
	public static final String Tags_FAMILY = "Info";

	private static final String [] INFO_NAMES = { 
		    "Server", "Collective", "Value", "Engineering Units", "Descriptor"
		  }; 
	
	private final HBaseConfiguration conf; 
	
	private final HBaseAdmin admin; 
	
//	private final HBaseConfiguration confVc; 
	
//	private final HBaseAdmin adminVc; 
	
	private final HTableDescriptor Tags_desc; 
	
	private final HTableDescriptor PIReduce_desc;
	
	private final HTable table;
	
	private static final Log LOG = LogFactory.getLog(PI2HbaseAdapter.class.getName());
	
	private int MaxVersion = Integer.MAX_VALUE;
	
	private PropertiesConfiguration config;
	
	private String[] tags;
	
	public PI2HbaseAdapter() throws IOException, NestableException
	{
		conf  		= new HBaseConfiguration();
//		conf.set("hbase.zookeeper.quorum", "localhost:2181");
		admin 		= new HBaseAdmin(conf);
/*		
		confVc  		= new HBaseConfiguration();
		confVc.set("hbase.zookeeper.quorum", "10.2.42.7:2181");
		confVc.set("hbase.rootdir", "hdfs://10.2.42.7:54310/hbase");
		confVc.set("hbase.master", "10.2.42.7:6000");
		confVc.set("hbase.cluster.distributed","true");
		adminVc 		= new HBaseAdmin(confVc);
*/
		Tags_desc	    = new HTableDescriptor(Tags_TABLE_NAME);
		PIReduce_desc	= new HTableDescriptor(PIReduce_TABLE_NAME);
		config = new PropertiesConfiguration("tag.properties");
		tags = config.getStringArray("tag.name"); 
		table 		= CreatTable();	
	}
	
	
	public  HBaseConfiguration getConf() {
		return conf;
	}
/*	
	public HBaseConfiguration getConfVc() {
		return confVc;
	}

*/


	public HTable CreatTable()throws IOException, NestableException
	{
		
		HColumnDescriptor column_desc = new HColumnDescriptor(Tags_FAMILY);
		column_desc.setMaxVersions(MaxVersion);
		
		Tags_desc.addFamily(column_desc);
		if(!admin.tableExists(Tags_TABLE_NAME))
		{
			admin.createTable(Tags_desc);
			LOG.info("PIData Table Created");
		}
		else
			LOG.info("PIData Table Has been existed");
		
		PIReduce_desc.addFamily(column_desc);
		if(!admin.tableExists(PIReduce_TABLE_NAME))
		{
			admin.createTable(PIReduce_desc);
			LOG.info("PIReduce Table Created");
		}
		else
			LOG.info("PIReduce Table Has been existed");
		
		HTable Table=new HTable(conf,Tags_TABLE_NAME);
		
		return Table;
		
	}
	
	
	public void put(String tagname, String qualifier,long ts, String value) throws IOException
	{
		Put put = new Put(Bytes.toBytes(tagname));
		
        put.add(Bytes.toBytes(Tags_FAMILY),Bytes.toBytes(qualifier),ts,
        		value.getBytes());
        
        table.put(put);		
		
	}
	
	public String get(String tagName, String qualifier) throws IOException
	{
		String value = null;
		Get get = new Get(Bytes.toBytes(tagName));
		get.addColumn(Bytes.toBytes(Tags_FAMILY),Bytes.toBytes(qualifier));
		Result r=table.get(get);
		System.out.println("Result by qualifier:"+r);
		for(KeyValue kvs:r.raw())
		 {
			 System.out.println(Bytes.toString(kvs.getFamily())+":"+Bytes.toString(kvs.getQualifier()) + ", "
		              + Bytes.toString(kvs.getValue()));
			 value = Bytes.toString(kvs.getValue());
		 }
		return value;
	}


	public PropertiesConfiguration getConfig() {
		return config;
	}


	public String[] getTags() {
		return tags;
	}


	public static String[] getInfoNames() {
		return INFO_NAMES;
	}




	public static void main(String[] args) throws IOException, Exception {
		// arguments: java getSnap url2 mytagsearchpattern
		// Example: java getSnap mydas mypi sin%
		PI2HbaseAdapter piadapter = new PI2HbaseAdapter();
		PropertiesConfiguration config = piadapter.getConfig();
		String[]  tags = piadapter.getTags();
		
		Connection con = null;
		Properties plist = new Properties();
		plist.put("user", "LabVisitor");
		plist.put("password", "Guest-user123");
		String url = "jdbc:pisql://" + config.getString("pidas") + "/Data Source=" + config.getString("piserver") + "; Integrated Security=SSPI";
		String driver = "com.osisoft.jdbc.Driver";
		Statement stmt;
		ResultSet rs;
		
		//configure for HBase job
		HBaseConfiguration confMap = piadapter.getConf();
//		HBaseConfiguration confReduce = piadapter.getConfVc();
		
		Job jobMap = new Job(confMap,"PI_MapReduce");
//		Job jobReduce = new Job(confReduce,"PI_Reduce");
		jobMap.setJarByClass(PiMapreduce.class);
//		jobReduce.setJarByClass(PiMapreduce.class);
		
	    jobMap.setOutputFormatClass(TableOutputFormat.class);
//	    FileOutputFormat.setOutputPath(jobMap, new Path("/tmp"));
	  
//	    jobReduce.setOutputFormatClass(TableOutputFormat.class);
//	    FileOutputFormat.setOutputPath(jobMap, new Path("/tmp"));
	    
		Scan scan = new Scan();
		scan.addColumns("Info");
		scan.setFilter(new FirstKeyOnlyFilter());
        TableMapReduceUtil.initTableMapperJob("PIData", scan, PiMapreduce.Mapper.class, ImmutableBytesWritable.class,
                Put.class, jobMap);

        TableMapReduceUtil.initTableReducerJob("PiReduce", PiMapreduce.Reducer.class, jobMap);	 

        
		try 
		{
			 Class.forName(driver).newInstance();
			 con = DriverManager.getConnection(url, plist);		
			 stmt = con.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
	         DatabaseMetaData md = con.getMetaData();
			 System.out.println(md.getDriverName() + " " + md.getDriverVersion()); 
			 System.out.println(md.getDatabaseProductName());
			 System.out.println(md.getDatabaseProductVersion() + "\n");	
			 FileOutputStream filestream = new FileOutputStream("/tmp/translatency");
			 PrintWriter fout = new PrintWriter(filestream);
			 
			 while(true)
			 {
				 for(int i=0;i<tags.length;i++)
				 {
					rs = stmt.executeQuery("SELECT TOP 1 tag, time, value FROM picomp2 WHERE tag = '" + tags[i] + "' ORDER BY tag, time DESC" );
			    	while (rs.next())
			    	{
						String value, time,tag;
						tag = rs.getString(1);
						time= rs.getString(2);
						value = rs.getString(3);
						if(value.contains("+"))
						{
							value = value.substring(value.indexOf('+')+1, value.indexOf('+')+6);
						}
						System.out.println(tag+" "+time+"  "+value);
						DateFormat formatter = new SimpleDateFormat("EEE MMM dd HH:mm:ss z yyyy");
						Date date= formatter.parse(time);
						piadapter.put(tag, "Value", date.getTime(), value);
						fout.println(System.currentTimeMillis()-date.getTime());
	//					piadapter.put(tag, "Value", System.currentTimeMillis(), value);
					}
				 }
				 
				 //the map-reduce work should begin after the write process
				System.out.println("Time before Map:"+System.currentTimeMillis());
				 if(jobMap.waitForCompletion(true))
					 System.out.println("Time after Map:"+System.currentTimeMillis());
/*				 
				System.out.println("Time before reduce:"+System.currentTimeMillis());
				 if(jobReduce.waitForCompletion(true))
					 System.out.println("Time after reduce:"+System.currentTimeMillis());
*/				 
				Thread.sleep(60000);//let the system sleep for 5s,
			 }
		}
		catch (Exception e) {
			e.printStackTrace();
		}
		finally {
			if (con != null) {
				try { con.close (); }
				catch (SQLException e) {e.printStackTrace();}
			}
		}	

	}	

}
