package dacp.etl.kafka.hdfs.tools;

import java.io.IOException;
import java.io.InvalidObjectException;
import java.rmi.NoSuchObjectException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hive.hcatalog.common.HCatUtil;
import org.apache.thrift.TException;

import com.google.common.collect.Lists;

import io.confluent.connect.hdfs.errors.HiveMetaStoreException;

/**
 * Command-line utility for managing Hive table partitions through the Hive
 * MetaStore thrift service.
 *
 * <p>Usage: {@code addpartition [<db>] <table> <path>} — registers the given
 * HDFS path as a partition of the table (database defaults to "default" when
 * omitted) and, in the 4-argument form, prints the locations of up to two
 * existing partitions.
 *
 * <p>Not thread-safe: the metastore client is held in static state.
 */
public class HiveOperation {
	private static IMetaStoreClient client;

	/** Metastore URI used unless overridden via {@code -Dhive.metastore.uris=...}. */
	private static final String DEFAULT_METASTORE_URIS = "thrift://hbbdcs-nn-02:9083";

	/** Utility class — not instantiable. */
	private HiveOperation() {
	}

	/**
	 * Entry point. Connects to the metastore, dispatches on {@code args[0]},
	 * and always closes the client connection on exit.
	 *
	 * @param args command-line arguments, see class Javadoc
	 * @throws Exception on any metastore or argument failure
	 */
	public static void main(String[] args) throws Exception {
		Configuration conf = new Configuration();

		HiveConf hiveConf = new HiveConf(conf, HiveConf.class);
		// Allow the metastore location to be overridden at launch without
		// recompiling; falls back to the original hard-coded URI.
		hiveConf.set("hive.metastore.uris",
				System.getProperty("hive.metastore.uris", DEFAULT_METASTORE_URIS));
		try {
			client = HCatUtil.getHiveMetastoreClient(hiveConf);

			if (args.length > 0 && "addpartition".equals(args[0])) {
				System.out.println("====add partition====");
				if (args.length == 3) {
					// addpartition <table> <path> — database defaults to "default"
					addPartition2("default", args[1], args[2]);
				} else if (args.length == 4) {
					// addpartition <db> <table> <path>
					addPartition2(args[1], args[2], args[3]);

					// Echo up to two existing partition locations as a sanity check.
					List<String> listPartitions = listPartitions(args[1], args[2], (short) 2);
					for (String ps : listPartitions) {
						System.out.println(ps);
					}
				}
			}
		} catch (IOException | MetaException e) {
			throw new HiveMetaStoreException(e);
		} finally {
			// Fix: the metastore client was previously never closed, leaking the
			// thrift connection on every run.
			if (client != null) {
				client.close();
			}
		}
	}

	/**
	 * Appends a partition at {@code path} to {@code tableName} in the
	 * "default" database.
	 *
	 * @param tableName target table name
	 * @param path      partition location/spec understood by
	 *                  {@code appendPartition}
	 */
	public static void addPartition(final String tableName, final String path) {
		addPartition("default", tableName, path);
	}

	/**
	 * Registers {@code path} as a new partition of {@code dbName.tblName},
	 * deriving the partition values from {@code key=value} segments of the
	 * path (see {@link #getVals(String)}).
	 *
	 * @param dbName  database name
	 * @param tblName table name
	 * @param path    partition location; its {@code key=value} path segments
	 *                supply the partition values
	 * @throws TException on any metastore/thrift failure (includes
	 *                    MetaException, NoSuchObjectException, etc.)
	 */
	public static void addPartition2(final String dbName, final String tblName, final String path)
			throws MetaException, org.apache.hadoop.hive.metastore.api.NoSuchObjectException,
			TException, InvalidObjectException {
		Table table = client.getTable(dbName, tblName);
		add_partition(table, getVals(path), path);
	}

	/**
	 * Lists the storage locations of up to {@code max} partitions of the
	 * given table.
	 *
	 * @param database  database name
	 * @param tableName table name
	 * @param max       maximum number of partitions to return
	 * @return partition SD locations, possibly empty
	 * @throws HiveMetaStoreException wrapping any metastore/thrift failure
	 */
	public static List<String> listPartitions(final String database, final String tableName, final short max) throws HiveMetaStoreException {
	    ClientAction<List<String>> listPartitions = new ClientAction<List<String>>() {
	      @Override
	      public List<String> call() throws TException {
	        List<Partition> partitions = client.listPartitions(database, tableName, max);
	        List<String> paths = new ArrayList<>(partitions.size());
	        for (Partition partition : partitions) {
	          paths.add(partition.getSd().getLocation());
	        }
	        return paths;
	      }
	    };
	    try {
	      return doAction(listPartitions);
	    } catch (MetaException e) {
	      throw new HiveMetaStoreException("Hive MetaStore exception", e);
	    } catch (TException e) {
	      throw new HiveMetaStoreException("Exception communicating with the Hive MetaStore", e);
	    }
	  }

	/**
	 * Extracts partition values from a path: every {@code /}-separated segment
	 * of the form {@code key=value} contributes its {@code value}, in order.
	 * Segments without exactly one {@code =} are ignored.
	 *
	 * <p>Example: {@code "/warehouse/t/dt=2020-01-01/hr=05"} → {@code ["2020-01-01", "05"]}.
	 *
	 * @param location partition location path
	 * @return ordered list of partition values (empty if none found)
	 */
	public static List<String> getVals(String location){
		List<String> list = Lists.newArrayList();
		for (String segment : location.split("/")) {
			String[] kv = segment.split("=");
			if (kv.length == 2) {
				list.add(kv[1]);
			}
		}
		return list;
	}

	/**
	 * Builds a {@link Partition} object from the table's storage descriptor
	 * (deep-copied, with only the location replaced) and registers it.
	 *
	 * @param table    parent table supplying db/table names and SD template
	 * @param vals     ordered partition column values
	 * @param location HDFS location of the new partition
	 * @throws TException on any metastore/thrift failure
	 */
	private static void add_partition(Table table,
		      List<String> vals, String location) throws InvalidObjectException,
		        AlreadyExistsException, MetaException, TException {

		    Partition part = new Partition();
		    part.setDbName(table.getDbName());
		    part.setTableName(table.getTableName());
		    part.setValues(vals);
		    part.setParameters(new HashMap<String, String>());
		    // Deep-copy the table SD so mutating the location does not touch
		    // the table's own storage descriptor.
		    part.setSd(table.getSd().deepCopy());
		    part.getSd().setSerdeInfo(table.getSd().getSerdeInfo());
		    part.getSd().setLocation(location);

		    client.add_partition(part);
		  }

	/**
	 * Appends a partition at {@code path} to {@code database.tableName}.
	 * An already-existing partition is treated as success.
	 *
	 * @param database  database name
	 * @param tableName table name
	 * @param path      partition location/spec for {@code appendPartition}
	 * @throws HiveMetaStoreException wrapping any metastore/thrift failure
	 *                                other than "already exists"
	 */
	public static void addPartition(final String database, final String tableName, final String path)
			throws HiveMetaStoreException {
		ClientAction<Void> addPartition = new ClientAction<Void>() {
			@Override
			public Void call() throws TException {
				client.appendPartition(database, tableName, path);

				return null;
			}
		};

		try {
			doAction(addPartition);
		} catch (AlreadyExistsException ignored) {
			// Idempotent by design: a pre-existing partition is not an error.
		} catch (MetaException e) {
			throw new HiveMetaStoreException("Hive MetaStore exception", e);
		} catch (TException e) {
			throw new HiveMetaStoreException("Exception communicating with the Hive MetaStore", e);
		}
	}

	/**
	 * @deprecated never implemented — intentional no-op kept only for
	 *             source compatibility; use
	 *             {@link #addPartition(String, String, String)} instead.
	 */
	@Deprecated
	public static void addPartition() {
		// Intentionally empty.
	}

	/** A metastore operation that may fail with a thrift-level exception. */
	private interface ClientAction<R> {
		R call() throws TException;
	}

	/**
	 * Executes a {@link ClientAction}. Kept as a single choke point so
	 * retry/logging policy can be added in one place later.
	 */
	private static <R> R doAction(ClientAction<R> action) throws TException {
		return action.call();
	}

}