package cn.ilikes.tools.hbase.jpa.clients.datasource;

import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.HTablePool;
import org.slf4j.Logger;

import cn.ilikes.tools.hbase.jpa.SysConfigs;
import cn.ilikes.tools.hbase.jpa.clients.admins.JPAHbaseAdmin;
import cn.ilikes.tools.hbase.jpa.metadatas.sub.configs.Datastorefactory.DataStore;
import cn.ilikes.tools.logs.Loggerfactory;

/**
 * HBASE Data Source
 * 
 * @author fack
 * 
 */
public class HbaseDataSource extends AbDataSource {

	/** Loggers should be static final: one per class, never reassigned. */
	private static final Logger logger = Loggerfactory.instance(HbaseDataSource.class);

	/** Default size of the HTable pool when no pool size is configured. */
	private static final int DEFAULT_POOL_SIZE = 100;

	/** Hadoop configuration key for the ZooKeeper quorum hosts. */
	private static final String HBASE_ZOOKEEPER_QUORUM = "hbase.zookeeper.quorum";

	/** Hadoop configuration key for the ZooKeeper client port. */
	private static final String HBASE_ZOOKEEPER_PROPERTY_CLIENT_PORT = "hbase.zookeeper.property.clientPort";

	/**
	 * The shared HTable pool. Declared volatile so the double-checked locking
	 * in {@link #createTablePool(DataStore)} is safe under the Java Memory Model:
	 * without volatile, a thread could observe a partially constructed pool.
	 */
	private volatile HTablePool hTablePool;

	/** Hadoop configuration built from the data-store connection properties. */
	private Configuration hadoopConf;

	/**
	 * Creates the data source and eagerly initializes the HTable pool from the
	 * given data-store configuration.
	 *
	 * @param dataStore connection configuration; must not be {@code null}
	 * @throws JPAHBaseDataSourceRuntimeException if required properties are missing
	 */
	public HbaseDataSource(DataStore dataStore) {
		super(dataStore);
		init(dataStore);
	}

	/**
	 * Borrows an HTable handle for the given table name from the pool.
	 *
	 * @param tableName the HBase table name to look up
	 * @return a pooled {@link HTableInterface} for {@code tableName}
	 */
	public HTableInterface findHTable(String tableName) {
		if (Loggerfactory.isdebug(logger))
			Loggerfactory.debug(logger, "find tableName : " + tableName + " HTable ");
		return hTablePool.getTable(tableName);
	}

	/** Initialization hook: currently only builds the HTable pool. */
	private void init(DataStore dataStore) {
		createTablePool(dataStore);
	}

	/** @return the Hadoop configuration backing this data source's pool */
	public Configuration getHadoopConf() {
		return hadoopConf;
	}

	private void setHadoopConf(Configuration hadoopConf) {
		this.hadoopConf = hadoopConf;
	}

	/**
	 * Validates that all HBase master properties are present.
	 *
	 * @throws JPAHBaseDataSourceRuntimeException naming the first missing property
	 */
	private void validateMaster(DataStore dataStore) {
		Properties properties = dataStore.getConnection().getProperties();
		String node = properties.getProperty(SysConfigs.hbase_datasore_master_node);
		String port = properties.getProperty(SysConfigs.hbase_datasore_master_port);
		String poolSize = properties.getProperty(SysConfigs.hbase_datasore_master_pool_size);

		if (node == null)
			throw new JPAHBaseDataSourceRuntimeException("dataStore  properties  " + SysConfigs.hbase_datasore_master_node + " not set value ");

		if (port == null)
			throw new JPAHBaseDataSourceRuntimeException("dataStore  properties  " + SysConfigs.hbase_datasore_master_port + " not set value ");

		if (poolSize == null)
			throw new JPAHBaseDataSourceRuntimeException("dataStore  properties  " + SysConfigs.hbase_datasore_master_pool_size + " not set value ");
	}

	/**
	 * Validates that the ZooKeeper quorum host and client port are present.
	 *
	 * @throws JPAHBaseDataSourceRuntimeException naming the first missing property
	 */
	private void validateZookeeper(DataStore dataStore) {
		Properties properties = dataStore.getConnection().getProperties();
		String zookeeperHost = properties.getProperty(SysConfigs.hbase_datasore_zookeeper_quorum);
		String zookeeperPort = properties.getProperty(SysConfigs.hbase_datasore_zookeeper_client_port);

		if (zookeeperHost == null)
			throw new JPAHBaseDataSourceRuntimeException("dataStore  properties  " + SysConfigs.hbase_datasore_zookeeper_quorum + " not set value ");

		if (zookeeperPort == null)
			throw new JPAHBaseDataSourceRuntimeException("dataStore  properties  " + SysConfigs.hbase_datasore_zookeeper_client_port + " not set value ");
	}

	/**
	 * Lazily builds the shared HTable pool using double-checked locking.
	 * The null check is repeated inside the synchronized block (the original
	 * omitted it, so two racing threads could each build a pool), and the
	 * field is volatile so the publication is safe.
	 */
	private void createTablePool(DataStore dataStore) {
		if (hTablePool == null) {
			synchronized (this) {
				// Re-check under the lock: another thread may have won the race.
				if (hTablePool != null)
					return;
				if (dataStore == null)
					throw new JPAHBaseDataSourceRuntimeException("dataStore is null");
				Loggerfactory.info(logger, "start  create " + dataStore.getName() + " hbase table Pool ! ");
				validateMaster(dataStore);

				Properties properties = dataStore.getConnection().getProperties();
				String node = properties.getProperty(SysConfigs.hbase_datasore_master_node);
				String port = properties.getProperty(SysConfigs.hbase_datasore_master_port);
				String poolSize = properties.getProperty(SysConfigs.hbase_datasore_master_pool_size);
				Loggerfactory.info(logger, "hbase table Pool config   node : " + node + " port : " + port + "  poolSize : " + poolSize);

				// Local renamed (was "hadoopConf") so it no longer shadows the field.
				Configuration conf = new Configuration();
				conf.set("hbase.master", node + ":" + port);

				validateZookeeper(dataStore);
				String zookeeperHost = properties.getProperty(SysConfigs.hbase_datasore_zookeeper_quorum);
				String zookeeperPort = properties.getProperty(SysConfigs.hbase_datasore_zookeeper_client_port);

				Loggerfactory.info(logger, "hbase zookeeper  host : " + zookeeperHost + " port : " + zookeeperPort);

				// validateZookeeper() guarantees non-null, but keep the original
				// fallbacks (master node host / standard ZK port 2181) defensively.
				conf.set(HBASE_ZOOKEEPER_QUORUM, zookeeperHost != null ? zookeeperHost : node);
				conf.set(HBASE_ZOOKEEPER_PROPERTY_CLIENT_PORT, zookeeperPort != null ? zookeeperPort : "2181");

				int size;
				try {
					// validateMaster() guarantees poolSize non-null; still guard parse.
					size = poolSize != null ? Integer.parseInt(poolSize) : DEFAULT_POOL_SIZE;
				} catch (NumberFormatException e) {
					throw new JPAHBaseDataSourceRuntimeException("dataStore  properties  " + SysConfigs.hbase_datasore_master_pool_size + " is not a number : " + poolSize, e);
				}
				// Local renamed (was "hTablePool") so it no longer shadows the field.
				HTablePool pool = new HTablePool(conf, size);
				this.sethTablePool(pool);
				this.setHadoopConf(conf);
			}
		}
	}

	@Override
	public HbaseDataWrite getDataWrite() {
		return (HbaseDataWrite) super.getDataWrite();
	}

	@Override
	public HbaseDataRead getDataRead() {
		return (HbaseDataRead) super.getDataRead();
	}

	@Override
	protected DataWrite createDataWrite() {
		return new HbaseDataWrite();
	}

	@Override
	protected DataRead createDataRead() {
		return new HbaseDataRead();
	}

	private void sethTablePool(HTablePool hTablePool) {
		this.hTablePool = hTablePool;
	}

}