package edu.indiana.d2i.util.hector;

import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.nio.charset.CharacterCodingException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;

import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.InputSplit;

import edu.indiana.d2i.mapreduce.HectorConfigHelper;
import edu.indiana.d2i.mapreduce.HectorColumnFamilySplit;

import me.prettyprint.cassandra.serializers.BytesArraySerializer;
import me.prettyprint.cassandra.serializers.StringSerializer;
import me.prettyprint.cassandra.service.CassandraHostConfigurator;
import me.prettyprint.cassandra.service.ThriftCluster;
import me.prettyprint.hector.api.Cluster;
import me.prettyprint.hector.api.Keyspace;
import me.prettyprint.hector.api.beans.HColumn;
import me.prettyprint.hector.api.beans.HSuperColumn;
import me.prettyprint.hector.api.beans.SuperRow;
import me.prettyprint.hector.api.beans.SuperRows;
import me.prettyprint.hector.api.factory.HFactory;
import me.prettyprint.hector.api.query.MultigetSuperSliceQuery;
import me.prettyprint.hector.api.query.QueryResult;

/**
 * Hector-based client for reading HTRC volume page contents out of a
 * Cassandra super column family. Row keys are volume ids; each super column
 * is a zero-padded 8-digit page number whose "contents" column holds the
 * page text. Keys are consumed batch-wise, {@code maxGrabSize} per
 * {@link #read()} call.
 */
public class HTRCClient implements HectorHadoopClient {
	// Connection defaults; all overridable through the fluent setters below.
	private String KEYSPACE_NAME = "Yimdata";
	private String CASSANDRA_EPR = "coffeetree.cs.indiana.edu";
	private String CASSANDRA_CLUSTER_NAME = "Yiming Coffeetree Cluster";
	private String CASSANDRA_PORT = "9160"; // renamed: was misspelled CASSDANDRA_PORT
	private String SUPER_COLUMN_FAMILY = "VolumeContents";
	private String REG_COLUMN_NAME = "contents";

	// Every possible zero-padded page super-column name ("00000001" ..
	// "00032767"). Built once instead of being regenerated on every read()
	// call, which the original version did (it was even tagged "fix me").
	private static final List<String> PAGE_COLUMN_NAMES;
	static {
		List<String> names = new ArrayList<String>(Short.MAX_VALUE);
		for (int i = 1; i <= Short.MAX_VALUE; i++) {
			names.add(String.format("%08d", i));
		}
		PAGE_COLUMN_NAMES = names;
	}

	private Cluster cluster = null;
	private Keyspace keyspaceOperator = null;
	protected InputSplit split = null;

	// Row keys (volume ids) this client will read, consumed batch-wise.
	private List<String> keyList = null;
	// Index into keyList of the next unread key.
	private int currentIndex = 0;
	// Number of row keys fetched per read() call.
	int maxGrabSize = 1;

	/** Overrides the Cassandra host address. Returns {@code this} for chaining. */
	public HTRCClient setCassandraEPR(String epr) {
		CASSANDRA_EPR = epr;
		return this;
	}

	/** Overrides the Cassandra cluster name. Returns {@code this} for chaining. */
	public HTRCClient setClusterName(String cluster) {
		CASSANDRA_CLUSTER_NAME = cluster;
		return this;
	}

	/** Overrides the Cassandra RPC port. Returns {@code this} for chaining. */
	public HTRCClient setCassandraPort(String port) {
		CASSANDRA_PORT = port;
		return this;
	}

	/** Overrides the keyspace name. Returns {@code this} for chaining. */
	public HTRCClient setKeySpace(String keyspace) {
		KEYSPACE_NAME = keyspace;
		return this;
	}

	/** Sets the number of row keys fetched per {@link #read()} call. */
	public HTRCClient setMaxGrabSize(int maxGrabSize) {
		this.maxGrabSize = maxGrabSize;
		return this;
	}

	/**
	 * Connects to the (default or setter-configured) cluster and records the
	 * row keys to read. Use with the no-arg constructor.
	 *
	 * @param keylist volume ids to read
	 */
	public void buildClient(List<String> keylist) {
		cluster = new ThriftCluster(CASSANDRA_CLUSTER_NAME,
				new CassandraHostConfigurator(
						CASSANDRA_EPR + ":"	+ CASSANDRA_PORT));
		keyspaceOperator = HFactory.createKeyspace(KEYSPACE_NAME, cluster);
		this.keyList = keylist;
	}

	/**
	 * Hadoop-side constructor: connects using job configuration and takes the
	 * row keys from the input split.
	 *
	 * @param conf  Hadoop job configuration holding cluster/keyspace settings
	 * @param split must be a {@code HectorColumnFamilySplit} carrying row keys
	 * @throws CharacterCodingException if a split key is not valid UTF-8
	 */
	public HTRCClient(Configuration conf, InputSplit split) throws CharacterCodingException {
		cluster = new ThriftCluster(HectorConfigHelper.getClusterName(conf),
				new CassandraHostConfigurator(
						HectorConfigHelper.getInitialAddress(conf) + ":"
								+ HectorConfigHelper.getRpcPort(conf)));
		keyspaceOperator = HFactory.createKeyspace(
				HectorConfigHelper.getInputKeyspace(conf), cluster);
		this.split = split;
		// BUG FIX: keyList was never initialized in this constructor, so the
		// add() below threw a NullPointerException on every Hadoop-driven
		// instantiation.
		keyList = new ArrayList<String>();
		List<ByteBuffer> list = ((HectorColumnFamilySplit) split).getKeyList();
		for (ByteBuffer byteBuffer : list) {
			keyList.add(ByteBufferUtil.string(byteBuffer));
		}
		maxGrabSize = HectorConfigHelper.getRangeBatchSize(conf);
	}

	public HTRCClient() {}

	/** Shuts down the connection pool. Safe to call if never connected. */
	public void close() {
		if (cluster != null) {
			cluster.getConnectionManager().shutdown();
		}
	}

	/**
	 * Reads the next batch of up to {@code maxGrabSize} volumes. For each row
	 * key, collects the "contents" column of every 8-digit page super column,
	 * in page order.
	 *
	 * @return map from volume key to its ordered page-content columns; rows
	 *         for keys already consumed are not re-read
	 */
	public SortedMap<String, List<HColumn<?, byte[]>>> read() {
		StringSerializer stringSerializer = StringSerializer.get();
		BytesArraySerializer byteSerializer = BytesArraySerializer.get();
		MultigetSuperSliceQuery<String, String, String, byte[]> query = HFactory
				.createMultigetSuperSliceQuery(keyspaceOperator,
						stringSerializer, stringSerializer, stringSerializer,
						byteSerializer);
		query.setColumnFamily(SUPER_COLUMN_FAMILY);

		// Slice out the next batch of row keys and advance the cursor.
		int end = Math.min(keyList.size(), currentIndex + maxGrabSize);
		List<String> keyslice = new ArrayList<String>(end - currentIndex);
		for (int i = currentIndex; i < end; i++) {
			keyslice.add(keyList.get(i));
		}
		query.setKeys(keyslice);
		currentIndex = end;

		// Request every possible page super column by name.
		// NOTE(review): a range query (setRange) would avoid shipping ~32K
		// column names per request — confirm against the Hector API before
		// switching.
		query.setColumnNames(PAGE_COLUMN_NAMES);

		QueryResult<SuperRows<String, String, String, byte[]>> result = query
				.execute();
		Iterator<SuperRow<String, String, String, byte[]>> iterator = result
				.get().iterator();

		SortedMap<String, List<HColumn<?, byte[]>>> readResult =
			new TreeMap<String, List<HColumn<?, byte[]>>>();

		while (iterator.hasNext()) {
			SuperRow<String, String, String, byte[]> superRow = iterator
					.next();
			List<HSuperColumn<String, String, byte[]>> superColumns = superRow
					.getSuperSlice().getSuperColumns();

			List<HColumn<?, byte[]>> values = new ArrayList<HColumn<?, byte[]>>();

			// Loop over the pages: keep only super columns that look like
			// page numbers, and within each only the "contents" column.
			for (HSuperColumn<String, String, byte[]> hSuperColumn : superColumns) {
				if (hSuperColumn.getName().matches("\\d{8}")) {
					List<HColumn<String, byte[]>> columns = hSuperColumn
							.getColumns();
					for (HColumn<String, byte[]> hColumn : columns) {
						if (hColumn.getName().equals(REG_COLUMN_NAME)) {
							values.add(hColumn);
						}
					}
				}
			}
			readResult.put(superRow.getKey(), values);
		}
		return readResult;
	}

	/**
	 * Reads the next batch of volumes and concatenates each volume's page
	 * contents (decoded as UTF-8) into a single string.
	 *
	 * @return map from volume key to its full concatenated text
	 * @throws UnsupportedEncodingException never in practice (UTF-8 is
	 *         mandated by the JVM spec); kept for interface compatibility
	 */
	public Map<String, String> readVolumeContents() throws UnsupportedEncodingException {
		Map<String, String> result = new HashMap<String, String>();
		Map<String, List<HColumn<?, byte[]>>> contents = this.read();
		for (Map.Entry<String, List<HColumn<?, byte[]>>> entry : contents.entrySet()) {
			StringBuilder builder = new StringBuilder();
			for (HColumn<?, byte[]> element : entry.getValue()) {
				builder.append(new String(element.getValue(), "UTF-8"));
			}
			result.put(entry.getKey(), builder.toString());
		}

		return result;
	}

	/* test */
	public static void main(String[] args) throws Exception {
		String[] keylist = {"loc.ark:/13960/t55d8xj45", "loc.ark:/13960/t55d8xk8j",
				"loc.ark:/13960/t55d8xm23", "loc.ark:/13960/t55d8xn4h"};
		HTRCClient client = new HTRCClient();
		client.buildClient(Arrays.asList(keylist));

		Map<String, String> volume = client.readVolumeContents();
		String id = volume.keySet().iterator().next();
		System.out.println(volume.get(id));

		client.close();
	}
}
