package cn.edu.thu.laud.thrift.service;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Random;

import org.antlr.runtime.RecognitionException;
import org.apache.cassandra.cql.LaUDQueryProcessor;
import org.apache.cassandra.service.ClientState;
import org.apache.cassandra.thrift.Cassandra;
import org.apache.cassandra.thrift.CassandraServer;
import org.apache.cassandra.thrift.Column;
import org.apache.cassandra.thrift.ColumnParent;
import org.apache.cassandra.thrift.ColumnPath;
import org.apache.cassandra.thrift.Compression;
import org.apache.cassandra.thrift.ConsistencyLevel;
import org.apache.cassandra.thrift.CqlResult;
import org.apache.cassandra.thrift.InvalidRequestException;
import org.apache.cassandra.thrift.NotFoundException;
import org.apache.cassandra.thrift.SchemaDisagreementException;
import org.apache.cassandra.thrift.TimedOutException;
import org.apache.cassandra.thrift.UnavailableException;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.service.HiveServerException;
import org.apache.thrift.TException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import cn.edu.thu.laud.hive.service.HiveServer.HiveServerHandler;
import org.apache.hadoop.hive.service.ThriftHive;
import cn.edu.thu.laud.jdbc.util.LaUDJdbcServerUtils;
import cn.edu.thu.laud.jdbc.util.LaUDJdbcUtils;
import cn.edu.thu.laud.server.thrift.FileInfo;
import cn.edu.thu.laud.server.thrift.LaUD;
import cn.edu.thu.laud.server.thrift.LaUDColumnLoc;
import cn.edu.thu.laud.server.thrift.LaUDResult;
import cn.edu.thu.laud.server.thrift.LaUDServerException;
import cn.edu.thu.laud.thrift.LaUDCassandraServer;
import cn.edu.thu.laud.thrift.LaUDDaemon;
import cn.edu.thu.laud.thrift.LaUDDataServer;
import cn.edu.thu.laud.thrift.LaUDServer;
import cn.edu.thu.laud.thrift.customed.CustomedLaUDProcessor;
import cn.edu.thu.laud.thrift.service.ServiceFileInfo.FileStoreType;

public class LaudIfaceImpl implements LaUD.Iface {
	static Logger logger = LoggerFactory.getLogger(LaudIfaceImpl.class);

	/** Marker suffix that routes a query to Hive (OLAP) instead of Cassandra (CQL). */
	private static final String OLAP_SUFFIX = "with olap";

	/** Shared PRNG used to build pseudo-unique file check codes. */
	private static Random random = new Random(System.currentTimeMillis());

	/** Configuration used to lazily create a Hive handler per connection. */
	private HiveConf hiveConf;

	/** Backing Cassandra interface; must be set via {@link #initCassandraIface} after construction. */
	private Cassandra.Iface cassandraIface;

	/**
	 * One processor per thread, so that each connection owns a personal
	 * Hive interface rather than sharing a single handler.
	 */
	private ThreadLocal<CustomedLaUDProcessor> processorLocal = new ThreadLocal<CustomedLaUDProcessor>();

	/** Per-connection client state, supplied by the owning server at construction time. */
	private ThreadLocal<ClientState> clientState;

	/**
	 * Returns the Cassandra-side client state of the current connection, or
	 * {@code null} when {@link #initCassandraIface} has not been called yet.
	 */
	public ClientState state() {
		if (cassandraIface != null) {
			return ((LaUDCassandraServer) cassandraIface).state();
		}
		// TODO surface a proper error; for now callers must tolerate null
		logger.warn("state() called before initCassandraIface(); returning null");
		return null;
	}

	/**
	 * After construction, {@link #initCassandraIface} must be called before any
	 * Cassandra-backed method is used.
	 *
	 * @param conf        Hive configuration used to build per-connection Hive handlers
	 * @param clientState per-connection client state shared with the server
	 */
	public LaudIfaceImpl(HiveConf conf, ThreadLocal<ClientState> clientState) {
		this.hiveConf = conf;
		this.clientState = clientState;
	}

	/** Wires in the Cassandra interface; required before {@link #state()} or any query method. */
	public void initCassandraIface(Cassandra.Iface cIface) {
		cassandraIface = cIface;
	}

	/** @return the client state bound to the calling thread */
	public ClientState getClientState() {
		return clientState.get();
	}

	/** Not implemented yet; always returns {@code null}. */
	@Override
	public LaUDColumnLoc get_column_name(ByteBuffer key,
			ColumnParent column_parent, int location, ByteBuffer lastName,
			int lastLoc, ConsistencyLevel consistency_level)
					throws InvalidRequestException, UnavailableException,
					TimedOutException, TException {
		// TODO Auto-generated method stub
		return null;
	}

	/**
	 * Executes a HiveQL statement through this connection's Hive handler,
	 * creating the handler lazily on first use.
	 *
	 * @param query HiveQL text to execute
	 * @throws HiveServerException if the metastore or the Hive execution fails
	 */
	@Override
	public void executeHiveQl(String query) throws HiveServerException,
	TException {
		try {
			ThriftHive.Iface hiveServerHandler = this.getProcessor().getHiveIface();
			if (hiveServerHandler == null) {
				hiveServerHandler = new HiveServerHandler(hiveConf);
				this.getProcessor().setHiveIface(hiveServerHandler);
				logger.info("create a new hiveServerHandler");
			}
			hiveServerHandler.execute(query);
		} catch (MetaException e) {
			// BUG FIX: the original built a HiveServerException but never threw it,
			// silently swallowing metastore failures; it also printed the stack
			// trace to stderr instead of logging it.
			logger.error("failed to create Hive handler for query", e);
			HiveServerException exception = new HiveServerException();
			exception.setMessage(e.getMessage());
			exception.setStackTrace(e.getStackTrace());
			throw exception;
		}
	}

	/** Executes a LaUD query with no bind parameters. */
	@Override
	public LaUDResult execute(ByteBuffer query, Compression compression)
			throws UnavailableException, InvalidRequestException, TimedOutException,
			SchemaDisagreementException, HiveServerException, LaUDServerException, TException {
		return this.execute_with_parameters(query, compression, Collections.<ByteBuffer> emptyList());
	}

	/** Passes a raw CQL query straight through to Cassandra. */
	@Override
	public CqlResult executeCql(ByteBuffer query, Compression compression)
			throws InvalidRequestException, UnavailableException,
			TimedOutException, SchemaDisagreementException, TException {
		return cassandraIface.execute_cql_query(query, compression);
	}

	/**
	 * Runs a CQL query with bind parameters through the LaUD query processor.
	 *
	 * @throws InvalidRequestException if the CQL string cannot be parsed
	 */
	protected LaUDResult executeCql(String query, List<ByteBuffer> parameters)
			throws UnavailableException, InvalidRequestException, TimedOutException,
			SchemaDisagreementException {
		try {
			return LaUDQueryProcessor.process(query,
					((CassandraServer) cassandraIface).state(), parameters);
		} catch (RecognitionException e) {
			InvalidRequestException ire = new InvalidRequestException("Invalid or malformed CQL query string");
			ire.initCause(e);
			throw ire;
		}
	}

	public HiveConf getHiveConf() {
		return hiveConf;
	}

	public void setHiveConf(HiveConf hiveConf) {
		this.hiveConf = hiveConf;
	}

	/** @return the processor bound to the calling thread, or {@code null} if none set */
	public CustomedLaUDProcessor getProcessor() {
		return this.processorLocal.get();
	}

	public void setProcessor(CustomedLaUDProcessor processor) {
		this.processorLocal.set(processor);
	}

	/** @return the session id of the calling connection's client state */
	@Override
	public String get_laud_session_id() throws LaUDServerException, TException {
		return ((LaUDClientState) clientState.get()).getSessionId();
	}

	/**
	 * Builds a hard-coded list of sample file infos.
	 * NOTE(review): the paths are fixed test fixtures — presumably placeholder
	 * data pending a real implementation; confirm before shipping.
	 */
	protected List<FileInfo> getFileInfos() {
		List<FileInfo> list = new ArrayList<FileInfo>();
		FileInfo info = new FileInfo()
				.setCheckcode("" + System.currentTimeMillis())
				.setInfo("/mnt/wine/temp/temp/1.jpg");
		list.add(info);
		info = new FileInfo()
				.setCheckcode("" + System.currentTimeMillis() + random.nextInt())
				.setInfo("/mnt/wine/temp/temp/1.txt");
		list.add(info);
		return list;
	}

	/**
	 * Builds a hard-coded single-folder file info list.
	 * NOTE(review): fixed test path — see {@link #getFileInfos()}.
	 */
	protected List<FileInfo> getFolderFileInfos() {
		List<FileInfo> list = new ArrayList<FileInfo>();
		FileInfo info = new FileInfo()
				.setCheckcode("" + System.currentTimeMillis() + random.nextInt())
				.setInfo("/mnt/wine/temp/temp");
		list.add(info);
		return list;
	}

	/**
	 * Builds a hard-coded download-file info list.
	 * NOTE(review): fixed test path — see {@link #getFileInfos()}.
	 */
	protected List<FileInfo> getDownloadFiles() {
		FileInfo info = new FileInfo()
				.setCheckcode("" + System.currentTimeMillis() + random.nextInt())
				.setInfo("/tmp/hdfsresult/testks1/user1/");
		return Collections.singletonList(info);
	}

	public ThreadLocal<ClientState> getThreadLocalClientState() {
		return this.clientState;
	}

	/**
	 * Stores previously-scheduled file data into the Cassandra key-value store.
	 *
	 * @param sessionId  session handle previously issued to the client
	 * @param checkcode  one-shot transfer token obtained when the write was scheduled
	 * @param insertdata payload to store
	 * @return true if the insert was performed, false if the checkcode is empty
	 *         or the schedule does not describe a KV write
	 * @throws InvalidRequestException on an unknown session or checkcode
	 */
	@Override
	public boolean insert_into_kv(String sessionId, String checkcode,
			ByteBuffer insertdata) throws InvalidRequestException,
			UnavailableException, TimedOutException,
			SchemaDisagreementException, HiveServerException,
			LaUDServerException, TException {
		LaUDClientState state = LaUDDataServer.authorizedUsers.get(sessionId);
		if (state == null) {
			logger.debug("wrong sessionId,can not get user session.");
			throw new InvalidRequestException("wrong sessionId,can not get user session.");
		}
		// null-safe emptiness check (original NPE'd on a null checkcode)
		if (checkcode == null || checkcode.isEmpty()) {
			logger.debug("empty checkcode.");
			return false;
		}
		ServiceFileInfo info = state.getFileInfo(checkcode);
		if (info == null) {
			logger.debug("wrong checkCode,not allowed user to transfer files.");
			throw new InvalidRequestException("wrong checkCode,not allowed user to transfer files.");
		}
		if (info.getType().equals(FileStoreType.C_KV)
				&& info.getDirection() == LaUDJdbcUtils.WRITE_INTO_CASSANDRA) {
			boolean insert = LaUDDaemon.instance.getCassandraServer().file_internal_insert(
					state, info.keyBuffer, info.columnFamily, info.columnNameBuffer,
					insertdata, info.attrs);
			// the schedule entry is one-shot: drop it once the transfer completed
			state.removeFileDataSchedule(checkcode);
			return insert;
		}
		return false;
	}

	/**
	 * Reads previously-scheduled file data back from the Cassandra key-value store.
	 *
	 * @param sessionId session handle previously issued to the client
	 * @param checkcode transfer token obtained when the read was scheduled
	 * @return the stored value, or {@code null} if the checkcode is empty or the
	 *         schedule does not describe a KV read
	 * @throws InvalidRequestException on an unknown session or checkcode
	 */
	@Override
	public ByteBuffer get_from_kv(String sessionId, String checkcode)
			throws InvalidRequestException, NotFoundException,
			UnavailableException, TimedOutException, LaUDServerException,
			TException {
		LaUDClientState state = LaUDDataServer.authorizedUsers.get(sessionId);
		if (state == null) {
			logger.debug("wrong sessionId,can not get user session.");
			throw new InvalidRequestException("wrong sessionId,can not get user session.");
		}
		// null-safe emptiness check (original NPE'd on a null checkcode)
		if (checkcode == null || checkcode.isEmpty()) {
			logger.debug("empty checkcode.");
			return null;
		}
		ServiceFileInfo info = state.getFileInfo(checkcode);
		if (info == null) {
			logger.debug("wrong checkCode,not allowed user to transfer files.");
			throw new InvalidRequestException("wrong checkCode,not allowed user to transfer files.");
		}
		if (info.getType().equals(FileStoreType.C_KV)
				&& info.getDirection() == LaUDJdbcUtils.READ_FROM_CASSANDRA) {
			// TODO FIXME — default to ConsistencyLevel.ONE when no attrs were scheduled
			Column column = LaUDDaemon.instance.getCassandraServer().file_internal_get(
					this.getClientState(), info.keyBuffer,
					new ColumnPath(info.columnFamily).setColumn(info.columnNameBuffer),
					info.attrs == null ? ConsistencyLevel.ONE : info.attrs.getConsistencyLevel());
			return column.value;
		}
		return null;
	}

	/**
	 * Dispatches a (possibly compressed) query: statements ending in
	 * {@code "with olap"} go to Hive (suffix stripped), everything else goes
	 * to the CQL processor with the supplied bind parameters.
	 */
	@Override
	public LaUDResult execute_with_parameters(ByteBuffer query,
			Compression compression, List<ByteBuffer> parameters)
			throws InvalidRequestException, UnavailableException,
			TimedOutException, SchemaDisagreementException,
			HiveServerException, LaUDServerException, TException {
		String lasql = LaUDJdbcServerUtils.uncompress(query, compression);
		if (lasql.endsWith(OLAP_SUFFIX)) {
			// BUG FIX: strip the marker from the END of the statement; the original
			// used indexOf(), which would truncate at an earlier occurrence of
			// "with olap" inside the query text.
			String hiveQl = lasql.substring(0, lasql.length() - OLAP_SUFFIX.length());
			if (LaUDServer.includeHive) {
				this.executeHiveQl(hiveQl);
			}
			return new LaUDResult().setType(LaUDJdbcUtils.TYPE_HIVE);
		}
		return executeCql(lasql, parameters);
	}
}
