package org.apache.hadoop.hive.cassandra.output.cql;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import org.apache.cassandra.thrift.Compression;
import org.apache.cassandra.thrift.CqlPreparedResult;
import org.apache.cassandra.thrift.InvalidRequestException;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.hadoop.hive.cassandra.CassandraProxyClient;
import org.apache.hadoop.hive.cassandra.serde.AbstractCassandraSerDe;
import org.apache.hadoop.mapred.JobConf;
import org.apache.thrift.TException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Writes buffered Hive rows to Cassandra through the Thrift server using a
 * single prepared CQL3 batch of INSERT statements.
 *
 * <p>Each buffered {@link CqlPut} becomes one {@code INSERT INTO <cf>(cols) VALUES (?,...)}
 * statement inside a {@code BEGIN BATCH ... APPLY BATCH} block; the column
 * values are bound positionally via the prepared-statement API.
 */
public class ThriftServerCqlMapper extends AbstractCqlMapper {

    public static final Logger LOG = LoggerFactory.getLogger(ThriftServerCqlMapper.class);

    /** Target keyspace, taken from the job configuration. */
    private final String cassandraKeySpace;
    /** Proxy connection to the Cassandra Thrift endpoint. */
    private final CassandraProxyClient client;

    /**
     * @param jobConf job configuration supplying keyspace, column family,
     *                consistency level and batch size
     * @param client  proxy client used to reach Cassandra; not owned (never
     *                closed) by this mapper
     */
    public ThriftServerCqlMapper(JobConf jobConf, CassandraProxyClient client) {
        super(jobConf);
        cassandraKeySpace = jobConf.get(AbstractCassandraSerDe.CASSANDRA_KEYSPACE_NAME);
        flevel = getConsistencyLevel(jobConf);
        batchInsertSize = jobConf.getInt(AbstractCassandraSerDe.CASSANDRA_BATCH_MUTATION_SIZE,
                DEFAULT_BATCH_INSERT_SIZE);
        this.client = client;
    }

    /**
     * Builds one prepared CQL3 batch from the rows buffered in {@code dataMap}
     * (populated by the superclass) and executes it against Cassandra.
     *
     * <p>If no rows are buffered, nothing is sent — an empty
     * {@code BEGIN BATCH APPLY BATCH} block is not valid CQL and would be
     * rejected by the server.
     *
     * @throws IOException if the Thrift call fails or the generated CQL is
     *                     rejected by Cassandra
     */
    @Override
    public void write2cassandra() throws IOException {
        StringBuilder outQueryBuilder = new StringBuilder("BEGIN BATCH ");
        // Bind variables for every '?' placeholder, in statement order.
        List<ByteBuffer> values = new ArrayList<ByteBuffer>();
        int statementCount = 0;
        for (CqlPut cqlPut : dataMap) {
            StringBuilder valuesBuilder = new StringBuilder(" VALUES (");
            StringBuilder queryBuilder = new StringBuilder(" INSERT INTO ");
            queryBuilder.append(jobConf.get(AbstractCassandraSerDe.CASSANDRA_CF_NAME));
            queryBuilder.append("(");
            Iterator<CqlColumn> iter = cqlPut.getColumns().iterator();
            while (iter.hasNext()) {
                CqlColumn column = iter.next();
                // Column names arrive as raw bytes; decode explicitly as UTF-8
                // rather than relying on the platform default charset.
                String columnName = new String(column.getColumn(), StandardCharsets.UTF_8);
                queryBuilder.append(columnName);
                valuesBuilder.append("?");
                values.add(ByteBuffer.wrap(column.getValue()));
                if (iter.hasNext()) {
                    queryBuilder.append(",");
                    valuesBuilder.append(",");
                }
            }
            queryBuilder.append(")");
            valuesBuilder.append(")");
            queryBuilder.append(valuesBuilder);
            queryBuilder.append(";");
            outQueryBuilder.append(queryBuilder);
            statementCount++;
        }
        if (statementCount == 0) {
            // Nothing buffered; an empty batch is invalid CQL.
            return;
        }
        outQueryBuilder.append(" APPLY BATCH;");
        LOG.debug("batch cql sql----> {}", outQueryBuilder);

        try {
            // TODO check compression
            client.getProxyConnection().set_keyspace(cassandraKeySpace);
            CqlPreparedResult result = client.getProxyConnection().prepare_cql3_query(
                    ByteBufferUtil.bytes(outQueryBuilder.toString()), Compression.NONE);
            client.getProxyConnection().execute_prepared_cql3_query(result.itemId, values, flevel);
        } catch (InvalidRequestException e) {
            throw new IOException(e);
        } catch (TException e) {
            throw new IOException(e);
        }
    }

}
