package org.apache.hadoop.hive.cassandra.output.cql;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;

import org.apache.cassandra.thrift.ConsistencyLevel;
import org.apache.hadoop.hive.cassandra.serde.AbstractCassandraSerDe;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.JobConf;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Base class for mappers that buffer {@link CqlPut} rows and flush them to
 * Cassandra in batches of {@code batchInsertSize}.
 *
 * <p>The buffer ({@code dataMap}) is {@code static}, i.e. shared by every
 * mapper instance in this JVM, so all access to it is synchronized on the
 * list itself. Subclasses implement {@link #write2cassandra()} to perform
 * the actual batch insert of the currently buffered rows.
 */
public abstract class AbstractCqlMapper {

	private final static Logger LOGGER = LoggerFactory.getLogger(AbstractCqlMapper.class);

	/** Fallback batch size when the job configuration does not supply one. */
	protected final int DEFAULT_BATCH_INSERT_SIZE = 100;
	protected int batchInsertSize = DEFAULT_BATCH_INSERT_SIZE;
	protected JobConf jobConf;
	protected ConsistencyLevel flevel;
	// JVM-wide shared buffer of pending rows. Always access inside
	// synchronized (dataMap) — Hadoop may run several mappers concurrently.
	protected final static List<CqlPut> dataMap = new ArrayList<CqlPut>();
	// Running total of rows flushed to Cassandra by this JVM (for logging).
	protected static AtomicLong allData = new AtomicLong(0);

	/**
	 * @param jobConf job configuration; the batch size is read from
	 *                {@link AbstractCassandraSerDe#CASSANDRA_BATCH_MUTATION_SIZE}.
	 */
	public AbstractCqlMapper(JobConf jobConf) {
		super();
		this.jobConf = jobConf;
		batchInsertSize = jobConf.getInt(AbstractCassandraSerDe.CASSANDRA_BATCH_MUTATION_SIZE, DEFAULT_BATCH_INSERT_SIZE);
	}

	/**
	 * Resolves the Cassandra write consistency level from the job
	 * configuration.
	 *
	 * @param jc job configuration to read the level name from
	 * @return the configured level, or {@link ConsistencyLevel#ONE} when the
	 *         configured name is not a valid enum constant
	 */
	protected ConsistencyLevel getConsistencyLevel(JobConf jc) {
		String consistencyLevel = jc.get(AbstractCassandraSerDe.CASSANDRA_CONSISTENCY_LEVEL, AbstractCassandraSerDe.DEFAULT_CONSISTENCY_LEVEL);
		try {
			return ConsistencyLevel.valueOf(consistencyLevel);
		} catch (IllegalArgumentException e) {
			// Misconfiguration should not be silent: record what was rejected.
			LOGGER.warn("Unknown consistency level '{}', falling back to ONE", consistencyLevel);
			return ConsistencyLevel.ONE;
		}
	}

	/**
	 * Buffers one row and, once the buffer reaches {@code batchInsertSize},
	 * flushes it via {@link #write2cassandra()} and clears the buffer.
	 *
	 * @param writable the row to write; must be a {@link CqlPut}
	 * @throws IOException if the batch insert fails
	 */
	public void writeToCassandra(Writable writable) throws IOException {
		CqlPut put = (CqlPut) writable;
		// dataMap is shared static state: add, size check and flush must be
		// one atomic step or concurrent mappers corrupt the batch.
		synchronized (dataMap) {
			dataMap.add(put);
			int size = dataMap.size();
			if (batchInsertSize == size) {
				this.write2cassandra();
				dataMap.clear();
				allData.addAndGet(size);
				LOGGER.info("{} batch insert 2 cassandra {}:data", Thread.currentThread().getName(), allData.get());
			}
		}
	}

	/**
	 * Flushes any remaining buffered rows at the end of the job and clears
	 * the buffer so a later task in the same JVM cannot re-write them.
	 *
	 * @throws IOException if the final batch insert fails
	 */
	public void finishWriteJob() throws IOException {
		synchronized (dataMap) {
			if (!dataMap.isEmpty()) {
				int size = dataMap.size();
				this.write2cassandra();
				// Was missing: without this the shared static buffer survives
				// into the next flush and the rows are written twice.
				dataMap.clear();
				allData.addAndGet(size);
				LOGGER.info("{} flush {}:data and all batch insert 2 cassandra {}:data", Thread.currentThread().getName(), size, allData.get());
			}
		}
	}

	/**
	 * Sends the currently buffered rows in {@code dataMap} to Cassandra.
	 * Called with the buffer lock held; implementations must not clear the
	 * buffer themselves.
	 *
	 * @throws IOException if the insert fails
	 */
	public abstract void write2cassandra() throws IOException;
}
