package org.apache.hadoop.hive.cassandra.output.cql;

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

import org.apache.hadoop.hive.cassandra.serde.AbstractCassandraSerDe;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.mapred.JobConf;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.datastax.driver.core.BatchStatement;
import com.datastax.driver.core.BoundStatement;
import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Session;

/**
 * Writes rows buffered by {@code AbstractCqlMapper} to Cassandra through a
 * DataStax driver {@link Session}, using a prepared INSERT statement executed
 * as a single batch.
 */
public class DatastaxCqlMapper extends AbstractCqlMapper {

	public static final Logger LOG = LoggerFactory.getLogger(DatastaxCqlMapper.class);

	// Driver session used to prepare and execute statements; lifecycle is owned by the caller.
	private final Session session;

	// Hive column name -> Hive column type, parsed from the job configuration.
	// NOTE(review): currently populated but never consulted when binding values —
	// all values are bound as raw bytes. Retained for future typed binding.
	private final Map<String, String> columnTypeByName = new HashMap<String, String>();

	/**
	 * Creates a mapper bound to the given session and job configuration.
	 *
	 * @param session open Cassandra session used for prepare/execute
	 * @param jobConf Hadoop job configuration; must contain the Cassandra
	 *                column-mapping and column-type-mapping properties
	 * @throws IllegalStateException if either mapping property is missing or
	 *                               the two mappings have different lengths
	 */
	public DatastaxCqlMapper(Session session, JobConf jobConf) {
		super(jobConf);
		this.session = session;

		String columnsStr = jobConf.get(AbstractCassandraSerDe.CASSANDRA_COL_MAPPING);
		String columnTypesStr = jobConf.get(AbstractCassandraSerDe.CASSANDRA_COL_TYPE_MAPPING);
		// Was System.out.println debug output; use the class logger instead.
		LOG.debug("column mapping: {}", columnsStr);
		LOG.debug("column type mapping: {}", columnTypesStr);

		// Guard against missing configuration: split() on null would NPE with no context.
		if (columnsStr == null || columnTypesStr == null) {
			throw new IllegalStateException("Missing job configuration property "
					+ AbstractCassandraSerDe.CASSANDRA_COL_MAPPING + " or "
					+ AbstractCassandraSerDe.CASSANDRA_COL_TYPE_MAPPING);
		}

		// Column names are comma-separated, types are colon-separated (Hive convention
		// as configured upstream — kept exactly as the original parsing).
		String[] columnNames = columnsStr.split(",");
		String[] columnTypes = columnTypesStr.split(":");
		if (columnNames.length != columnTypes.length) {
			// Original message (in Chinese) reported a name/type length mismatch;
			// include the actual counts to make the failure diagnosable.
			throw new IllegalStateException("Hive column name count (" + columnNames.length
					+ ") does not match column type count (" + columnTypes.length + ")");
		}
		for (int i = 0; i < columnNames.length; i++) {
			columnTypeByName.put(columnNames[i], columnTypes[i]);
		}
	}

	/**
	 * Flushes all buffered rows ({@code dataMap}) to Cassandra as one batch.
	 *
	 * <p>The INSERT statement is built from the columns of the first buffered row;
	 * assumes every row carries the same column set in the same order — TODO confirm
	 * against the writer that populates {@code dataMap}.
	 */
	@Override
	public void write2cassandra() {
		if (dataMap.isEmpty()) {
			return;
		}
		String cql = buildInsertCql(dataMap.get(0));
		LOG.info("sql is :{}", cql);
		PreparedStatement ps = session.prepare(cql);
		BatchStatement batch = new BatchStatement();
		for (CqlPut cqlPut : dataMap) {
			BoundStatement bs = ps.bind();
			for (CqlColumn column : cqlPut.getColumns()) {
				// Decode the column name explicitly as UTF-8; new String(byte[])
				// would use the platform default charset.
				String columnName = new String(column.getColumn(), StandardCharsets.UTF_8);
				// All values are bound as raw bytes, matching the original behavior.
				bs.setBytes(columnName, ByteBuffer.wrap(column.getValue()));
			}
			batch.add(bs);
		}
		ResultSet rs = session.execute(batch);
		LOG.info("batch insert result:{}", rs.toString());
	}

	/**
	 * Builds {@code insert into <cf>(c1,c2,...) VALUES (?,?,...)} from the
	 * columns of one row. Values are bound via placeholders; table and column
	 * names cannot be bound parameters in CQL, so they are concatenated from
	 * the (trusted) job configuration.
	 */
	private String buildInsertCql(CqlPut row) {
		StringBuilder names = new StringBuilder();
		StringBuilder placeholders = new StringBuilder();
		Iterator<CqlColumn> it = row.getColumns().iterator();
		while (it.hasNext()) {
			CqlColumn column = it.next();
			names.append(new String(column.getColumn(), StandardCharsets.UTF_8));
			placeholders.append('?');
			if (it.hasNext()) {
				names.append(',');
				placeholders.append(',');
			}
		}
		return "insert into " + jobConf.get(AbstractCassandraSerDe.CASSANDRA_CF_NAME)
				+ "(" + names + ") VALUES (" + placeholders + ")";
	}
}
