package org.apache.hadoop.hive.cassandra.serde.cql;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.Set;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.cassandra.serde.AbstractCassandraSerDe;
import org.apache.hadoop.hive.serde2.SerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.SerDeStats;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.AbstractPrimitiveJavaObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.AbstractPrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Hive SerDe that maps Hive rows to/from Cassandra CQL data represented as a
 * {@link MapWritable} keyed by column name ({@link Text}) with Writable values.
 *
 * <p>Deserialization turns every column value into its {@code toString()} form;
 * serialization supports Hive primitive types only (see {@link #serialize}).
 * This class is not thread-safe; Hive uses one SerDe instance per task.
 */
public class CqlMapSerde implements SerDe {

  private static final Logger LOGGER = LoggerFactory.getLogger(CqlMapSerde.class);

  /** Column names handled by this SerDe, in declaration order. */
  private List<String> columns;

  /** Row inspector: one struct field per entry in {@link #columns}. */
  private StructObjectInspector objectInspector;

  /** Hive type of each column, parallel to {@link #columns}. */
  List<TypeInfo> columnTypes;

  /** Cached {@code columns.size()}. */
  int numColumns;

  /**
   * Reads the column layout from the table properties and builds the row
   * object inspector.
   *
   * <p>Note: {@code conf} is non-null when the table is created and null when
   * an INSERT is executed, so only {@code tbl} is consulted here.
   *
   * <p>Column names come from {@code cassandra.columns.mapping} when present
   * (each mapped name is validated against the Hive column list), otherwise
   * from the Hive column list itself.
   *
   * @param conf job configuration (may be null; unused)
   * @param tbl  table properties supplying column names and types
   * @throws SerDeException if a mapped column is missing from the Hive table,
   *                        or a column has a non-primitive type
   */
  @Override
  public void initialize(Configuration conf, Properties tbl) throws SerDeException {
    LOGGER.info("系统列类型:{}", tbl.getProperty(org.apache.hadoop.hive.serde.Constants.LIST_COLUMN_TYPES));
    LOGGER.info("系统列名称:{}", tbl.getProperty(org.apache.hadoop.hive.serde.Constants.LIST_COLUMNS));

    String hiveColumnsStr = tbl.getProperty(org.apache.hadoop.hive.serde.Constants.LIST_COLUMNS);
    String columnsStr = tbl.getProperty(AbstractCassandraSerDe.CASSANDRA_COL_MAPPING);
    if (columnsStr == null) {
      // No explicit mapping: use the Hive column list as-is.
      columnsStr = hiveColumnsStr;
    } else {
      // Every mapped column must exist in the Hive table definition.
      Set<String> hiveColumnSet = new HashSet<String>(Arrays.asList(hiveColumnsStr.split(",")));
      for (String name : columnsStr.split(",")) {
        if (!hiveColumnSet.contains(name)) {
          // was RuntimeException; SerDeException is the declared contract
          throw new SerDeException("列名:" + name + " 在hive表不存在:" + hiveColumnsStr);
        }
      }
    }

    String columnTypeProperty = tbl.getProperty(org.apache.hadoop.hive.serde.Constants.LIST_COLUMN_TYPES);
    columns = Arrays.asList(columnsStr.split(","));
    columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(columnTypeProperty);
    numColumns = columns.size();
    // NOTE(review): columnTypes is derived from the full Hive type list; if the
    // Cassandra mapping lists fewer columns, the two lists may disagree in
    // length — confirm callers always map all columns.

    List<ObjectInspector> columnOIs = new ArrayList<ObjectInspector>(numColumns);
    for (int c = 0; c < numColumns; c++) {
      TypeInfo typeInfo = columnTypes.get(c);
      if (!(typeInfo instanceof PrimitiveTypeInfo)) {
        throw new SerDeException(getClass().getName()
            + " doesn't allow column [" + c + "] named "
            + columns.get(c) + " with type " + typeInfo);
      }
      AbstractPrimitiveJavaObjectInspector oi =
          PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector((PrimitiveTypeInfo) typeInfo);
      columnOIs.add(oi);
    }
    objectInspector = ObjectInspectorFactory.getColumnarStructObjectInspector(columns, columnOIs);
  }

  /**
   * Converts a {@link MapWritable} read from Cassandra into a Hive row.
   *
   * <p>Each configured column is looked up by name; present values are
   * converted via {@code toString()}, missing columns become {@code null}.
   *
   * @param blob must be a {@link MapWritable}
   * @return a {@code List<Object>} of string values (or nulls), one per column
   * @throws SerDeException if {@code blob} is not a {@link MapWritable}
   */
  @Override
  public Object deserialize(Writable blob) throws SerDeException {
    if (!(blob instanceof MapWritable)) {
      throw new SerDeException(getClass().getName() + ": expects MapWritable not " + blob.getClass().getName());
    }
    MapWritable columnMap = (MapWritable) blob;
    List<Object> row = new ArrayList<Object>(numColumns);
    for (String column : columns) {
      // Single lookup instead of containsKey + get.
      Writable value = columnMap.get(new Text(column));
      row.add(value == null ? null : value.toString());
    }
    return row;
  }

  /** @return the row inspector built by {@link #initialize}. */
  @Override
  public ObjectInspector getObjectInspector() throws SerDeException {
    return objectInspector;
  }

  /** No statistics are collected by this SerDe. */
  @Override
  public SerDeStats getSerDeStats() {
    return null;
  }

  /** Rows are serialized as {@link MapWritable} (column name → value). */
  @Override
  public Class<? extends Writable> getSerializedClass() {
    return MapWritable.class;
  }

  /**
   * Converts a Hive row into a {@link MapWritable} keyed by column name.
   *
   * <p>Only Hive primitive types are supported. Null string columns are
   * written as {@link NullWritable}; null non-string columns are currently
   * coerced to {@code IntWritable(0)} (all non-strings treated as numbers).
   *
   * @param obj          the row object
   * @param objInspector must be a {@link StructObjectInspector} whose field
   *                     count matches the configured column count
   * @return the populated {@link MapWritable}
   * @throws SerDeException if the field count does not match the column count
   */
  @Override
  public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
    final StructObjectInspector structInspector = (StructObjectInspector) objInspector;
    final List<? extends StructField> fields = structInspector.getAllStructFieldRefs();
    if (fields.size() != columns.size()) {
      throw new SerDeException(String.format("Required %d columns, received %d.", columns.size(), fields.size()));
    }
    MapWritable hiveData = new MapWritable();
    final int fieldCount = fields.size();
    for (int c = 0; c < fieldCount; c++) {
      final StructField structField = fields.get(c);
      if (structField == null) {
        continue;
      }
      final Object field = structInspector.getStructFieldData(obj, structField);
      // TODO: currently only hive primitive types are supported
      final AbstractPrimitiveObjectInspector fieldOI =
          (AbstractPrimitiveObjectInspector) structField.getFieldObjectInspector();
      Writable value = (Writable) fieldOI.getPrimitiveWritableObject(field);
      if (value == null) {
        if (PrimitiveCategory.STRING.equals(fieldOI.getPrimitiveCategory())) {
          value = NullWritable.get();
        } else {
          // TODO: all non-string types are treated as numbers; null becomes 0
          value = new IntWritable(0);
        }
      }
      hiveData.put(new Text(columns.get(c)), value);
    }
    // Removed leftover debug state: a Set<Long> field previously accumulated
    // every LongWritable value across calls, growing without bound.
    return hiveData;
  }

}
