package org.apache.hadoop.hive.cassandra.output.cql;

import java.io.IOException;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.Map.Entry;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputFormat;
import org.apache.hadoop.mapred.RecordWriter;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.util.Progressable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Hive output format that writes table rows to Cassandra via CQL.
 *
 * <p>Rows are sent to Cassandra rather than HDFS, so there is no output
 * path/spec to validate. Hive obtains writers exclusively through
 * {@link #getHiveRecordWriter}; the plain MapReduce
 * {@link #getRecordWriter} entry point is intentionally unsupported.
 */
public class HiveCqlOutputFormat implements HiveOutputFormat<Text, CqlPut>, OutputFormat<Text, CqlPut> {

    static final Logger LOGGER = LoggerFactory.getLogger(HiveCqlOutputFormat.class);

    /**
     * Creates the record writer Hive uses to emit rows for this table.
     *
     * @param jc              job configuration carrying the Cassandra connection settings
     * @param finalOutPath    final output path (unused — output goes to Cassandra, not HDFS)
     * @param valueClass      writable value class (unused)
     * @param isCompressed    compression flag (unused)
     * @param tableProperties Hive table properties passed to the writer
     * @param progress        progress reporter forwarded to the writer
     * @return a {@link CqlRecordWriter} bound to this job's configuration
     * @throws IOException if the writer cannot be created
     */
    @Override
    public FileSinkOperator.RecordWriter getHiveRecordWriter(final JobConf jc, Path finalOutPath,
            Class<? extends Writable> valueClass, boolean isCompressed, Properties tableProperties,
            Progressable progress) throws IOException {
        return new CqlRecordWriter(jc, tableProperties, progress);
    }

    /**
     * Intentionally a no-op: output is written to Cassandra, so there is no
     * filesystem output directory or spec to validate here.
     */
    @Override
    public void checkOutputSpecs(FileSystem arg0, JobConf jc) throws IOException {
        // Nothing to validate for Cassandra-backed output.
    }

    /**
     * Not supported — Hive must always go through {@link #getHiveRecordWriter}.
     *
     * @throws RuntimeException always, to flag an unexpected code path
     */
    @Override
    public RecordWriter<Text, CqlPut> getRecordWriter(FileSystem arg0, JobConf arg1, String arg2,
            Progressable arg3) throws IOException {
        throw new RuntimeException("Error: Hive should not invoke this method.");
    }
}
