package dacp.etl.kafka.steam;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.TimeUnit;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.utils.Utils;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KStreamBuilder;
import org.apache.kafka.streams.kstream.KeyValueMapper;
import org.apache.kafka.streams.kstream.Predicate;
import org.apache.kafka.streams.processor.WallclockTimestampExtractor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.codahale.metrics.Counter;
import com.codahale.metrics.Slf4jReporter;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;

import dacp.etl.kafka.hdfs.connect.monitor.MRegistry;

/**
 * Kafka Streams ETL job: consumes raw '|'-delimited records from {@code topic.in},
 * applies a configurable transform chain (column split, datetime reformat,
 * character deletion, trimming, key extraction — all delegated to {@link Chain}),
 * optionally filters records with empty mandatory columns, and writes the
 * re-joined records to {@code topic.out}.
 *
 * <p>All behavior is driven by the properties file given as {@code args[0]}
 * (default {@code conf.properties}). NOTE(review): configuration is held in
 * mutable static fields, so only one instance per JVM is safe — preserved as-is
 * since {@code Chain} and the partitioner read these statics.
 */
public class S1MMEIN {

	private static final Logger log = LoggerFactory.getLogger(S1MMEIN.class);

	// Configuration key/value pairs loaded from confPath (see constructor).
	private static Map<String, String> workerProps;
	private static String confPath = "conf.properties";

	// Separator characters; all default to '|' and may be overridden via config.
	private static char INX_SPLIT_CHAR = '|';    // separates column indexes inside a config value
	private static char RECORD_SPLIT_CHAR = '|'; // splits an incoming record into fields
	private static char RECORD_JOIN_CHAR = '|';  // joins transformed fields for output
	public static char KEY_JOIN_CHAR = '|';      // joins key columns into the record key
	public static String DATETIME_FORMAT = "yyyyMMddHHmmss";
	public static String DELETE_CHAR = "f";

	// Config property names for the transform chain.
	private static String CONF_SPLIT_COLUMNS = "split.cols";
	private static String CONF_TODATETIME_COLUMNS = "to.datetime.cols";
	private static String CONF_TODATETIME_FORMAT = "to.datetime.fmt";
	private static String CONF_DELETE_CHAR_COLUMNS = "del.char.cols";
	private static String CONF_TRIM_CHAR_COLUMNS = "trim.cols";
	private static String CONF_DELETE_CHAR = "del.char";
	private static String CONF_KEY_COLUMNS = "key.cols";

	private static String CONF_FILTER_CHAR_COLUMNS = "filter.cols";

	// Config property names for the separator overrides.
	private static String CONF_INX_SPLIT_CHAR = "inx.split.char";
	private static String CONF_RECORD_SPLIT_CHAR = "record.split.char";
	private static String CONF_RECORD_JOIN_CHAR = "record.join.char";
	private static String CONF_KEY_JOIN_CHAR = "key.join.char";

	// Shared splitters/joiners; rebuilt in init() once overrides are known.
	// Public statics are read by Chain and the partitioner — names kept as-is
	// (including the historical "spliter"/"fliter" spellings) for compatibility.
	public static Splitter spliter = Splitter.on(RECORD_SPLIT_CHAR);
	public static Splitter spliterInx;
	public static Joiner joiner = Joiner.on(RECORD_JOIN_CHAR);
	public static Joiner keyJoiner = Joiner.on(KEY_JOIN_CHAR);

	public static Splitter fliterSpliter = Splitter.on(RECORD_JOIN_CHAR);

	// split: columns to be re-split by Chain.setS1
	private boolean needSplit = false;
	private Integer[] splitCols;

	// trans: columns whose values are reformatted to DATETIME_FORMAT
	private boolean needTrans = false;
	private Integer[] transCols;

	// delete: columns from which DELETE_CHAR is stripped
	private boolean needDel = false;
	private Integer[] delCols;

	// trim: columns whose values are trimmed
	private boolean needTrim = false;
	private Integer[] trimCols;

	// filter: drop/keep records based on emptiness of these columns
	private boolean needFliter = false;
	private Integer[] fliterCols;
	private Integer[] fliterInx; // positions within splitCols that are also filter columns

	// key: columns used to build the output record key
	private boolean needKey = false;
	public static Integer[] keyCols;
	public static HashSet<Integer> keyColsSet = Sets.newHashSet();

	// Kafka connection / topology configuration keys.
	private static String CONF_GROUP_ID = "group.id";
	private static String CONF_SERVER = "broker.list";
	private static String CONF_TOPIC_IN = "topic.in";
	private static String CONF_TOPIC_OUT = "topic.out";
	private static String CONF_THREAD_NUM = "thread.num";

	private static String CONF_PARTITIONER_CLASS = "partitioner.class";

	private static String DEFAULT_THREAD_NUM = "10";

	// Monitoring counters, reported periodically by the Slf4jReporter in main().
	private Counter c1 = MRegistry.get().counter("TransCount");
	private Counter cerr = MRegistry.get().counter("TransErrCount");

	/**
	 * Reads all transform-related settings from {@link #workerProps}, applies
	 * separator overrides, rebuilds the shared splitters/joiners, and resolves
	 * the column-index arrays. Must run before {@link #initKafkaStreams()}.
	 */
	private void init() {
		String confInxSC = workerProps.get(CONF_INX_SPLIT_CHAR);
		if (!Strings.isNullOrEmpty(confInxSC)) INX_SPLIT_CHAR = confInxSC.charAt(0);
		log.info("param value {} : {}", CONF_INX_SPLIT_CHAR, confInxSC);

		String confRecordSC = workerProps.get(CONF_RECORD_SPLIT_CHAR);
		if (!Strings.isNullOrEmpty(confRecordSC)) RECORD_SPLIT_CHAR = confRecordSC.charAt(0);
		log.info("param value {} : {}", CONF_RECORD_SPLIT_CHAR, RECORD_SPLIT_CHAR);

		String confRecordJC = workerProps.get(CONF_RECORD_JOIN_CHAR);
		if (!Strings.isNullOrEmpty(confRecordJC)) RECORD_JOIN_CHAR = confRecordJC.charAt(0);
		log.info("param value {} : {}", CONF_RECORD_JOIN_CHAR, RECORD_JOIN_CHAR);

		String confKeyJC = workerProps.get(CONF_KEY_JOIN_CHAR);
		if (!Strings.isNullOrEmpty(confKeyJC)) KEY_JOIN_CHAR = confKeyJC.charAt(0);
		log.info("param value {} : {}", CONF_KEY_JOIN_CHAR, KEY_JOIN_CHAR);

		// Rebuild with the (possibly overridden) separators.
		spliterInx = Splitter.on(INX_SPLIT_CHAR);
		spliter = Splitter.on(RECORD_SPLIT_CHAR);
		joiner = Joiner.on(RECORD_JOIN_CHAR);

		fliterSpliter = Splitter.on(RECORD_JOIN_CHAR);

		keyJoiner = Joiner.on(KEY_JOIN_CHAR);

		String splitColumnsStr = workerProps.get(CONF_SPLIT_COLUMNS);
		if (!Strings.isNullOrEmpty(splitColumnsStr)) {
			needSplit = true;
			splitCols = transInx(spliterInx.split(splitColumnsStr));
			log.info("param value {} : {}", CONF_SPLIT_COLUMNS, splitColumnsStr);
		}

		String toDateStr = workerProps.get(CONF_TODATETIME_COLUMNS);
		if (!Strings.isNullOrEmpty(toDateStr)) {
			needTrans = true;
			transCols = transInx(spliterInx.split(toDateStr));
			log.info("param value {} : {}", CONF_TODATETIME_COLUMNS, toDateStr);

			String dateFmt = workerProps.get(CONF_TODATETIME_FORMAT);
			if (!Strings.isNullOrEmpty(dateFmt)) DATETIME_FORMAT = dateFmt;

			log.info("param value {} : {}", CONF_TODATETIME_FORMAT, DATETIME_FORMAT);
		}

		String delStr = workerProps.get(CONF_DELETE_CHAR_COLUMNS);
		if (!Strings.isNullOrEmpty(delStr)) {
			needDel = true;
			delCols = transInx(spliterInx.split(delStr));
			log.info("param value {} : {}", CONF_DELETE_CHAR_COLUMNS, delStr);

			String deleC = workerProps.get(CONF_DELETE_CHAR);
			if (!Strings.isNullOrEmpty(deleC)) DELETE_CHAR = deleC;

			log.info("param value {} : {}", CONF_DELETE_CHAR, DELETE_CHAR);
		}

		String trimStr = workerProps.get(CONF_TRIM_CHAR_COLUMNS);
		if (!Strings.isNullOrEmpty(trimStr)) {
			needTrim = true;
			trimCols = transInx(spliterInx.split(trimStr));
			log.info("param value {} : {}", CONF_TRIM_CHAR_COLUMNS, trimStr);
		}

		String fliterStr = workerProps.get(CONF_FILTER_CHAR_COLUMNS);
		if (!Strings.isNullOrEmpty(fliterStr)) {
			fliterCols = transInx(spliterInx.split(fliterStr));
			log.info("param value {} : {}", CONF_FILTER_CHAR_COLUMNS, fliterStr);

			// Filter columns are resolved as positions within split.cols; without
			// split.cols configured there is nothing to map them onto (previously
			// this dereferenced a null splitCols and crashed on startup).
			if (splitCols == null) {
				log.warn("{} is set but {} is not; filtering disabled",
						CONF_FILTER_CHAR_COLUMNS, CONF_SPLIT_COLUMNS);
			} else {
				List<Integer> list = Lists.newArrayList();
				HashSet<Integer> set = Sets.newHashSet(fliterCols);
				for (int i = 0; i < splitCols.length; i++) {
					if (set.contains(splitCols[i])) {
						list.add(i);
					}
				}
				fliterInx = list.toArray(new Integer[0]);
				if (fliterInx.length > 0) {
					needFliter = true;
				}
			}
		}

		String keyStr = workerProps.get(CONF_KEY_COLUMNS);
		if (!Strings.isNullOrEmpty(keyStr)) {
			needKey = true;
			keyCols = transInx(spliterInx.split(keyStr));
			keyColsSet = Sets.newHashSet(Arrays.asList(keyCols));
			log.info("param value {} : {} ", CONF_KEY_COLUMNS, keyStr);
		}

	}

	/**
	 * Parses an iterable of decimal index strings into an Integer array.
	 *
	 * @param inx index strings, e.g. the result of splitting "1|3|5"
	 * @return the parsed indexes, in iteration order
	 * @throws NumberFormatException if any element is not a valid integer
	 */
	private Integer[] transInx(Iterable<String> inx) {
		List<Integer> list = Lists.newArrayList();
		Iterator<String> iterator = inx.iterator();
		while (iterator.hasNext()) {
			String next = iterator.next();
			list.add(Integer.parseInt(next));
		}
		return list.toArray(new Integer[0]);
	}

	/**
	 * Loads the worker properties from {@link #confPath} and runs {@link #init()}.
	 * A load failure is logged (previously only printStackTrace'd) and leaves
	 * {@code workerProps} null; initKafkaStreams() will then fail its
	 * precondition checks rather than silently run unconfigured.
	 */
	public S1MMEIN() {

		try {
			workerProps = !confPath.isEmpty() ? Utils.propsToStringMap(Utils.loadProps(confPath))
					: Collections.<String, String>emptyMap();

			init();
		} catch (IOException e) {
			// FileNotFoundException is an IOException; one handler covers both.
			log.error("failed to load worker properties from {}", confPath, e);
		}
	}

	/**
	 * Builds and starts the streams topology:
	 * topicIn -&gt; map(Chain transform) -&gt; drop failed records
	 * [-&gt; keep records with an empty filter column] -&gt; join fields -&gt; topicOut.
	 */
	public void initKafkaStreams() {

		String applicationId = workerProps.get(CONF_GROUP_ID);
		String bootstrapServers = workerProps.get(CONF_SERVER);
		String topicIn = workerProps.get(CONF_TOPIC_IN);
		String topicOut = workerProps.get(CONF_TOPIC_OUT);

		String threadNum = workerProps.get(CONF_THREAD_NUM);

		String partitionerClass = workerProps.get(CONF_PARTITIONER_CLASS);

		log.info("param value {} : {}", CONF_GROUP_ID, applicationId);
		log.info("param value {} : {}", CONF_SERVER, bootstrapServers);
		log.info("param value {} : {}", CONF_TOPIC_IN, topicIn);
		log.info("param value {} : {}", CONF_TOPIC_OUT, topicOut);
		log.info("param value {} : {}", CONF_THREAD_NUM, threadNum);
		log.info("param value {} : {}", CONF_PARTITIONER_CLASS, partitionerClass);

		Preconditions.checkNotNull(applicationId);
		Preconditions.checkNotNull(bootstrapServers);
		Preconditions.checkNotNull(topicIn);
		Preconditions.checkNotNull(topicOut);

		if (!Strings.isNullOrEmpty(threadNum)) {
			DEFAULT_THREAD_NUM = threadNum;
		}
		log.info("param value {} : {}", CONF_THREAD_NUM, DEFAULT_THREAD_NUM);

		Properties props = new Properties();
		props.put(StreamsConfig.APPLICATION_ID_CONFIG, applicationId);
		props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
		props.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
		props.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
		props.put(StreamsConfig.TIMESTAMP_EXTRACTOR_CLASS_CONFIG, WallclockTimestampExtractor.class);
		props.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, DEFAULT_THREAD_NUM);

		if (!Strings.isNullOrEmpty(partitionerClass)) {
			props.put(ProducerConfig.PARTITIONER_CLASS_CONFIG, partitionerClass);
		}

		KStreamBuilder builder = new KStreamBuilder();

		KStream<String, String> stream = builder.stream(topicIn);

		KeyValueMapper<String, String, KeyValue<String, ArrayList<String>>> mapper =
				new KeyValueMapper<String, String, KeyValue<String, ArrayList<String>>>() {
			private Chain chain;
					// Instance initializer: assemble the transform chain once per
					// mapper, in fixed stage order (split, datetime, delete, trim).
					{
						if (needSplit) {
							Chain.setS1(splitCols);
						}
						if (needTrans) {
							Chain.setS2(transCols);
						}
						if (needDel) {
							Chain.setS3(delCols);
						}
						if (needTrim) {
							Chain.setS4(trimCols);
						}
						chain = Chain.build();
					}
			@SuppressWarnings("unchecked") // Chain.processPair returns {String, ArrayList<String>}
			@Override
			public KeyValue<String, ArrayList<String>> apply(String key, String value) {
				if (value != null) {
					try {
						String newKey = key;
						ArrayList<String> val = null;
						if (!needKey) {
							val = chain.process(value);
						} else {
							// processPair additionally extracts the record key.
							Object[] keyVal = chain.processPair(value);
							newKey = (String) keyVal[0];
							val = (ArrayList<String>) keyVal[1];
						}
						c1.inc();
						return new KeyValue<>(newKey, val);
					} catch (Exception e) {
						cerr.inc();
					}
				}
				// Null input or failed transform: emit a null-VALUED pair (a null
				// KeyValue would NPE inside the streams runtime and kill the
				// thread); the filter below drops these records.
				return new KeyValue<>(key, null);
			}
		};

		KStream<String, ArrayList<String>> map = stream.map(mapper);

		// Drop records whose transformation failed before any downstream stage
		// (the value filter and joiner both assume a non-null list).
		map = map.filter(new Predicate<String, ArrayList<String>>() {
			@Override
			public boolean test(String key, ArrayList<String> value) {
				return value != null;
			}
		});

		if (needFliter) {
			// Keep only records where at least one configured filter column is
			// empty. NOTE(review): this forwards the *incomplete* records to
			// topicOut — preserved as-is; confirm this is the intended routing.
			map = map.filter(new Predicate<String, ArrayList<String>>() {
				@Override
				public boolean test(String key, ArrayList<String> value) {
					int size = value.size();
					for (int inx : fliterInx) {
						if (size > inx && Strings.isNullOrEmpty(value.get(inx))) {
							return true;
						}
					}
					return false;
				}
			});
		}

		map.map(new KeyValueMapper<String, ArrayList<String>, KeyValue<String, String>>() {
			@Override
			public KeyValue<String, String> apply(String key, ArrayList<String> value) {
				return new KeyValue<>(key, joiner.join(value));
			}
		}).to(topicOut);

		final KafkaStreams streams = new KafkaStreams(builder, props);
		streams.start();

		// Close cleanly on JVM shutdown so consumer offsets/state are committed.
		Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
			@Override
			public void run() {
				streams.close();
			}
		}));
	}

	/**
	 * Entry point: {@code args[0]} (optional) overrides the config path.
	 * Starts a metrics reporter that logs counter values every 60 seconds,
	 * then builds and starts the streams job.
	 */
	public static void main(String[] args) {
		if (args.length > 0) {
			confPath = args[0];
		}
		final Slf4jReporter reporter = Slf4jReporter.forRegistry(MRegistry.get())
                .outputTo(LoggerFactory.getLogger("dacp.etl.kafka.stream.Monitor"))
                .convertRatesTo(TimeUnit.SECONDS)
                .convertDurationsTo(TimeUnit.MILLISECONDS)
                .build();
		reporter.start(60, TimeUnit.SECONDS);

		new S1MMEIN().initKafkaStreams();
	}
}
