package org.eking.bigdata.kafka;

import java.util.Arrays;
import java.util.Properties;
 
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KStreamBuilder;
import org.apache.kafka.streams.processor.Processor;
import org.apache.kafka.streams.processor.ProcessorContext;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.KeyValueStore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A low-level Kafka Streams {@link Processor} that maintains running word counts.
 *
 * <p>For each input record, {@link #process(String, String)} splits the value on
 * single spaces and increments a per-word counter in the state store named
 * {@code "counts"}. Every 1000 ms, {@link #punctuate(long)} forwards all
 * accumulated (word, count) pairs downstream and commits processing progress.
 *
 * <p>The topology that wires this processor must register a
 * {@code KeyValueStore<String, Long>} under the name {@code "counts"}.
 */
public class kafkastream implements Processor<String, String>{

	private ProcessorContext context;
	private KeyValueStore<String, Long> kvStore;
	static final Logger logger = LoggerFactory.getLogger(kafkastream.class);

	@Override
	@SuppressWarnings("unchecked")
	public void init(ProcessorContext context) {
		// Keep the processor context locally; punctuate() needs it to forward
		// records and to commit().
		this.context = context;

		// Ask the framework to call punctuate() every 1000 milliseconds.
		this.context.schedule(1000);

		// Retrieve the key-value store named "counts". The unchecked cast is
		// unavoidable with this API version; the store's registered serdes
		// must match <String, Long>.
		this.kvStore = (KeyValueStore<String, Long>) context.getStateStore("counts");
	}

	@Override
	public void process(String key, String line) {
		// Guard against null/empty values; splitting "" would otherwise
		// register a bogus empty-string "word".
		if (line == null || line.isEmpty()) {
			return;
		}
		String[] words = line.toLowerCase().split(" ");

		for (String word : words) {
			Long oldValue = this.kvStore.get(word);
			long newValue = (oldValue == null) ? 1L : oldValue + 1L;
			this.kvStore.put(word, newValue);
			// BUG FIX: the original logged oldValue.toString(), which threw a
			// NullPointerException the first time any word was seen (oldValue
			// == null branch). Also use parameterized logging at DEBUG level
			// instead of ERROR with string concatenation.
			logger.debug("word={} count={}", word, newValue);
		}
	}

	@Override
	public void punctuate(long timestamp) {
		// Forward every accumulated (word, count) pair downstream.
		// The count is forwarded as a String, matching the original contract.
		KeyValueIterator<String, Long> iter = this.kvStore.all();
		try {
			while (iter.hasNext()) {
				KeyValue<String, Long> entry = iter.next();
				context.forward(entry.key, entry.value.toString());
			}
		} finally {
			// The iterator holds store resources and must be closed even if
			// forward() throws.
			iter.close();
		}
		// Commit the current processing progress.
		context.commit();
		logger.debug("punctuate: forwarded counts at timestamp {}", timestamp);
	}

	@Override
	public void close() {
		// No resources owned by this processor; the state store's lifecycle
		// is managed by the Streams framework.
	}
}
