package com.roy.KafkaTest.stream.myexam;

import java.util.Arrays;
import java.util.Locale;
import java.util.Properties;
import java.util.concurrent.CountDownLatch;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KTable;
import org.apache.kafka.streams.kstream.Printed;

/**
 * Kafka Streams word-count demo: consumes text lines from {@code topic1},
 * splits each line into lowercase words, counts occurrences per word, and
 * prints each updated (word, count) pair to stdout.
 *
 * <p>Runs until the process receives a shutdown signal (e.g. Ctrl-C); a
 * shutdown hook closes the streams instance cleanly before exit.
 */
public class MyStremDemo1 {
	public static final String INPUT_TOPIC = "streams-plaintext-input";
	public static final String OUTPUT_TOPIC = "streams-wordcount-output";
	private static final String BOOTSTRAP_SERVERS = "172.16.48.10:9092,172.16.48.11:9092,172.16.48.12:9092";
	private static final String TOPIC = "topic1";

	/**
	 * Builds the configuration for the word-count streams application.
	 *
	 * <p>Caching is disabled and the commit interval kept short so that
	 * count updates are emitted promptly (useful for a demo; a production
	 * app would usually keep caching on).
	 *
	 * @return the {@link Properties} to construct a {@link KafkaStreams} with
	 */
	static Properties getStreamsConfig() {
		final Properties props = new Properties();
		props.put(StreamsConfig.APPLICATION_ID_CONFIG, "streams-wordcount");
		props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, BOOTSTRAP_SERVERS);
		// Disable record caching so every count update is forwarded downstream
		// immediately instead of being buffered and deduplicated.
		props.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 0);
		props.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, "500");
		// String serdes for both keys and values; the count() result uses an
		// explicit Long serde provided by the DSL's internal materialization.
		props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG,
				Serdes.String().getClass().getName());
		props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG,
				Serdes.String().getClass().getName());

		// Note: to re-run the demo against the same pre-loaded data, reset the
		// application's offsets with the Streams Application Reset Tool:
		// https://cwiki.apache.org/confluence/display/KAFKA/Kafka+Streams+Application+Reset+Tool
		return props;
	}

	/**
	 * Builds the word-count topology, starts the streams instance, and blocks
	 * until the JVM is asked to shut down.
	 *
	 * @param args ignored
	 */
	public static void main(final String[] args) {
		final Properties props = getStreamsConfig();

		final StreamsBuilder builder = new StreamsBuilder();

		KStream<String, String> textLine = builder.stream(TOPIC);

		// Split each line on runs of whitespace (not a single space — that
		// would yield empty tokens for consecutive spaces) and drop any empty
		// token produced by leading whitespace before counting.
		KTable<String, Long> wordCounts = textLine
				.flatMapValues(value -> Arrays.asList(value.toLowerCase(Locale.getDefault()).split("\\s+")))
				.filter((key, word) -> !word.isEmpty())
				.groupBy((key, word) -> word)
				.count();

		// Emit every count update to stdout for demo visibility.
		wordCounts.toStream().foreach((key, value) -> System.out.println(key + " ----- " + value));

		final KafkaStreams streams = new KafkaStreams(builder.build(), props);
		final CountDownLatch latch = new CountDownLatch(1);

		// Attach shutdown handler to catch Ctrl-C: close the streams instance
		// cleanly, then release main() so the JVM can exit.
		Runtime.getRuntime()
				.addShutdownHook(new Thread("streams-wordcount-shutdown-hook") {
					@Override
					public void run() {
						streams.close();
						latch.countDown();
					}
				});

		try {
			streams.start();
			latch.await();
		} catch (final Throwable e) {
			// Report the failure instead of exiting silently, so operators can
			// see why the process died with a non-zero status.
			System.err.println("Streams application failed: " + e);
			e.printStackTrace(System.err);
			System.exit(1);
		}
		System.exit(0);
	}

}
