package io.confluent.examples.streams;

import java.io.IOException;
import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
import java.util.Random;
import java.util.stream.IntStream;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.LongDeserializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;

import io.confluent.examples.streams.avro.WikiFeed;
import io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig;

/**
 * Example driver for the WikipediaFeed Avro demo: publishes a random batch of
 * Avro-encoded {@link WikiFeed} records to the input topic, then tails the
 * stats output topic and prints each {@code user=count} pair to stdout.
 *
 * <p>Usage: {@code WikipediaFeedAvroExampleDriver [bootstrapServers [schemaRegistryUrl]]}.
 * The consume loop runs until the process is killed.
 */
public class WikipediaFeedAvroExampleDriver {
	static final String WIKIPEDIA_FEED = "WikipediaFeed";
	static final String WIKIPEDIA_STATS = "WikipediaStats";

	public static void main(String[] args) throws IOException {
		// Fall back to the demo cluster when no CLI args are supplied.
		final String bootstrapServers = args.length > 0 ? args[0] : "10.100.189.30:9092";
		final String schemaRegistryUrl = args.length > 1 ? args[1] : "http://10.100.189.30:8081";
		produceInputs(bootstrapServers, schemaRegistryUrl);
		consumeOutput(bootstrapServers, schemaRegistryUrl);
	}

	/**
	 * Publishes a random number (1..100) of {@link WikiFeed} records, keyed null,
	 * to {@value #WIKIPEDIA_FEED} using the Confluent Avro serializer.
	 *
	 * @param bootstrapServers  Kafka broker list, e.g. {@code host:9092}
	 * @param schemaRegistryUrl schema registry endpoint for Avro schema lookup
	 * @throws IOException kept for signature compatibility with existing callers
	 */
	private static void produceInputs(String bootstrapServers, String schemaRegistryUrl) throws IOException {
		final String[] users = { "erica", "bob", "joe", "damian", "tania", "phil", "sam", "lauren", "joseph" };
		final Properties props = new Properties();
		props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
		props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
		props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
				io.confluent.kafka.serializers.KafkaAvroSerializer.class);
		props.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl);

		final Random random = new Random();

		// try-with-resources guarantees the producer is closed (and flushed)
		// even if a send fails; KafkaProducer.close() flushes pending records.
		try (final KafkaProducer<String, WikiFeed> producer = new KafkaProducer<>(props)) {
			// 1 + nextInt(100) ensures at least one record is published;
			// nextInt(100) alone could return 0 and the demo would emit nothing.
			IntStream.range(0, 1 + random.nextInt(100))
					.mapToObj(value -> new WikiFeed(users[random.nextInt(users.length)], true, "content"))
					.forEach(record -> producer.send(new ProducerRecord<>(WIKIPEDIA_FEED, null, record)));

			producer.flush();
		}
	}

	/**
	 * Subscribes to {@value #WIKIPEDIA_STATS} and prints every
	 * {@code key=value} record forever (runs until the JVM is terminated).
	 *
	 * @param bootstrapServers  Kafka broker list
	 * @param schemaRegistryUrl schema registry endpoint (unused by the String/Long
	 *                          deserializers but kept in the config for parity)
	 */
	private static void consumeOutput(String bootstrapServers, String schemaRegistryUrl) {
		final Properties properties = new Properties();
		properties.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, 100);
		properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
		properties.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl);
		properties.put(ConsumerConfig.GROUP_ID_CONFIG, "wikipedia-feed-example-consumer");
		// Start from the beginning of the topic when the group has no committed offset.
		properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

		// Deserializers are passed explicitly to the constructor; any
		// *_DESERIALIZER_CLASS_CONFIG properties would be ignored, so none are set
		// (the original set the value deserializer to String, contradicting the
		// Long values actually consumed).
		try (final KafkaConsumer<String, Long> consumer = new KafkaConsumer<>(properties, new StringDeserializer(),
				new LongDeserializer())) {
			consumer.subscribe(Collections.singleton(WIKIPEDIA_STATS));
			while (true) {
				// poll(Duration) is the supported overload; poll(long) is deprecated.
				final ConsumerRecords<String, Long> consumerRecords = consumer.poll(Duration.ofMillis(Long.MAX_VALUE));
				for (final ConsumerRecord<String, Long> consumerRecord : consumerRecords) {
					System.out.println(consumerRecord.key() + "=" + consumerRecord.value());
				}
			}
		}
	}

}
