/**
 * 
 */
package com.dangdang.kafaka;

import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicLong;

import javax.annotation.Resource;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.aop.support.AopUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.BeanDefinitionValidationException;
import org.springframework.beans.factory.support.DefaultListableBeanFactory;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.context.ApplicationEvent;
import org.springframework.context.ApplicationListener;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.event.ContextRefreshedEvent;
import org.springframework.core.annotation.Order;
import org.springframework.core.env.StandardEnvironment;
import org.springframework.kafka.listener.MessageListener;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;

import com.dangdang.annotation.KafkaMqConsumer;
import com.dangdang.task.ProductPriceChangeMessageConsumer;

/**
 * Auto-registration of Kafka listener containers.
 *
 * <p>After the root application context has refreshed, every bean annotated with
 * {@link KafkaMqConsumer} (which must also implement {@link MessageListener}) gets
 * a {@code DefaultKafkaListenerContainer} bean definition registered for it, and
 * the container is started.
 *
 * @author ligeng 2019年6月27日
 */
@Component
@Order
public class KafkaAutoConfiguration
		implements ApplicationContextAware, ApplicationListener<ContextRefreshedEvent> {

	private static final Logger logger = LoggerFactory.getLogger(KafkaAutoConfiguration.class);

	// Captured in setApplicationContext(); needed to reach the bean factory at runtime.
	private ConfigurableApplicationContext applicationContext;

	// Generates a unique suffix for each registered container bean name.
	private final AtomicLong counter = new AtomicLong(0);

	@Resource
	private StandardEnvironment environment;

	// Shared default connection settings; may be overridden per consumer via the annotation.
	@Resource
	private KafkaProperties kafkaProperties;

	@Override
	public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
		// The configurable variant is required to register bean definitions later;
		// all standard context implementations satisfy this cast.
		this.applicationContext = (ConfigurableApplicationContext) applicationContext;
	}

	/**
	 * Registers a listener container for every {@link KafkaMqConsumer} bean once the
	 * ROOT context has finished refreshing. The parent-null check prevents double
	 * registration when child contexts (e.g. a web context) refresh as well.
	 */
	@Override
	public void onApplicationEvent(ContextRefreshedEvent event) {
		if (event.getApplicationContext().getParent() == null) {
			Map<String, Object> beans = this.applicationContext.getBeansWithAnnotation(KafkaMqConsumer.class);
			if (Objects.nonNull(beans)) {
				for (Entry<String, Object> entry : beans.entrySet()) {
					registerContainer(entry.getKey(), entry.getValue());
				}
			}
		}
	}

	/**
	 * Registers and starts a {@code DefaultKafkaListenerContainer} for the given
	 * annotated consumer bean.
	 *
	 * @param beanName name of the consumer bean (currently informational only)
	 * @param bean     the consumer; must implement {@link MessageListener}
	 * @throws IllegalStateException if the bean is not a {@link MessageListener}
	 * @throws BeanDefinitionValidationException if the annotation declares no topic
	 * @throws RuntimeException if the container fails to start
	 */
	public void registerContainer(String beanName, Object bean) {
		Class<?> clazz = AopUtils.getTargetClass(bean);
		if (!MessageListener.class.isAssignableFrom(bean.getClass())) {
			throw new IllegalStateException(clazz + " is not instance of " + MessageListener.class.getName());
		}
		MessageListener<?, ?> messageListener = (MessageListener<?, ?>) bean;
		KafkaMqConsumer annotation = clazz.getAnnotation(KafkaMqConsumer.class);
		// Fails fast on a blank topic, making any later topic-empty branch unreachable.
		validate(annotation);

		BeanDefinitionBuilder beanBuilder = BeanDefinitionBuilder
				.rootBeanDefinition(DefaultKafkaListenerContainer.class);
		beanBuilder.addPropertyValue(DefaultKafkaListenerContainerConstants.PROP_TOPICS, resolveTopics(annotation));
		if (!StringUtils.isEmpty(annotation.threads())) {
			beanBuilder.addPropertyValue(DefaultKafkaListenerContainerConstants.PROP_CONCURRENCY,
					Integer.valueOf(annotation.threads()));
		}
		beanBuilder.addPropertyValue(DefaultKafkaListenerContainerConstants.PROP_KAFKA_PROPERTIES,
				buildProperties(annotation));
		beanBuilder.addPropertyValue(DefaultKafkaListenerContainerConstants.PROP_KAFKA_MESSAGELISTENER,
				messageListener);
		beanBuilder.setDestroyMethodName(DefaultKafkaListenerContainerConstants.METHOD_DESTROY);

		String containerBeanName = String.format("%s_%s", DefaultKafkaListenerContainer.class.getName(),
				counter.incrementAndGet());
		DefaultListableBeanFactory beanFactory = (DefaultListableBeanFactory) applicationContext.getBeanFactory();
		beanFactory.registerBeanDefinition(containerBeanName, beanBuilder.getBeanDefinition());
		DefaultKafkaListenerContainer container = beanFactory.getBean(containerBeanName,
				DefaultKafkaListenerContainer.class);

		if (!container.isStart()) {
			try {
				container.start();
			} catch (Exception e) {
				logger.error("kafka started container failed. {}", container, e);
				throw new RuntimeException(e);
			}
		}
	}

	/**
	 * Builds the effective connection properties for one consumer: a copy of the
	 * shared defaults, with the bootstrap servers overridden when the annotation
	 * supplies a non-empty, resolvable value.
	 */
	private KafkaProperties buildProperties(KafkaMqConsumer annotation) {
		KafkaProperties properties = new KafkaProperties();
		BeanUtils.copyProperties(kafkaProperties, properties);
		String bootStrapServices = annotation.bootStrapServices();
		if (!StringUtils.isEmpty(bootStrapServices)) {
			String servers = environment.resolvePlaceholders(bootStrapServices);
			// BUGFIX: check the RESOLVED value — the original re-checked the raw
			// placeholder expression, so an empty resolution result could wrongly
			// override the default servers with an empty string.
			if (!StringUtils.isEmpty(servers)) {
				properties.setBootstrapServers(servers);
			}
		}
		return properties;
	}

	/**
	 * Resolves the annotation's semicolon-separated topic list against the
	 * environment, falling back to the raw annotation value when resolution
	 * yields an empty string.
	 */
	private String[] resolveTopics(KafkaMqConsumer annotation) {
		String topics = environment.resolvePlaceholders(annotation.topic());
		if (StringUtils.isEmpty(topics)) {
			topics = annotation.topic();
		}
		return topics.split(";");
	}

	/**
	 * Rejects an incomplete annotation (blank topic) before any registration work.
	 */
	private void validate(KafkaMqConsumer annotation) {
		if (StringUtils.isEmpty(annotation.topic())) {
			throw new BeanDefinitionValidationException(
					"Bad annotation definition in @KafkaMqConsumer,topic isblank!");
		}
	}
}
