package com.bjsdzk.collector.config;

//import io.vertx.kafka.client.consumer.KafkaConsumer;
//import io.vertx.kafka.client.producer.KafkaProducer;
import io.vertx.rabbitmq.RabbitMQClient;
import io.vertx.rabbitmq.RabbitMQOptions;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Scope;
import org.springframework.core.env.Environment;

import io.vertx.core.Vertx;
import io.vertx.core.VertxOptions;
import io.vertx.core.json.JsonObject;
import io.vertx.ext.mongo.MongoClient;
import io.vertx.redis.RedisClient;
import io.vertx.redis.RedisOptions;

import java.util.HashMap;
import java.util.Map;

@Configuration
public class VertxFactoryBean {

	private static final Logger logger = LoggerFactory.getLogger(VertxFactoryBean.class);

	@Autowired
	private Environment env;

	// Redis connection settings injected from application properties.
	@Value("${spring.redis.host}")
	private String redisHost;
	@Value("${spring.redis.port}")
	private Integer redisPort;
	@Value("${spring.redis.password}")
	private String redisPassword;

	/**
	 * Builds the application-wide singleton {@link Vertx} instance with custom
	 * pool sizes and event-bus receive buffer.
	 *
	 * @return the configured Vert.x instance
	 */
	@Bean
	@Scope("singleton")
	public Vertx vertx() {
		VertxOptions options = new VertxOptions();
		// Log the library defaults before overriding, for comparison at startup.
		logger.info("MaxEventLoopExecuteTime:{} EventLoopPoolSize:{} WorkerPoolSize：{} ReceiveBufferSize:{}",
				options.getMaxEventLoopExecuteTime(), options.getEventLoopPoolSize(),
				options.getWorkerPoolSize(), options.getEventBusOptions().getReceiveBufferSize());
		// NOTE(review): 1000 event loops / 1024 workers is far above the Vert.x
		// default (2 * cores for event loops); confirm these sizes are intentional —
		// oversized pools cost memory and context switches. Left unchanged to
		// preserve runtime behavior.
		options.setEventLoopPoolSize(1000);
		options.setWorkerPoolSize(1024);
		options.getEventBusOptions().setReceiveBufferSize(1024 * 1024);
		Vertx vertx = Vertx.vertx(options);
		logger.info("设置--MaxEventLoopExecuteTime:{} EventLoopPoolSize:{} WorkerPoolSize：{} ReceiveBufferSize:{}",
				options.getMaxEventLoopExecuteTime(), options.getEventLoopPoolSize(),
				options.getWorkerPoolSize(), options.getEventBusOptions().getReceiveBufferSize());
		return vertx;
	}

	/**
	 * Creates a Redis client bound to the shared Vert.x instance, using the
	 * injected host/port and optional password.
	 *
	 * @param vertx the shared Vert.x instance
	 * @return a configured {@link RedisClient}
	 */
	@ConditionalOnBean(name = "vertx")
	@Bean
	public RedisClient getRedis(Vertx vertx) {
		RedisOptions config = new RedisOptions().setHost(redisHost).setPort(redisPort);
		// Only set auth when a password is actually configured.
		if (StringUtils.isNotEmpty(redisPassword)) {
			config.setAuth(redisPassword);
		}
		return RedisClient.create(vertx, config);
	}

	/**
	 * Creates a shared MongoDB client for the {@code sdzk-data} database, using
	 * the connection string from {@code spring.data.mongodb.uri}.
	 *
	 * @param vertx the shared Vert.x instance
	 * @return a shared {@link MongoClient}
	 */
	@ConditionalOnBean(name = "vertx")
	@Bean
	public MongoClient getMongo(Vertx vertx) {
		return MongoClient.createShared(vertx, new JsonObject()
				.put("db_name", "sdzk-data")
				.put("connection_string", env.getProperty("spring.data.mongodb.uri")));
	}

	/**
	 * Creates a RabbitMQ client with automatic connection recovery enabled.
	 * <p>
	 * SECURITY(review): the broker host and credentials were previously
	 * hard-coded in source. They are now read from configuration properties
	 * ({@code rabbitmq.user}, {@code rabbitmq.password}, {@code rabbitmq.host},
	 * {@code rabbitmq.port}, {@code rabbitmq.virtual-host}) with the former
	 * hard-coded values as defaults, so behavior is unchanged until the
	 * properties are set — but the defaults should be removed and the secrets
	 * moved to externalized configuration.
	 *
	 * @param vertx the shared Vert.x instance
	 * @return a configured {@link RabbitMQClient}
	 */
	@ConditionalOnBean(name = "vertx")
	@Bean
	public RabbitMQClient getRabbitClient(Vertx vertx) {
		RabbitMQOptions config = new RabbitMQOptions();
		config.setUser(env.getProperty("rabbitmq.user", "rabbit"));
		config.setPassword(env.getProperty("rabbitmq.password", "rabbit"));
		config.setHost(env.getProperty("rabbitmq.host", "1.119.185.221"));
		config.setPort(env.getProperty("rabbitmq.port", Integer.class, 5672));
		config.setVirtualHost(env.getProperty("rabbitmq.virtual-host", "my_test"));
		config.setConnectionTimeout(6000);
		config.setRequestedHeartbeat(60);
		config.setHandshakeTimeout(6000);
		config.setRequestedChannelMax(5);
		// Retry the broker connection every 500 ms after a network failure.
		config.setNetworkRecoveryInterval(500);
		config.setAutomaticRecoveryEnabled(true);
		return RabbitMQClient.create(vertx, config);
	}

}


