package com.motu.vertx.module.utility.logsdk;

import com.alibaba.fastjson.JSONObject;
import com.motu.vertx.module.utility.base.BaseServerParam;
import com.motu.vertx.module.utility.base.UtilityConstant;
import com.motu.vertx.module.utility.kafka.KafkaManager;
import com.motu.vertx.module.utility.toolset.Tool;
import io.vertx.core.Vertx;
import io.vertx.core.json.JsonObject;
import io.vertx.kafka.client.producer.KafkaProducer;
import io.vertx.kafka.client.producer.KafkaProducerRecord;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Properties;

/**
 * 第三方日志上报管理类
 */
public class LogSDKManager {
	private static final Logger logger = LoggerFactory.getLogger(LogSDKManager.class);


	public static KafkaProducer<String, String> producer;
	public static KafkaProducer<String, String> thinking_producer;// 用于数数
	public static final String KAFKA_TOPIC_REYUN = "log_reyun";
	public static final String KAFKA_TOPIC_ADJUST = "log_adjust";
	public static final String KAFKA_TOPIC_TOUTIAO = "log_toutiao";
	public static final String KAFKA_TOPIC_MOBPUST = "push_mob";
	public static final String KAFKA_TOPIC_FCMPUSH = "push_fcm";
	public static final String KAFKA_TOPIC_THINKINGDATA = "thinking_data";

	public static void init(Vertx vertx, JsonObject appConfig) {

		String kafkaAddress = appConfig.getString("log-kafka-address");
		if (Tool.isEmpty(kafkaAddress)) {
			kafkaAddress = appConfig.getString("kafka-address");
			if (Tool.isEmpty(kafkaAddress)) {
				logger.error("LogSDKManager init failed, address is empty");
				return;
			}
		}

		Properties config = new Properties();
		config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaAddress);
		config.put("key.serializer", StringSerializer.class);
		config.put("value.serializer", StringSerializer.class);
		config.put("acks", "1");
		// 优化kafka生产者配置
		KafkaManager.upgradeProducerConfig(config, appConfig);

		// 创建一个Kafka Producer
		producer = KafkaProducer.create(vertx, config);
		logger.info("sdk log kafka create success address:{}", kafkaAddress);

		// 如果有配置数数kafka地址，则数数单独用一个kafka
		String thinkingAddress = appConfig.getString("thinking-kafka-address");
		if (!Tool.isEmpty(thinkingAddress)) {
			Properties thinking_config = new Properties();
			thinking_config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, thinkingAddress);
			thinking_config.put("key.serializer", StringSerializer.class);
			thinking_config.put("value.serializer", StringSerializer.class);
			thinking_config.put("acks", "1");
			// 优化kafka生产者配置
			KafkaManager.upgradeProducerConfig(thinking_config, appConfig);
			thinking_producer = KafkaProducer.create(vertx, thinking_config);
			logger.info("thinking kafka create success address:{}", thinkingAddress);
		}

	}

	/**
	 * 发送第三方日志
	 *
	 * @param topicName
	 * @param obj
	 */
	public static void pushToQueue(String topicName, JsonObject obj) {
		logger.info("LogSDKManager send topicName:" + topicName + " obj:" + obj.toString());
		KafkaProducerRecord<String, String> record = KafkaProducerRecord.create(topicName, obj.toString());
		producer.send(record, done -> {
			if (done.failed()) {
				logger.error("LogSdk pushDBQueue fail cause = " + Tool.getException(done.cause()));
			} else {
//				logger.info("LogSdk send topicName:"+topicName+" obj:"+obj.toString());
			}
		});
	}

	/***
	 * 发送数数sdk日志
	 * @param topicName
	 * @param obj
	 */
	public static void sendThinkingData(String topicName, JSONObject obj) {
		// ThinkingDataSDK，用的是alibaba.fastjson
		//带上#time参数才能使用logbus发送, 没有设置#time 默认用当前时间
		if (!obj.containsKey(ThinkingDataBaseManager.TIME)) {
			obj.put(ThinkingDataBaseManager.TIME, UtilityConstant.getEventTime(obj.getString(ThinkingDataBaseManager.ZONE_OFFSET), ThinkingDataBaseManager.timeZoneOffset));
		}
		String dataStr = obj.toString();
		if (BaseServerParam.SEND_THINKING_DATA == 0) {
			return;
		} else if (BaseServerParam.SEND_THINKING_DATA == 1) {
			//走kafka队列发送
			KafkaProducerRecord<String, String> record = KafkaProducerRecord.create(topicName, dataStr);
			KafkaProducer<String, String> kafkaProducer;
			if (thinking_producer != null) {
				kafkaProducer = thinking_producer;
			} else {
				kafkaProducer = producer;
			}
			kafkaProducer.send(record, done -> {
				if (done.failed()) {
					logger.error("LogSdk pushDBQueue fail cause = " + Tool.getException(done.cause()));
				} else {
//            		logger.info("LogSdk send topicName:"+topicName+" obj:"+obj.toString());
				}
			});
		} else if (BaseServerParam.SEND_THINKING_DATA == 2) {
			//直接发送
			ThinkingDataBaseManager.send(dataStr);
		}
	}


}
