package com.alinesno.cloud.base.logger.consumer;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Optional;
import java.util.regex.Pattern;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.KafkaHeaders;
import org.springframework.messaging.handler.annotation.Header;
import org.springframework.messaging.handler.annotation.Payload;
import org.springframework.stereotype.Component;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.alinesno.cloud.base.logger.entity.LogSqlEntity;
import com.alinesno.cloud.base.logger.service.ILogSqlService;
import com.alinesno.cloud.monitor.agent.aspect.BaseAspect;

/**
 * Kafka listener that consumes SQL monitoring messages published by the
 * monitoring agent and persists per-statement execution statistics
 * through {@link ILogSqlService}.
 *
 * @author WeiXiaoJin
 * @since 2020年1月25日 下午19:45:08
 */
@Component
class MontorSQLConsumerListener extends BaseConsumer {

	private static final Logger log = LoggerFactory.getLogger(MontorSQLConsumerListener.class);

	// Strips "//" line comments and "/* ... */" block comments from SQL text.
	// Compiled once: the previous String.replaceAll recompiled this regex for
	// every element of every message.
	private static final Pattern SQL_COMMENT_PATTERN = Pattern
			.compile("\\/\\/[^\\n]*|\\/\\*([^\\*^\\/]*|[\\*^\\/*]*|[^\\**\\/]*)*\\*\\/");

	@Autowired
	private ILogSqlService logSqlService ;

	/**
	 * Receives one SQL monitoring record from the watcher topic and stores it.
	 *
	 * @param record    raw Kafka consumer record
	 * @param data      message payload (same content as {@code record.value()})
	 * @param partition partition the record was read from
	 * @param topic     topic the record was read from
	 * @param ts        record timestamp in epoch millis
	 */
	@KafkaListener(topics = { BaseAspect.WATCHER_TOPIC_SQL })
	public void listenLogger(ConsumerRecord<?, ?> record, @Payload String data,
			@Header(KafkaHeaders.RECEIVED_PARTITION_ID) int partition,
			@Header(KafkaHeaders.RECEIVED_TOPIC) String topic, @Header(KafkaHeaders.RECEIVED_TIMESTAMP) long ts) {

		Optional<?> kafkaMessage = Optional.ofNullable(record.value());
		if (kafkaMessage.isPresent()) {

			// isPresent() guarantees non-null, so no further null check is needed.
			Object message = kafkaMessage.get();

			// Parameterized logging: no concatenation cost when DEBUG is disabled.
			log.debug("---------------->>>>  message = topic：{}, {}", topic, message);

			buildStorageMessage(message.toString());
		}
	}

	/**
	 * Parses the JSON payload and saves one {@link LogSqlEntity} per element of
	 * its {@code sqlList} array. Host-level fields (application name, ip,
	 * hostname, db type and url) are copied onto every entity.
	 *
	 * @param text JSON message produced by the monitoring agent
	 */
	private void buildStorageMessage(String text) {

		JSONObject json = JSONObject.parseObject(text) ;

		String applicationName = json.getString(APPLICATIONNAME) ;
		String ip = json.getString(IP) ;
		String hostname = json.getString(HOSTNAME) ;
		String dbType = json.getString("dbType") ;
		String dbUrl = json.getString("url") ;

		// The list of executed SQL statements reported by the agent.
		JSONArray arr = json.getJSONArray("sqlList") ;
		if (arr == null || arr.isEmpty()) {
			// Nothing to persist; previously saveAll() was still called with an
			// empty list when "sqlList" was present but empty.
			return;
		}

		List<LogSqlEntity> list = new ArrayList<LogSqlEntity>(arr.size()) ;
		for (Object o : arr) {
			JSONObject obj = JSONObject.parseObject(o + "") ;
			LogSqlEntity e = new LogSqlEntity() ;

			// Guard against a missing "sql" key: the original code NPE'd on
			// sqlContent.replaceAll(...) when getString returned null.
			String sqlContent = obj.getString("sql") ;
			String sqlOut = sqlContent == null ? "" : SQL_COMMENT_PATTERN.matcher(sqlContent).replaceAll("");
			// Use the class logger instead of System.out.println.
			log.debug("sql after comment strip: {}", sqlOut);

			e.setApplicationName(applicationName);
			e.setIp(ip);
			e.setHostname(hostname);

			e.setSqlContent(sqlOut);
			e.setActiveCount(obj.getIntValue("executeCount"));
			e.setExecuteMillisMax(obj.getIntValue("executeMillisMax") + "");
			e.setExecuteMillisTotal(obj.getIntValue("executeMillisTotal") + "");
			e.setUpdateCount(obj.getIntValue("updateCount"));
			e.setTransationCount(obj.getIntValue("inTransactionCount"));
			e.setThreadCount(obj.getIntValue("concurrentMax"));
			e.setResultCount(obj.getIntValue("fetchRowCount"));
			e.setDbType(dbType);
			e.setDbUrl(dbUrl);

			e.setAddTime(new Date());

			list.add(e) ;
		}

		logSqlService.saveAll(list) ;
	}

}






































