package com.huatai.datacenter.controller;

import com.alibaba.fastjson.JSONArray;
import com.github.xiaoymin.knife4j.annotations.ApiOperationSupport;
import com.huatai.common.api.R;
import com.huatai.datacenter.constant.Constants;
import com.huatai.datacenter.entity.TopicInfoEntity;
import com.huatai.datacenter.entity.messagequeuemonitor.LagRecordQueryDTO;
import com.huatai.datacenter.entity.messagequeuemonitor.LagStatVO;
import com.huatai.datacenter.entity.messagequeuemonitor.PageResult;
import com.huatai.datacenter.service.KafkaManagerService;
import com.huatai.datacenter.service.KafkaMonitorService;
import com.huatai.datacenter.service.TopicInfoService;
import com.rabbitmq.client.*;
import io.swagger.annotations.*;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;

import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;

/**
 * Topic management controller.
 * <p>
 * Exposes Kafka topic CRUD, topic data send/consume, lag monitoring endpoints,
 * and a hard-coded RabbitMQ smoke-test endpoint.
 *
 * @author Lion
 * @date 2023/3/29  10:43
 */
@Slf4j
@RequestMapping("/topic")
@RestController
@Api(value = "Kafka中的主题管理", tags = "Kafka中的主题管理")
public class TopicController {

	@Autowired
	private KafkaManagerService kafkaManagerService;

	@Autowired
	private TopicInfoService topicInfoService;

	// 2023-08-07: switched to the service variant that carries a broker field
	@Autowired
	private KafkaMonitorService kafkaMonitorService; // lag monitoring service

	/**
	 * List all topics of a cluster.
	 *
	 * @param clusterId cluster id (request param "cluster")
	 * @return topic list wrapped in {@code R}, or a failure result on any error
	 */
	@ApiOperation("根据集群id获取所有主题")
	@ApiOperationSupport(includeParameters = {
		"cluster"
	})
	@GetMapping("/list")
	public R topicList(@RequestParam("cluster") String clusterId) {
		try {
			return R.data(kafkaManagerService.topicList(clusterId));
		} catch (Exception e) {
			log.error("get topic config error,", e);
		}
		return R.fail("get topic config error!Please check");
	}

	/**
	 * Delete a topic identified by cluster id + topic name.
	 *
	 * @param queryMap request body with keys {@code clusterId} and {@code topicName}
	 * @return success result when the service reports deletion, failure otherwise
	 */
	@ApiOperation("根据集群id和主题名称删除主题")
	@ApiImplicitParams({
		@ApiImplicitParam(name = "clusterId", value = "集群id", dataType = "long"),
		@ApiImplicitParam(name = "topicName", value = "主题名称")
	})
	@PostMapping("/deleteTopic")
	public R deleteTopic(@RequestBody Map<String, String> queryMap) {
		try {
			String clusterId = queryMap.get(Constants.KeyStr.LOWER_CLUSTER_ID);
			String topicName = queryMap.get(Constants.KeyStr.TOPICNAME);
			if (kafkaManagerService.deleteTopic(clusterId, topicName)) {
				return R.success("success");
			}
		} catch (Exception e) {
			log.error("Delete Topic failed,", e);
		}
		return R.fail("Delete Topic failed!");
	}

	/**
	 * Create a topic.
	 *
	 * @param topicInfoEntity topic definition (topicName, partition, replication,
	 *                        ttl, clusterId, comments)
	 * @return success/failure result
	 */
	@ApiOperation("创建主题")
	@ApiOperationSupport(includeParameters = {
		"TopicInfoEntity.topicName",
		"TopicInfoEntity.partition",
		"TopicInfoEntity.replication",
		"TopicInfoEntity.ttl",
		"TopicInfoEntity.clusterId",
		"TopicInfoEntity.comments"
	})
	@PostMapping("/createTopic")
	public R createTopic(@RequestBody TopicInfoEntity topicInfoEntity) {
		try {
			topicInfoService.kafkaAdminCreateTopic(topicInfoEntity);
		} catch (Exception e) {
			// FIX: was e.printStackTrace() — log with full stack trace instead
			log.error("create topic error,", e);
			return R.fail("error");
		}
		return R.success("success");
	}

	/**
	 * Consume and return sample data from a topic.
	 *
	 * @param json consumption parameters (see @ApiImplicitParams)
	 * @return the consumed records as a JSON array
	 */
	@ApiOperation("获取主题数据")
	@ApiImplicitParams({
		@ApiImplicitParam(name = "waitTime", value = "延迟时间"),
		@ApiImplicitParam(name = "groupID", value = "分组id"),
		@ApiImplicitParam(name = "recordNum", value = "消费数据量"),
		@ApiImplicitParam(name = "isCommit", value = "是否提交偏移量"),
		@ApiImplicitParam(name = "isByPartition", value = "是否提交分区"),
		@ApiImplicitParam(name = "clusterID", value = "集群id"),
		@ApiImplicitParam(name = "offset", value = "偏移量（默认为空）"),
		@ApiImplicitParam(name = "partition", value = "分区"),
		@ApiImplicitParam(name = "topic_name", value = "主题名称")
	})
	@PostMapping("/queryTopicData")
	public R consumer(@RequestBody Map<String, String> json) {
		JSONArray jsonArray = topicInfoService.queryTopicData(json);
		return R.data(jsonArray);
	}

	/**
	 * Send one key/value record to a topic.
	 *
	 * @param json request body with clusterID, topicName, key, value
	 * @return success result, or failure result when the send fails
	 */
	@PostMapping("/sendDataToTopic")
	@ApiOperation("发送数据示例")
	@ApiImplicitParams({
		@ApiImplicitParam(name = "clusterID", value = "集群id"),
		@ApiImplicitParam(name = "key", value = "key"),
		@ApiImplicitParam(name = "value", value = "value"),
		@ApiImplicitParam(name = "topicName", value = "主题名称")
	})
	public R sendData(@RequestBody Map<String, String> json) {
		String clusterId = json.get(Constants.KeyStr.CLUSTERID);
		String topicName = json.get(Constants.KeyStr.TOPICNAME);
		String key = json.get(Constants.JsonObject.KEY);
		String value = json.get(Constants.JsonObject.VALUE);
		try {
			topicInfoService.sendData(clusterId, topicName, key, value);
			return R.data("Send data success");
		} catch (InterruptedException e) {
			// FIX: restore the interrupt status instead of swallowing it
			Thread.currentThread().interrupt();
			log.error("send data to topic {} interrupted,", topicName, e);
			// FIX: was R.data("Send data error") — a failure was reported through
			// the success wrapper; use R.fail like every other endpoint here
			return R.fail("Send data error");
		} catch (ExecutionException e) {
			log.error("send data to topic {} error,", topicName, e);
			return R.fail("Send data error");
		}
	}

	/**
	 * Report the data size of each topic in a cluster.
	 * Best-effort: returns an empty map when the lookup fails (behavior kept as-is).
	 *
	 * @param clusterId cluster id
	 * @return topic name -> size map
	 */
	@ApiOperation("获取kafka中数据大小")
	@GetMapping("/data")
	public R testData(@RequestParam("clusterId") String clusterId) {
		Map<String, Long> map = new HashMap<>();
		try {
			map = kafkaManagerService.takeTopicSize(clusterId);
		} catch (Exception e) {
			// FIX: was e.printStackTrace() — log with full stack trace instead
			log.error("take topic size error,", e);
		}
		return R.data(map);
	}

	/**
	 * RabbitMQ smoke test — consumes from a hard-coded exchange.
	 * NOTE(review): test-only endpoint with a hard-coded exchange name;
	 * consider removing it from production builds.
	 */
	@ApiOperation("获取RabbitMQ")
	@GetMapping("/testRabbitMQ")
	public R testRabbitMQData() {
		asyncGet("directExchange750");
		return R.data("");
	}


	/*===========
	Lag monitoring endpoints
	=============*/

	/**
	 * 获取所有堆积消息 Kafka
	 * Reads the cached lag snapshot directly; an empty cache is returned as-is
	 * (it means the scheduled refresh task has not completed yet).
	 * FIXME: tune the scheduled-task interval to the actual generation time!
	 *
	 * @param clusterID 选中的集群id
	 * @return 该clusterID的主题情况
	 * @author lizhong
	 * @date 2023年8月4日
	 */
	@ApiOperation("获取所有主题消费情况的列表，首页展示(Kafka的!)")
	@GetMapping("/monitor/kafka")
	public R<PageResult> list(@ApiParam(required = true) @RequestParam String clusterID) {
		try {
			log.info("首页展示所有主题消费情况");
			// 2023-08-02 09:48: return the cache immediately — no busy-wait, empty is OK
			List<LagStatVO> lagStatVOListCache = kafkaMonitorService.getLagStatVOListCache(clusterID);
			// FIX: size() == 0 -> isEmpty()
			log.info(lagStatVOListCache.isEmpty() ?
				"读到的所有主题消费情况缓存为空，定时任务未完成" :
				"读到的所有主题消费情况缓存正常，定时任务完成");
			return R.data(new PageResult(lagStatVOListCache.size(), lagStatVOListCache));
		} catch (Exception e) {
			log.error("获取所有主题消费情况的列表错误, ", e);
		}
		return R.fail("获取集群" + clusterID + "所有主题消费情况的列表错误! 可能是远程kafka服务器的问题");
	}

	/**
	 * 获取一个(topic, partition, group)下的所有堆积消息 Kafka
	 *
	 * @param lagRecordQueryDTO (topic, broker, consumerGroup)
	 * @return 所有堆积消息
	 * @author lizhong
	 */
	@ApiOperation("查看堆积详情按钮，获取所有堆积消息 Kafka")
	@GetMapping("/monitor/getLagRecordKafka")
	public R<PageResult> getLagRecordListKafka(LagRecordQueryDTO lagRecordQueryDTO) {
		try {
			log.info("堆积消息查询: {}", lagRecordQueryDTO);
			return R.data(kafkaMonitorService.getLagRecordListKafka(lagRecordQueryDTO));
		} catch (Exception e) {
			log.error("获取堆积消息错误, ", e);
		}
		return R.fail("获取堆积消息错误! Please check");
	}


	/**
	 * Declare a direct exchange, bind an anonymous queue with routing key
	 * "track_data", and attach an auto-ack console-printing consumer.
	 * FIXME: every call creates a new Channel that is never closed — the
	 * consumer is long-lived by design, but repeated calls leak channels.
	 *
	 * @param exchangeName the exchange to bind to
	 */
	private static void asyncGet(String exchangeName) {
		try {
			String workerName = Thread.currentThread().getName();
			Connection connection = getConnection();
			Channel channel = connection.createChannel();
			channel.exchangeDeclare(exchangeName, "direct", true);
			String queueName = channel.queueDeclare().getQueue();
			channel.queueBind(queueName, exchangeName, "track_data");

			channel.basicConsume(queueName,
				true,
				new DefaultConsumer(channel) {
					@Override
					public void handleConsumeOk(String consumerTag) {
						System.out.printf("消费者%s - 启动成功%n", workerName);
					}

					@Override
					public void handleDelivery(String consumerTag, Envelope envelope, AMQP.BasicProperties properties, byte[] body) throws IOException {
						System.out.printf("消息者%s - 获取消息：%s,路由：%s%n", workerName, new String(body), envelope.getRoutingKey());
					}
				});
		} catch (Exception e) {
			// FIX: was e.printStackTrace() — log with full stack trace instead
			log.error("rabbitmq consume error,", e);
		}
	}

	// Lazily-created shared RabbitMQ connection; guarded by getConnection().
	private static Connection connection;

	/**
	 * Lazily create (and cache) the shared RabbitMQ connection.
	 * FIX: synchronized — the unguarded lazy init could race and leak
	 * extra connections when called from multiple request threads.
	 * SECURITY NOTE(review): host/port/credentials are hard-coded;
	 * move them to externalized configuration.
	 *
	 * @return the shared connection
	 * @throws Exception if the connection cannot be established
	 */
	public static synchronized Connection getConnection() throws Exception {
		if (null == connection) {
			ConnectionFactory factory = new ConnectionFactory();
			// broker address
			factory.setHost("192.168.2.30");
			// broker port; defaults to 5672 when unset
			factory.setPort(5672);
			// account
			factory.setUsername("htzy");
			// password
			factory.setPassword("htzy");
			// virtual host
			factory.setVirtualHost("my_vhost");
			connection = factory.newConnection();
		}
		return connection;
	}

}
