package com.sdp.irrigation.storage.kafka.consumer;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;
import java.util.stream.Collectors;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.listener.ConsumerSeekAware;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Component;

import com.sdp.irrigation.bean.BeanFactory;
import com.sdp.irrigation.kafka.biz.impl.HeartBiz;
import com.sdp.irrigation.kafka.biz.impl.LoginBiz;
import com.sdp.irrigation.kafka.biz.impl.QueryBiz;
import com.sdp.irrigation.kafka.biz.impl.ReportBiz;
import com.sdp.irrigation.kafka.biz.impl.SetBiz;
import com.sdp.irrigation.kafka.biz.impl.UserBiz;
import com.sdp.irrigation.kafka.biz.impl.VersionBiz;
import com.sdp.irrigation.protocol.CommPackage;
import com.sdp.irrigation.utils.JsonUtils;

import cn.hutool.core.codec.Base64;
import lombok.extern.slf4j.Slf4j;

/**
 * Batch Kafka consumer for the storage service.
 *
 * <p>Each {@code @KafkaListener} method receives a batch of Base64-encoded JSON
 * messages from one upstream topic ({@code ${sdp.server.up-topic-prefix}} + suffix),
 * decodes them into {@link CommPackage} objects, and hands the batch to the
 * matching Biz bean. Offsets are acknowledged manually and unconditionally in a
 * {@code finally} block, so a failed batch is logged and skipped rather than
 * redelivered (at-most-once processing — intentional per original code).
 *
 * <p>NOTE(review): {@link ConsumerSeekAware} is implemented but no callback is
 * overridden (the defaults are no-ops); kept to preserve the public contract.
 */
@Slf4j
@Component
public class KafkaBatchConsumer implements ConsumerSeekAware {

	/**
	 * Standalone maintenance utility: fast-forwards every partition of a topic
	 * to its latest offset for the {@code storage-consumer} group and commits,
	 * discarding any backlog.
	 *
	 * <p>NOTE(review): a {@code main()} with a hard-coded broker address inside
	 * a Spring component is unusual — consider moving it to a dedicated ops tool.
	 *
	 * @param args unused
	 */
	public static void main(String[] args) {
		String topic = "sdp-up-0-heart"; // topic to fast-forward
		String bootstrapServers = "110.41.14.225:9900"; // Kafka broker address

		// Consumer configuration: manual offset management only.
		Map<String, Object> consumerProps = new HashMap<>();
		consumerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
		consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, "storage-consumer");
		consumerProps.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false"); // we commit explicitly below

		// try-with-resources guarantees the consumer is closed even if a call throws
		// (the original leaked the consumer on any exception before close()).
		try (KafkaConsumer<String, String> consumer =
				new KafkaConsumer<>(consumerProps, new StringDeserializer(), new StringDeserializer())) {

			// Manual partition assignment (no group subscription) so we may seek freely.
			List<TopicPartition> partitions = consumer.partitionsFor(topic).stream()
					.map(p -> new TopicPartition(topic, p.partition()))
					.collect(Collectors.toList());
			consumer.assign(partitions);

			// Move every partition's position to its end offset...
			Map<TopicPartition, Long> endOffsets = consumer.endOffsets(partitions);
			endOffsets.forEach((partition, offset) -> {
				consumer.seek(partition, offset);
				System.out.printf("Partition %d set to offset %d%n", partition.partition(), offset);
			});
			// ...then commit all positions in a single call. The original called
			// commitSync() once per partition inside the loop, repeatedly committing
			// a partially-seeked state.
			consumer.commitSync();
		}
	}

	/** Persists equipment status reports ("report" topic). */
	@KafkaListener(topics = "${sdp.server.up-topic-prefix}" + "report")
	public void eqStatusLog(List<String> records, Acknowledgment ack) {
		consume(records, ack, list -> BeanFactory.getBean(ReportBiz.class).execute(list));
	}

	/** Persists equipment user messages ("user" topic). */
	@KafkaListener(topics = "${sdp.server.up-topic-prefix}" + "user")
	public void eqUserLog(List<String> records, Acknowledgment ack) {
		consume(records, ack, list -> BeanFactory.getBean(UserBiz.class).execute(list));
	}

	/** Persists login messages ("login" topic). */
	@KafkaListener(topics = "${sdp.server.up-topic-prefix}" + "login")
	public void loginLog(List<String> records, Acknowledgment ack) {
		consume(records, ack, list -> BeanFactory.getBean(LoginBiz.class).execute(list));
	}

	/** Persists heartbeat messages ("heart" topic). */
	@KafkaListener(topics = "${sdp.server.up-topic-prefix}" + "heart")
	public void heartLog(List<String> records, Acknowledgment ack) {
		consume(records, ack, list -> BeanFactory.getBean(HeartBiz.class).execute(list));
	}

	/** Persists query responses ("query" topic). */
	@KafkaListener(topics = "${sdp.server.up-topic-prefix}" + "query")
	public void queryLog(List<String> records, Acknowledgment ack) {
		consume(records, ack, list -> BeanFactory.getBean(QueryBiz.class).execute(list));
	}

	/** Persists version messages ("version" topic). */
	@KafkaListener(topics = "${sdp.server.up-topic-prefix}" + "version")
	public void versionLog(List<String> records, Acknowledgment ack) {
		consume(records, ack, list -> BeanFactory.getBean(VersionBiz.class).execute(list));
	}

	/** Persists set-command responses ("set" topic). */
	@KafkaListener(topics = "${sdp.server.up-topic-prefix}" + "set")
	public void setLog(List<String> records, Acknowledgment ack) {
		consume(records, ack, list -> BeanFactory.getBean(SetBiz.class).execute(list));
	}

	/**
	 * Shared pipeline for all listeners: decode each Base64-encoded JSON message
	 * into a {@link CommPackage}, pass the whole batch to {@code handler}, and
	 * always acknowledge.
	 *
	 * @param records raw Base64 payloads from the broker (may be empty)
	 * @param ack     manual acknowledgment for the batch
	 * @param handler batch processor (typically a Biz bean's {@code execute})
	 */
	private void consume(List<String> records, Acknowledgment ack, Consumer<List<CommPackage>> handler) {
		try {
			if (!records.isEmpty()) {
				List<CommPackage> packageList = new ArrayList<>(records.size());
				for (String msg : records) {
					packageList.add(JsonUtils.parseBean(Base64.decodeStr(msg), CommPackage.class));
				}
				handler.accept(packageList);
			}
		} catch (Exception e) {
			printErrorLog(e);
		} finally {
			// Acknowledge unconditionally — matches original behavior: the offset is
			// committed even when processing threw, so bad batches are dropped.
			ack.acknowledge();
		}
	}

	/**
	 * Logs the exception with its full stack trace as a single log event.
	 * (The original logged only the message plus one event per stack frame,
	 * which interleaves badly under concurrent listeners.)
	 */
	private void printErrorLog(Exception e) {
		log.error("错误消息为：{}", e.getMessage(), e);
	}

}
