package com.example.lightenergypolymerize.controller;

import com.example.lightenergypolymerize.util.AppVariable;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import jakarta.annotation.Resource;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

@RestController
@RequestMapping("/kafka")
public class KafkaController {
    // Kafka topic that Canal publishes MySQL binlog change events to.
    private static final String CANAL_TOPIC = "community-canal-to-kafka";
    // Only changes to this database/table trigger cache invalidation.
    private static final String TARGET_DATABASE = "community";
    private static final String TARGET_TABLE = "answer";

    @Resource
    private ObjectMapper objectMapper;
    @Resource
    private RedisTemplate<String, Object> redisTemplate;

    /**
     * Consumes a Canal binlog message and invalidates the Redis list cache
     * for every changed row of {@code community.answer}.
     *
     * @param data           raw JSON payload of the Canal message
     * @param acknowledgment manual-ack handle; acknowledged only after successful processing,
     *                       so a failed message is redelivered
     * @throws JsonProcessingException if the payload is not valid JSON
     */
    @KafkaListener(topics = {CANAL_TOPIC})
    public void canalListen(String data, Acknowledgment acknowledgment) throws JsonProcessingException {
        // TypeReference avoids the raw-type/unchecked readValue(..., HashMap.class).
        Map<String, Object> message =
                objectMapper.readValue(data, new TypeReference<Map<String, Object>>() {});
        if (isAnswerTableChange(message)) {
            invalidateAnswerCaches(message);
        }
        // Manual acknowledgment: only ack once processing has completed.
        acknowledgment.acknowledge();
    }

    /** Returns true when the Canal event targets {@code community.answer}. */
    private boolean isAnswerTableChange(Map<String, Object> message) {
        // Constant-first equals with String.valueOf: no NPE when "database"/"table"
        // are absent (the original map.get(...).toString() would throw).
        return TARGET_DATABASE.equals(String.valueOf(message.get("database")))
                && TARGET_TABLE.equals(String.valueOf(message.get("table")));
    }

    /** Deletes the cached list entry for each changed row in the Canal event. */
    @SuppressWarnings("unchecked") // Canal "data" is a JSON array of row objects
    private void invalidateAnswerCaches(Map<String, Object> message) {
        // Canal DDL events carry "data": null — nothing to invalidate then.
        List<Map<String, Object>> rows = (List<Map<String, Object>>) message.get("data");
        if (rows == null || rows.isEmpty()) {
            return;
        }
        for (Map<String, Object> row : rows) {
            String cacheKey = AppVariable.getListCacheKey(
                    Long.parseLong(String.valueOf(row.get("uid"))),
                    Integer.parseInt(String.valueOf(row.get("model"))),
                    Integer.parseInt(String.valueOf(row.get("type"))));
            // The original called opsForValue().set(cacheKey, null), but RedisTemplate
            // value serializers do not accept null values; deleting the key is the
            // correct way to invalidate the cache.
            redisTemplate.delete(cacheKey);
        }
    }
}
