package com.blog.service.impl;

import com.blog.dto.ArticleDTO;
import com.blog.dto.StringMessageDTO;
import com.blog.entity.Article;
import com.blog.service.KafkaProducerService;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;

import java.util.List;

import static com.blog.utils.Staticmessage.ADDARTICLEANDES;
import static com.blog.utils.Staticmessage.INITESDATA;

@Service
@Slf4j
public class KafkaProducerServiceImpl implements KafkaProducerService {

    /** Single topic all blog messages are published to. */
    private static final String TOPIC = "blog_topic";

    // Constructor injection (instead of field injection) keeps the
    // dependencies final, immutable, and easy to supply in unit tests.
    private final KafkaTemplate<String, String> kafkaTemplate;
    private final ObjectMapper objectMapper;

    @Autowired
    public KafkaProducerServiceImpl(KafkaTemplate<String, String> kafkaTemplate,
                                    ObjectMapper objectMapper) {
        this.kafkaTemplate = kafkaTemplate;
        this.objectMapper = objectMapper;
    }

    /**
     * Serializes a batch of articles, wraps it in a {@link StringMessageDTO}
     * tagged with {@code INITESDATA}, and publishes it to {@value #TOPIC}.
     * <p>
     * Serialization failures are logged and the batch is dropped (best-effort
     * semantics, matching the original behavior). Note that
     * {@code KafkaTemplate#send} is asynchronous, so broker-side failures do
     * not surface here.
     *
     * @param batch   articles to publish as a single message
     * @param pageNum page/batch number carried inside the message envelope
     */
    @Override
    public void sendArticleBatch(List<Article> batch, Integer pageNum) {
        try {
            String message = objectMapper.writeValueAsString(batch);
            StringMessageDTO stringMessageDTO = new StringMessageDTO(message, pageNum, INITESDATA);
            String batchmessage = objectMapper.writeValueAsString(stringMessageDTO);
            log.info("第{}篇文章发送到Kafka, 大小: {}", pageNum, batch.size());
            this.kafkaTemplate.send(TOPIC, batchmessage);
        } catch (JsonProcessingException e) {
            // Best-effort: log and skip this batch rather than abort the caller.
            log.error("序列化文章批次失败", e);
        }
    }

    /**
     * Serializes a single article DTO, wraps it in a {@link StringMessageDTO}
     * tagged with {@code ADDARTICLEANDES}, and publishes it to {@value #TOPIC}.
     *
     * @param articledto the article to publish
     * @throws IllegalStateException if the payload cannot be serialized to
     *                               JSON (cause preserved); subclass of
     *                               {@code RuntimeException}, so existing
     *                               callers are unaffected
     */
    @Override
    public void addArticleandEs(ArticleDTO articledto) {
        try {
            String articleDto = objectMapper.writeValueAsString(articledto);
            StringMessageDTO stringMessageDTO = new StringMessageDTO(articleDto, null, ADDARTICLEANDES);
            String message = objectMapper.writeValueAsString(stringMessageDTO);
            this.kafkaTemplate.send(TOPIC, message);
            log.info("保存文章发送kafka中");
        } catch (JsonProcessingException e) {
            // Narrowed from Exception: only Jackson serialization throws a
            // checked exception here. Rethrow with context and the cause kept.
            throw new IllegalStateException("序列化文章失败, 无法发送到Kafka", e);
        }
    }
}