// NOTE(review): this entire file is commented-out dead code. Prefer deleting it
// outright (version control preserves the history) instead of keeping it inline.
// Known defects are annotated below in case it is ever revived.
//package com.example.producer;
//
//
//import com.example.Utils.CsvReader;
//import com.example.entity.StockTrade;
//import com.opencsv.bean.CsvToBean;
//import com.opencsv.bean.CsvToBeanBuilder;
//import jakarta.annotation.PostConstruct;
//import lombok.extern.slf4j.Slf4j;
//import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
//import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.SerializationFeature;
//import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
//import org.springframework.kafka.core.KafkaTemplate;
//import org.springframework.stereotype.Service;
//
//import java.io.FileInputStream;
//import java.io.IOException;
//import java.io.InputStreamReader;
//import java.nio.charset.StandardCharsets;
//import java.util.Iterator;
//import java.util.List;
//
//@Service
//@Slf4j
//public class StockDataProducer {
//
//    private final KafkaTemplate<String, String> kafkaTemplate;
//    private final ObjectMapper objectMapper = new ObjectMapper();
//    // NOTE(review): topic name ends with a trailing hyphen ("stock-trades-"),
//    // which looks like an unfinished edit — confirm the intended topic name.
//    private static final String TOPIC = "stock-trades-";
//
//    // NOTE(review): hardcoded absolute Windows path; this should come from
//    // configuration (@Value / application.yml) rather than being baked in.
//    private String csvFilePath = "E:\\作业\\实时计算\\股票数据-样例\\股票数据2.csv";
//
//    private Iterator<StockTrade> tradeIterator;
//
//    public StockDataProducer(KafkaTemplate<String, String> kafkaTemplate) {
//        this.kafkaTemplate = kafkaTemplate;
//    }
//
//    @PostConstruct
//    public void init() {
//        objectMapper.registerModule(new JavaTimeModule());
//        objectMapper.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false);
//
//        try {
//            // Use InputStreamReader to specify the character encoding explicitly.
//            // NOTE(review): US_ASCII cannot represent the Chinese text in this
//            // CSV — it should almost certainly be StandardCharsets.UTF_8 (or GBK).
//            // Also: this CsvToBean is built but never iterated/parsed (only
//            // printed), and the InputStreamReader is never closed (resource
//            // leak — wrap in try-with-resources). The data is actually loaded
//            // via CsvReader.readCsv below, so this opencsv section is unused.
//            InputStreamReader inputStreamReader = new InputStreamReader(new FileInputStream(csvFilePath), StandardCharsets.US_ASCII);
//            CsvToBean<StockTrade> build = new CsvToBeanBuilder<StockTrade>(inputStreamReader)
//                    .withType(StockTrade.class)
//                    .build();
//            System.out.println(build);
//            List<StockTrade> trades = CsvReader.readCsv(csvFilePath);
//
//            tradeIterator = trades.iterator();
//            while(tradeIterator.hasNext()) {
//                try {
//
//                    StockTrade trade = tradeIterator.next();
//                    String message = objectMapper.writeValueAsString(trade);
//                    kafkaTemplate.send(TOPIC, message);
//
//                } catch (Exception e) {
//                    // NOTE(review): use log.error("failed to serialize/send trade", e)
//                    // instead of printStackTrace; silently continuing on a bad
//                    // record may be intended (best-effort), but log it properly.
//                    e.printStackTrace();
//                }
//            }
//            // NOTE(review): informational "producer sent messages" message is
//            // logged at WARN level — should be log.info(...).
//            log.warn("生产者发送消息了");
//        } catch (IOException e) {
//            // NOTE(review): replace printStackTrace with log.error("...", e);
//            // a failure to read the CSV here leaves the producer silently idle.
//            e.printStackTrace();
//        }
//    }
//
//}
