// KafkaDataProducer.java
package com.lhd.app.producer;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Layer 1: synthetic data generation and Kafka producer.
 *
 * <p>Generates random pet-behavior records, publishes each one as a JSON string
 * to the {@code pet_behavior_topic} Kafka topic, and mirrors the same rows into
 * a local CSV file (UTF-8) for offline inspection.
 */
public class KafkaDataProducer {
    private static final String KAFKA_BOOTSTRAP_SERVERS = "hadoop102:9092";
    private static final String TOPIC_NAME = "pet_behavior_topic";

    /** Behavior type -> base weight (购买/buy=5, 加购/add-to-cart=3, 收藏/favorite=2, 浏览/view=1). */
    private static final Map<String, Integer> BEHAVIOR_WEIGHT = new HashMap<>();
    /** Pet-type / product-category combinations records are sampled from. */
    private static final List<Map<String, Object>> PET_CATEGORY_CONFIG = new ArrayList<>();
    /** Behavior timestamps are spread uniformly over this many days before the anchor date. */
    private static final int TIME_WINDOW_DAYS = 180;
    /** Cached once: DateTimeFormatter is immutable and thread-safe, no need to rebuild per record. */
    private static final DateTimeFormatter BEHAVIOR_TIME_FORMAT =
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
    /** Snapshot of the behavior types so a fresh list is not allocated for every record. */
    private static final List<String> BEHAVIOR_TYPES;

    static {
        // Initialize behavior weights.
        BEHAVIOR_WEIGHT.put("购买", 5);
        BEHAVIOR_WEIGHT.put("加购", 3);
        BEHAVIOR_WEIGHT.put("收藏", 2);
        BEHAVIOR_WEIGHT.put("浏览", 1);
        BEHAVIOR_TYPES = new ArrayList<>(BEHAVIOR_WEIGHT.keySet());

        // Initialize the pet-type -> product-category mapping.
        addPetCategoryConfig("狗", "狗狗", false);
        addPetCategoryConfig("狗", "狗零食", false);
        addPetCategoryConfig("狗", "犬主粮", false);
        addPetCategoryConfig("猫", "猫咪", false);
        addPetCategoryConfig("猫", "猫零食", false);
        addPetCategoryConfig("猫", "猫主粮", false);
        addPetCategoryConfig("通用", "猫/狗玩具", true);
        addPetCategoryConfig("通用", "猫/狗保健品", true);
        addPetCategoryConfig("仓鼠", "仓鼠类及其它小宠>仓鼠", false);
        addPetCategoryConfig("鸟类", "鸟类及用品>鸟粮", false);
    }

    /**
     * Registers one pet-type / category combination in {@link #PET_CATEGORY_CONFIG}.
     *
     * @param petType  pet species label (e.g. "狗", "猫", "通用")
     * @param category product category, possibly a ">"-separated path
     * @param isCommon whether the category applies to multiple pet types
     */
    private static void addPetCategoryConfig(String petType, String category, boolean isCommon) {
        Map<String, Object> config = new HashMap<>();
        config.put("petType", petType);
        config.put("category", category);
        config.put("isCommon", isCommon);
        PET_CATEGORY_CONFIG.add(config);
    }

    /** Builds a deterministic user id of the form "U100000NN" from the record index. */
    private static String generateUserId(int index) {
        return "U" + (10000000 + index);
    }

    /**
     * Picks a random timestamp within {@link #TIME_WINDOW_DAYS} days before the anchor date.
     *
     * @param currentDate anchor date the window counts back from
     * @param random      shared random source
     * @return a timestamp with random day offset and random time of day
     */
    private static LocalDateTime generateBehaviorTime(LocalDateTime currentDate, Random random) {
        int daysAgo = random.nextInt(TIME_WINDOW_DAYS + 1);
        return currentDate.minusDays(daysAgo)
                .withHour(random.nextInt(24))
                .withMinute(random.nextInt(60))
                .withSecond(random.nextInt(60));
    }

    /**
     * Composes a product title from a marketing prefix, the category leaf name,
     * and (when an age stage is present) an age-targeted keyword.
     *
     * @param category product category, possibly a ">"-separated path; only the leaf is used
     * @param ageStage age-stage label, or {@code null}/empty when not applicable
     * @param random   shared random source (fix: previously a new Random was created per call)
     */
    private static String generateProductTitle(String category, String ageStage, Random random) {
        List<String> prefixes = Arrays.asList("优质", "新品", "热销", "特惠");
        String prefix = prefixes.get(random.nextInt(prefixes.size()));

        // Use only the leaf of a ">"-separated category path.
        String categoryName = category.contains(">") ? category.split(">")[1] : category;

        if (ageStage != null && !ageStage.isEmpty()) {
            String ageKeyword = ageStage.contains("幼年") ? "幼宠专用" :
                    ageStage.contains("成年") ? "成宠专用" : "老年专用";
            return String.format("%s%s%s", prefix, categoryName, ageKeyword);
        }
        return String.format("%s%s", prefix, categoryName);
    }

    /**
     * Generates a single behavior record as a flat JSON object string.
     *
     * <p>Note: none of the generated values contain quotes, braces or commas, so
     * the hand-built JSON stays well-formed and round-trips through
     * {@link #parseSimpleJson(String)}.
     *
     * @param index  record index, used for the record/user ids
     * @param random shared random source
     * @return JSON string with record_id, user_id, behavior fields, product fields, etc.
     */
    private static String generateBehaviorRecord(int index, Random random) {
        // Fixed anchor date so the 180-day window is reproducible.
        LocalDateTime currentDate = LocalDateTime.of(2025, 5, 1, 0, 0, 0);

        String userId = generateUserId(index);
        LocalDateTime behaviorTime = generateBehaviorTime(currentDate, random);
        String behaviorType = BEHAVIOR_TYPES.get(random.nextInt(BEHAVIOR_TYPES.size()));
        Map<String, Object> categoryConfig = PET_CATEGORY_CONFIG.get(random.nextInt(PET_CATEGORY_CONFIG.size()));

        String petType = (String) categoryConfig.get("petType");
        String category = (String) categoryConfig.get("category");
        boolean isCommon = (boolean) categoryConfig.get("isCommon");

        // Only cats and dogs get an age stage; other pets leave it null.
        String ageStage = null;
        if (("狗".equals(petType) || "猫".equals(petType))) {
            List<String> ageStages = Arrays.asList("幼年(0-1岁)", "成年(1-7岁)", "老年(7岁+)");
            ageStage = ageStages.get(random.nextInt(ageStages.size()));
        }

        String productTitle = generateProductTitle(category, ageStage, random);
        int baseWeight = BEHAVIOR_WEIGHT.get(behaviorType);

        // Build the JSON record by hand (no JSON library dependency).
        StringBuilder json = new StringBuilder();
        json.append("{");
        json.append("\"record_id\":\"").append("REC_").append(System.currentTimeMillis()).append("_").append(index).append("\",");
        json.append("\"user_id\":\"").append(userId).append("\",");
        json.append("\"behavior_type\":\"").append(behaviorType).append("\",");
        json.append("\"behavior_time\":\"").append(behaviorTime.format(BEHAVIOR_TIME_FORMAT)).append("\",");
        json.append("\"product_category\":\"").append(category).append("\",");
        json.append("\"product_title\":\"").append(productTitle).append("\",");
        json.append("\"base_behavior_weight\":").append(baseWeight).append(",");
        json.append("\"pet_type\":\"").append(petType).append("\",");
        json.append("\"is_common_category\":").append(isCommon).append(",");
        json.append("\"age_stage\":").append(ageStage != null ? "\"" + ageStage + "\"" : "null");
        json.append("}");

        return json.toString();
    }

    /**
     * Generates {@code numRecords} records, sends each to Kafka, and mirrors them
     * into a CSV file at {@code csvFilePath}.
     *
     * <p>Fixes over the previous version: the CSV is written in UTF-8 explicitly
     * (FileWriter used the platform default charset, corrupting the Chinese text
     * on non-UTF-8 platforms), and the CSV's actual parent directory is created
     * instead of a hard-coded {@code data} directory.
     *
     * @param numRecords  number of records to generate
     * @param csvFilePath output CSV path; its parent directory is created if missing
     */
    public static void generateAndSendData(int numRecords, String csvFilePath) {
        // Kafka producer configuration.
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_BOOTSTRAP_SERVERS);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        // acks=1: leader acknowledgement only — throughput over durability.
        props.put(ProducerConfig.ACKS_CONFIG, "1");

        AtomicInteger successCount = new AtomicInteger(0);

        try {
            // Create the output file's real parent directory (not a hard-coded "data").
            Path csvPath = Paths.get(csvFilePath);
            Path parentDir = csvPath.getParent();
            if (parentDir != null) {
                Files.createDirectories(parentDir);
            }

            try (KafkaProducer<String, String> producer = new KafkaProducer<>(props);
                 BufferedWriter csvWriter = Files.newBufferedWriter(csvPath, StandardCharsets.UTF_8)) {

                // Write the CSV header.
                csvWriter.write("record_id,user_id,behavior_type,behavior_time,product_category,product_title,base_behavior_weight,pet_type,is_common_category,age_stage\n");

                Random random = new Random();

                for (int i = 0; i < numRecords; i++) {
                    try {
                        String jsonRecord = generateBehaviorRecord(i, random);

                        // Send to Kafka asynchronously; the callback tallies successes.
                        ProducerRecord<String, String> kafkaRecord =
                            new ProducerRecord<>(TOPIC_NAME, String.valueOf(i), jsonRecord);
                        producer.send(kafkaRecord, (metadata, exception) -> {
                            if (exception == null) {
                                successCount.incrementAndGet();
                            } else {
                                System.err.println("发送失败: " + exception.getMessage());
                            }
                        });

                        // Parse the generated JSON back into fields and append a CSV row.
                        // Values are drawn from fixed vocabularies and contain no commas,
                        // so no CSV quoting is required.
                        Map<String, String> recordMap = parseSimpleJson(jsonRecord);
                        csvWriter.write(String.format(
                            "%s,%s,%s,%s,%s,%s,%s,%s,%s,%s\n",
                            recordMap.get("record_id"),
                            recordMap.get("user_id"),
                            recordMap.get("behavior_type"),
                            recordMap.get("behavior_time"),
                            recordMap.get("product_category"),
                            recordMap.get("product_title"),
                            recordMap.get("base_behavior_weight"),
                            recordMap.get("pet_type"),
                            recordMap.get("is_common_category"),
                            recordMap.get("age_stage") != null ? recordMap.get("age_stage") : ""
                        ));

                        // Periodic flush + progress report (skip i == 0, which previously
                        // printed a misleading "0 records" message after the first record).
                        if (i > 0 && i % 1000 == 0) {
                            producer.flush();
                            csvWriter.flush();
                            System.out.println("已生成 " + i + " 条记录");
                        }

                    } catch (Exception e) {
                        // Best-effort: skip the failed record and keep generating.
                        System.err.println("生成第 " + i + " 条记录失败: " + e.getMessage());
                    }
                }

                // Final flush: producer.flush() blocks until in-flight sends complete,
                // so successCount is accurate when printed below.
                producer.flush();
                csvWriter.flush();

                System.out.println("数据生成完成！");
                System.out.println("成功发送到Kafka: " + successCount.get() + " 条记录");
                System.out.println("Kafka主题: " + TOPIC_NAME);
                System.out.println("原始数据CSV: " + csvFilePath);
                System.out.println("总记录数: " + numRecords);
            }

        } catch (IOException e) {
            System.err.println("数据生成失败: " + e.getMessage());
            e.printStackTrace();
        }
    }

    /**
     * Minimal parser for the flat JSON produced by {@link #generateBehaviorRecord(int, Random)}.
     *
     * <p>It strips braces and quotes, then splits on "," and the first ":" of each
     * pair. This only works because the generated values never contain those
     * characters — do not reuse for arbitrary JSON.
     *
     * @param json flat JSON object string
     * @return field name -> field value; JSON {@code null} becomes an empty string
     */
    private static Map<String, String> parseSimpleJson(String json) {
        Map<String, String> map = new HashMap<>();
        try {
            String cleanJson = json.replace("{", "").replace("}", "").replace("\"", "");
            String[] pairs = cleanJson.split(",");
            for (String pair : pairs) {
                // Limit 2 keeps timestamps like "12:34:56" intact in the value part.
                String[] keyValue = pair.split(":", 2);
                if (keyValue.length == 2) {
                    String key = keyValue[0].trim();
                    String value = keyValue[1].trim();
                    // Map JSON null (age_stage for non-cat/dog pets) to an empty string.
                    if ("null".equals(value)) {
                        value = "";
                    }
                    map.put(key, value);
                }
            }
        } catch (Exception e) {
            System.err.println("JSON解析失败: " + json);
        }
        return map;
    }

    /** Entry point: generates 10,000 test records (adjust as needed). */
    public static void main(String[] args) {
        int numRecords = 10000;
        String csvFilePath = "data/原始宠物行为数据.csv";

        System.out.println("开始生成宠物行为数据...");
        System.out.println("目标记录数: " + numRecords);
        System.out.println("Kafka服务器: " + KAFKA_BOOTSTRAP_SERVERS);

        generateAndSendData(numRecords, csvFilePath);
    }
}