package com.bw.gmall.realtime.utils;

import com.alibaba.fastjson.JSONObject;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;

/**
 * Test-data generator: produces large batches of synthetic traffic-log records
 * (page views plus optional user actions) and sends them to Kafka, for filling
 * a ClickHouse table with realistic-looking data.
 */
public class DataGeneratorUtil {

    // ---- Candidate value pools for the randomly generated fields ----

    /** Region codes (Chinese administrative division codes). */
    private static final String[] REGIONS =
            {"310000", "320000", "330000", "340000", "350000", "410000", "420000", "430000"};
    /** Device brands. */
    private static final String[] BRANDS =
            {"iPhone", "Samsung", "Xiaomi", "OPPO", "vivo", "Huawei", "Apple", "Lenovo"};
    /** Distribution channels. */
    private static final String[] CHANNELS =
            {"Appstore", "HUAWEI", "OPPO", "vivo", "Xiaomi", "Tencent", "Baidu", "Alibaba", "pc_web", "mobile_web"};
    /** Device models. */
    private static final String[] MODELS =
            {"iPhone XS", "Galaxy S21", "Mi 11", "Reno6", "X70", "P50", "Mate40", "Nova9", "ThinkPad", "MacBook Pro"};
    /** Operating system versions. */
    private static final String[] OS_LIST =
            {"iOS 14.5", "iOS 15.0", "Android 11", "Android 12", "Android 13", "HarmonyOS 2.0", "Windows 10", "macOS Monterey"};
    /** App version codes. */
    private static final String[] VERSIONS = {"v2.1.1", "v2.1.0", "v2.0.9", "v3.0.0", "v3.0.1"};
    /** Page identifiers. */
    private static final String[] PAGE_IDS =
            {"good_detail", "home", "category", "search", "cart", "order_confirm", "payment", "user_center"};
    /** Previous-page identifiers; "" and null both model "no previous page". */
    private static final String[] LAST_PAGE_IDS =
            {"home", "category", "search", "recommend", "cart", "", null};
    /** Traffic source types. */
    private static final String[] SOURCE_TYPES =
            {"promotion", "personal", "search", "home", "recommend", "category", "activity"};
    /** User action types. */
    private static final String[] ACTION_TYPES = {"addcart", "favor", "click", "view", "share"};

    /** Inclusive product (SKU) id range. */
    private static final int PRODUCT_ID_MIN = 1000;
    private static final int PRODUCT_ID_MAX = 9999;

    /** Utility class — no instances. */
    private DataGeneratorUtil() {
    }

    /**
     * Generates {@code timePointCount * dataCount} synthetic page-view log records and
     * sends them to the given Kafka topic. Time points start at "now" and are spaced
     * 10 seconds apart; every record at a given time point shares that timestamp.
     *
     * @param dataCount      number of records to generate per time point
     * @param timePointCount number of time points (each 10 s after the previous)
     * @param topic          target Kafka topic
     */
    public static void generateLargeTestData(int dataCount, int timePointCount, String topic) {
        ArrayList<JSONObject> jsonObjects = new ArrayList<>();
        Random random = new Random();
        long baseTs = System.currentTimeMillis();

        for (int t = 0; t < timePointCount; t++) {
            // 10-second gap between time points; long arithmetic avoids int overflow
            // for very large timePointCount values.
            long currentTs = baseTs + t * 10_000L;

            for (int i = 0; i < dataCount; i++) {
                JSONObject jsonObj = new JSONObject();
                jsonObj.put("common", buildCommon(random));
                jsonObj.put("page", buildPage(random));
                jsonObj.put("ts", currentTs);

                // ~80% of records carry an "actions" array, biased toward
                // add-to-cart and favorite events.
                if (random.nextDouble() < 0.8) {
                    jsonObj.put("actions", buildActions(random, currentTs));
                }

                jsonObjects.add(jsonObj);
            }
        }

        sendToKafka(jsonObjects, topic);
    }

    /** Builds the "common" block: device, user and channel attributes. */
    private static JSONObject buildCommon(Random random) {
        JSONObject common = new JSONObject();
        common.put("ar", REGIONS[random.nextInt(REGIONS.length)]);
        common.put("ba", BRANDS[random.nextInt(BRANDS.length)]);
        common.put("ch", CHANNELS[random.nextInt(CHANNELS.length)]);
        common.put("is_new", random.nextBoolean() ? "1" : "0");
        common.put("md", MODELS[random.nextInt(MODELS.length)]);
        common.put("mid", "mid_" + (random.nextInt(100000) + 1));
        common.put("os", OS_LIST[random.nextInt(OS_LIST.length)]);
        common.put("uid", "user_" + (random.nextInt(5000) + 1)); // 5000 distinct user ids
        common.put("vc", VERSIONS[random.nextInt(VERSIONS.length)]);
        common.put("shop_id", random.nextInt(10) + 1);   // shop ids 1-10
        common.put("device_type", random.nextInt(2));    // 0 = phone, 1 = tablet
        return common;
    }

    /** Builds the "page" block: which page was viewed, for how long, and where from. */
    private static JSONObject buildPage(Random random) {
        JSONObject page = new JSONObject();
        page.put("during_time", random.nextInt(30000) + 1000); // dwell time: 1-31 s
        page.put("item", randomSkuId(random));
        page.put("item_type", "sku_id");
        String lastPage = LAST_PAGE_IDS[random.nextInt(LAST_PAGE_IDS.length)];
        if (lastPage != null) {
            // Omit the key entirely when there is no previous page.
            page.put("last_page_id", lastPage);
        }
        page.put("page_id", PAGE_IDS[random.nextInt(PAGE_IDS.length)]);
        page.put("source_type", SOURCE_TYPES[random.nextInt(SOURCE_TYPES.length)]);
        return page;
    }

    /**
     * Builds 1-3 random user actions for one record, biased toward
     * "addcart" (30%) and "favor" (20%); the remaining 50% is drawn
     * uniformly from {@link #ACTION_TYPES}.
     */
    private static ArrayList<JSONObject> buildActions(Random random, long currentTs) {
        ArrayList<JSONObject> actions = new ArrayList<>();
        int actionCount = random.nextInt(3) + 1; // 1-3 actions
        for (int j = 0; j < actionCount; j++) {
            JSONObject action = new JSONObject();
            action.put("action_id", "action_" + (random.nextInt(10) + 1));
            action.put("item", randomSkuId(random));
            action.put("item_type", "sku_id");
            // Action timestamp is offset up to 30 s after the record timestamp.
            action.put("ts", currentTs + random.nextInt(30000));

            double actionProbability = random.nextDouble();
            if (actionProbability < 0.3) {
                action.put("action", "addcart");
            } else if (actionProbability < 0.5) {
                action.put("action", "favor");
            } else {
                action.put("action", ACTION_TYPES[random.nextInt(ACTION_TYPES.length)]);
            }

            actions.add(action);
        }
        return actions;
    }

    /** Returns a random product (SKU) id in [PRODUCT_ID_MIN, PRODUCT_ID_MAX], as a string. */
    private static String randomSkuId(Random random) {
        return String.valueOf(PRODUCT_ID_MIN + random.nextInt(PRODUCT_ID_MAX - PRODUCT_ID_MIN + 1));
    }

    /**
     * Sends the given records to Kafka as JSON strings, logging progress every 1000
     * records and pausing briefly between batches to ease pressure on the broker.
     * The producer is closed (and flushed) via try-with-resources even on failure.
     */
    private static void sendToKafka(List<JSONObject> jsonObjects, String topic) {
        Map<String, Object> config = new HashMap<>();
        config.put("bootstrap.servers", "hadoop102:9092");
        config.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        config.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        try (Producer<String, String> producer = new KafkaProducer<>(config)) {
            int batchSize = 1000;
            int sentCount = 0;

            for (JSONObject jsonObject : jsonObjects) {
                producer.send(new ProducerRecord<>(topic, jsonObject.toJSONString()));
                sentCount++;

                // Report progress every 1000 records and sleep briefly.
                if (sentCount % batchSize == 0) {
                    System.out.println("已发送" + sentCount + "条数据...");
                    Thread.sleep(100);
                }
            }

            System.out.println("数据发送完成，共发送" + sentCount + "条数据到主题" + topic);
        } catch (InterruptedException e) {
            // Restore the interrupt flag instead of swallowing it.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        } catch (Exception e) {
            // Best-effort sender: log and return, matching the original contract
            // that this method never throws to its caller.
            e.printStackTrace();
        }
    }

    /**
     * Convenience entry point: generates 100 time points x 10,000 records each
     * (1,000,000 records total, spanning roughly 16.5 minutes of event time)
     * and sends them to the "ods_traffic" topic.
     */
    public static void main(String[] args) {
        // arg 1: records per time point; arg 2: time points (10 s apart); arg 3: topic
        generateLargeTestData(10000, 100, "ods_traffic");
    }
}