package org.zoomdev.zoom.hash;

import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.LongSummaryStatistics;
import java.util.Map;
import java.util.Random;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

public class DataGenerator {

    private static final Random random = new Random();

    /**
     * Generates {@code totalCount} Data items spread over the user ID range
     * [fromUserId, toUserId] according to randomized per-user weights, pushing
     * every generated item to the supplied visitor.
     *
     * @param fromUserId first user ID (inclusive)
     * @param toUserId   last user ID (inclusive); must be {@code >= fromUserId}
     * @param totalCount total number of items to generate; must be positive and
     *                   at most {@code Integer.MAX_VALUE}
     * @param visitor    sink that receives every generated Data item
     * @return statistics about the generated data, including the per-node
     *         distribution when the visitor tracks one
     * @throws IllegalArgumentException if the ID range or count is invalid
     */
    public static Statistics generateData(int fromUserId, int toUserId,
                                          long totalCount, Visitor<Data> visitor) {
        // Parameter validation.
        if (fromUserId > toUserId) throw new IllegalArgumentException("Invalid user ID range");
        if (totalCount <= 0) throw new IllegalArgumentException("Total count must be positive");
        // Per-user counts and generated IDs are ints downstream
        // (calculateDataCounts, AtomicInteger); a larger total would silently
        // overflow, so fail fast instead.
        if (totalCount > Integer.MAX_VALUE) {
            throw new IllegalArgumentException("Total count exceeds Integer.MAX_VALUE: " + totalCount);
        }

        int userCount = toUserId - fromUserId + 1;
        AtomicInteger idCounter = new AtomicInteger(1);

        // 1. Assign a random weight to each user.
        int[] weights = allocateWeights(userCount);

        // 2. Convert weights into per-user data counts summing to totalCount.
        int[] dataCounts = calculateDataCounts(weights, totalCount);

        // 3. Generate the data, notify the visitor, and collect statistics.
        return generateAndVisit(fromUserId, userCount, dataCounts, idCounter, visitor);
    }

    /**
     * Demo entry point: generates 100 million records for user IDs 1..100,000,
     * routes them across 1,000 nodes via simple modulo routing, and prints the
     * elapsed time plus a distribution-uniformity report.
     */
    public static void main(String[] args) {
        // Build the 1,000 processing nodes.
        final List<Node> nodes = IntStream.range(0, 1000)
                .mapToObj(id -> new Node(id))
                .collect(Collectors.toList());

        // Visitor that routes each record to a node by user ID modulo.
        final ModVisitor visitor = new ModVisitor(nodes);

        // Generate 100 million records for user IDs in [1, 100_000], timing the run.
        final long begin = System.currentTimeMillis();
        final Statistics stats = generateData(1, 100_000, 100_000_000, visitor);
        final long elapsed = System.currentTimeMillis() - begin;

        System.out.println("Generation completed in " + elapsed + "ms");
        System.out.println(stats);

        // Check how evenly the records landed on the nodes.
        analyzeDistribution(stats.nodeDistribution, 100_000_000);
    }

    /**
     * Prints a summary of how evenly {@code total} items were distributed
     * across nodes: min/max/average items per node (with percentage of the
     * total) and the standard deviation of the per-node counts.
     *
     * @param distribution node ID -> number of items processed by that node
     * @param total        total number of items that were distributed
     */
    public static void analyzeDistribution(Map<Integer, Long> distribution, long total) {
        System.out.println("\nDistribution analysis:");
        // Guard: with no nodes, min/max are the summary-statistics sentinels
        // (Long.MAX_VALUE / Long.MIN_VALUE) and the stddev would be NaN.
        if (distribution.isEmpty()) {
            System.out.println("  (no nodes received any data)");
            return;
        }

        LongSummaryStatistics stats = distribution.values().stream()
                .mapToLong(Long::longValue)
                .summaryStatistics();

        System.out.printf("  Node count: %d\n", distribution.size());
        System.out.printf("  Min per node: %,d (%.2f%%)\n",
                stats.getMin(), stats.getMin() * 100.0 / total);
        System.out.printf("  Max per node: %,d (%.2f%%)\n",
                stats.getMax(), stats.getMax() * 100.0 / total);
        System.out.printf("  Average: %,.1f (%.2f%%)\n",
                stats.getAverage(), stats.getAverage() * 100.0 / total);
        System.out.printf("  Standard deviation: %,.1f\n",
                calculateStdDev(distribution.values(), stats.getAverage()));
    }

    /**
     * Builds a random weight (1..10) for each of {@code userCount} users.
     * Roughly one user in ten is promoted to the maximum weight of 10 — at
     * least one promotion always happens — and every remaining slot gets a
     * uniform random weight between 1 and 9.
     *
     * NOTE: the promotion loop picks indices at random, so collisions can
     * leave fewer than userCount/10 distinct max-weight users; the contract
     * of "at least one weight-10 user" still holds.
     */
    private static int[] allocateWeights(int userCount) {
        final int[] result = new int[userCount];

        // Promote ~10% of users (minimum one) to the top weight of 10.
        final int promotions = Math.max(1, userCount / 10);
        for (int n = 0; n < promotions; n++) {
            result[random.nextInt(userCount)] = 10;
        }

        // Every slot not promoted above receives a random weight in [1, 9].
        for (int idx = 0; idx < userCount; idx++) {
            if (result[idx] == 0) {
                result[idx] = 1 + random.nextInt(9);
            }
        }

        return result;
    }

    /**
     * Splits {@code totalCount} into per-user counts proportional to the given
     * weights. The first n-1 users receive floor(weight * total / totalWeight);
     * the last user absorbs the rounding remainder so the counts sum exactly
     * to {@code totalCount}.
     *
     * @param weights    positive per-user weights (non-empty)
     * @param totalCount total number of items to distribute
     * @return per-user item counts summing to totalCount
     * @throws ArithmeticException if any single user's share does not fit in
     *                             an int (the original silently truncated)
     */
    private static int[] calculateDataCounts(int[] weights, long totalCount) {
        int userCount = weights.length;
        int[] counts = new int[userCount];
        long totalWeight = 0;

        for (int w : weights) totalWeight += w;

        long remaining = totalCount;
        for (int i = 0; i < userCount - 1; i++) {
            // weights[i] promotes to long before the multiply; toIntExact
            // fails fast instead of wrapping if a share exceeds int range.
            counts[i] = Math.toIntExact(weights[i] * totalCount / totalWeight);
            remaining -= counts[i];
        }
        // Last user takes whatever is left so the sum is exact.
        counts[userCount - 1] = Math.toIntExact(remaining);

        return counts;
    }

    /**
     * Generates each user's items in sequence, forwards every item to the
     * visitor, and assembles the final statistics.
     *
     * @param fromUserId first user ID (inclusive)
     * @param userCount  number of users
     * @param dataCounts items to generate per user, indexed parallel to users
     * @param idCounter  source of sequential item IDs (starts at 1)
     * @param visitor    sink for the generated items
     * @return statistics including the node distribution when the visitor
     *         implements NodeHolder, otherwise an empty map
     */
    private static Statistics generateAndVisit(int fromUserId, int userCount,
                                               int[] dataCounts, AtomicInteger idCounter,
                                               Visitor<Data> visitor) {
        int minDataPerUser = Integer.MAX_VALUE;
        int maxDataPerUser = 0;

        for (int i = 0; i < userCount; i++) {
            int userId = fromUserId + i;
            int count = dataCounts[i];

            // Track the smallest and largest per-user counts.
            minDataPerUser = Math.min(minDataPerUser, count);
            maxDataPerUser = Math.max(maxDataPerUser, count);

            // Generate this user's items and hand each one to the visitor.
            for (int j = 0; j < count; j++) {
                visitor.visit(new Data(idCounter.getAndIncrement(), userId));
            }
        }

        // Only some visitors track a per-node distribution; the original's
        // unconditional cast threw ClassCastException for any Visitor that is
        // not also a NodeHolder (e.g. a plain logging visitor).
        Map<Integer, Long> nodeDistribution = visitor instanceof NodeHolder
                ? ((NodeHolder) visitor).getNodeDistribution()
                : Collections.<Integer, Long>emptyMap();

        return new Statistics(
                idCounter.get() - 1L,        // total items generated
                fromUserId,
                fromUserId + userCount - 1,  // last user ID
                minDataPerUser,
                maxDataPerUser,
                nodeDistribution
        );
    }

    /**
     * Population standard deviation of {@code values} around the given mean.
     *
     * @param values per-node counts
     * @param mean   precomputed average of the values
     * @return the standard deviation, or 0.0 for an empty collection (the
     *         original returned NaN via a 0/0 division)
     */
    private static double calculateStdDev(Collection<Long> values, double mean) {
        if (values.isEmpty()) return 0.0;
        double sumOfSquares = 0;
        for (long value : values) {
            double diff = value - mean;
            // diff * diff is cheaper and avoids Math.pow's general-case path.
            sumOfSquares += diff * diff;
        }
        return Math.sqrt(sumOfSquares / values.size());
    }


    /** Implemented by visitors that can report how many items each node handled. */
    interface NodeHolder {
        /** @return node ID -> number of items processed by that node */
        Map<Integer, Long> getNodeDistribution();
    }

    /** A processing node identified by {@code id} that tallies how many Data items it has handled. */
    public static class Node {
        final int id;
        // Thread-safe count of items processed by this node.
        final AtomicLong counter = new AtomicLong();

        public Node(int id) {
            this.id = id;
        }

        /** Records one processed item; the data itself is currently unused. */
        public void process(Data data) {
            counter.incrementAndGet();
            // Actual processing logic would go here...
        }
    }

//    public static void main(String[] args) {
//        // Build the 1000 processing nodes.
//        List<Node> nodes = IntStream.range(0, 1000)
//                .mapToObj(Node::new)
//                .collect(Collectors.toList());
//
//        // Consistent-hash router (100 virtual nodes per physical node).
//        ConsistentHashRouter router = new ConsistentHashRouter(nodes, 100);
//
//        // Visitor that routes via consistent hashing.
//        ConsistentHashVisitor visitor = new ConsistentHashVisitor(router);
//
//        // Generate 100 million records with user IDs in [1, 100_000].
//        long startTime = System.currentTimeMillis();
//        Statistics stats = generateData(1, 100_000, 100_000_000, visitor);
//        long duration = System.currentTimeMillis() - startTime;
//
//        System.out.println("Generation completed in " + duration + "ms");
//        System.out.println(stats);
//
//        // Verify distribution uniformity.
//        analyzeDistribution(stats.nodeDistribution, 100_000_000);
//    }

    /**
     * Routes string keys to nodes via consistent hashing: each physical node
     * occupies {@code virtualNodeCount} positions on a hash ring, and a key is
     * served by the first node at or after the key's hash, wrapping around the
     * ring when necessary.
     */
    public static class ConsistentHashRouter {
        // Hash ring: ring position -> owning physical node.
        private final TreeMap<Long, Node> circle = new TreeMap<>();
        private final int virtualNodeCount;

        public ConsistentHashRouter(Collection<Node> nodes, int virtualNodeCount) {
            this.virtualNodeCount = virtualNodeCount;
            for (Node node : nodes) {
                addNode(node);
            }
        }

        /** Places virtualNodeCount replicas of the node on the ring. */
        public void addNode(Node node) {
            for (int i = 0; i < virtualNodeCount; i++) {
                long hash = hash("Node-" + node.id + "-Virtual-" + i);
                circle.put(hash, node);
            }
        }

        /** @return the node owning the arc containing hash(key), or null if the ring is empty */
        public Node getNode(String key) {
            if (circle.isEmpty()) return null;
            long hash = hash(key);
            // First ring position at or after the key's hash...
            Map.Entry<Long, Node> entry = circle.ceilingEntry(hash);
            if (entry == null) {
                // ...wrapping around to the start of the ring when past the end.
                entry = circle.firstEntry();
            }
            return entry.getValue();
        }

        // 32-bit FNV-style hash (xor-then-multiply, i.e. FNV-1a form) with
        // extra shift-mixing; returns the unsigned 32-bit value in [0, 2^32).
        private long hash(String key) {
            final int p = 16777619;       // FNV 32-bit prime
            int hash = (int) 2166136261L; // FNV 32-bit offset basis
            for (int i = 0; i < key.length(); i++) {
                hash = (hash ^ key.charAt(i)) * p;
            }
            // Supplemental avalanche mixing to improve bit dispersion.
            hash += hash << 13;
            hash ^= hash >> 7;
            hash += hash << 3;
            hash ^= hash >> 17;
            hash += hash << 5;
            return hash & 0xFFFFFFFFL;    // zero-extend int to unsigned long
        }
    }

    /**
     * Routes each item to a node by simple modulo on the user ID and records
     * per-node processing counts.
     */
    public static class ModVisitor implements Visitor<Data>, NodeHolder {
        private final List<Node> nodes;
        // node ID -> number of items routed to that node (thread-safe).
        private final Map<Integer, AtomicLong> nodeCounters = new ConcurrentHashMap<>();

        public ModVisitor(List<Node> nodes) {
            this.nodes = nodes;
        }

        @Override
        public void visit(Data item) {
            // Route by userId modulo the actual node count. The original
            // hard-coded 1000, which breaks for any differently sized node
            // list; floorMod also keeps the index non-negative should a
            // negative user ID ever appear.
            int seg = Math.floorMod(item.userId, nodes.size());
            Node node = nodes.get(seg);
            node.process(item);

            // Record how many items this node has handled.
            nodeCounters.computeIfAbsent(node.id, k -> new AtomicLong())
                    .incrementAndGet();
        }

        /** @return a snapshot at call time: node ID -> processed count */
        @Override
        public Map<Integer, Long> getNodeDistribution() {
            return nodeCounters.entrySet().stream()
                    .collect(Collectors.toMap(
                            Map.Entry::getKey,
                            e -> e.getValue().get()
                    ));
        }
    }

    /**
     * Routes each item through the consistent-hash ring (keyed by user ID)
     * and records per-node processing counts.
     */
    public static class ConsistentHashVisitor implements Visitor<Data>, NodeHolder {
        private final ConsistentHashRouter router;
        // node ID -> number of items routed to that node (thread-safe).
        private final Map<Integer, AtomicLong> nodeCounters = new ConcurrentHashMap<>();

        public ConsistentHashVisitor(ConsistentHashRouter router) {
            this.router = router;
        }

        @Override
        public void visit(Data data) {
            // Look up the node owning this user's position on the hash ring.
            final Node target = router.getNode("User-" + data.userId);
            target.process(data);

            // Bump the tally for the node that handled this item.
            nodeCounters.computeIfAbsent(target.id, id -> new AtomicLong())
                    .incrementAndGet();
        }

        /** @return a snapshot at call time: node ID -> processed count */
        @Override
        public Map<Integer, Long> getNodeDistribution() {
            return nodeCounters.entrySet().stream()
                    .collect(Collectors.toMap(Map.Entry::getKey, entry -> entry.getValue().get()));
        }
    }
}