package org.zoomdev.zoom.hash;

import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.LongSummaryStatistics;
import java.util.Map;
import java.util.NavigableMap;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

public class LoadBalance {


    /**
     * Contract for a component that stores {@link Data} records across nodes and
     * can rebalance them.
     *
     * <p>NOTE(review): no implementation of this interface is visible in this
     * file — presumably implemented elsewhere; confirm before changing.
     *
     * <p>Fix: dropped the redundant {@code static} modifier — member interfaces
     * are implicitly static.
     */
    interface DataProcessor {
        /** Routes and stores a single data record. */
        void process(Data data);

        /**
         * Moves every record on {@code fromNodeId} that matches
         * {@code condition} to {@code toNodeId}.
         */
        void migrateData(int fromNodeId, int toNodeId, Predicate<Data> condition);

        /** Returns a snapshot of record counts keyed by node id. */
        Map<Integer, Long> getNodeLoadStats();

        /** Registers a new node with the processor. */
        void addNode(Node node);
    }

    /** Immutable value object: one stored record owned by a single user. */
    static class Data {
        final int id;      // unique record id
        final int userId;  // owning user; doubles as the routing key

        public Data(int id, int userId) {
            this.userId = userId;
            this.id = id;
        }
    }

    /**
     * A storage node. Records are bucketed per user id; {@code counter} mirrors
     * the total record count so load checks are O(1) instead of summing buckets.
     *
     * <p>Safe for concurrent {@link #addData} calls: the bucket map is a
     * {@link ConcurrentHashMap} and each bucket is a
     * {@link CopyOnWriteArrayList}.
     */
    static class Node {
        final int id;
        final ConcurrentHashMap<Integer, List<Data>> userData = new ConcurrentHashMap<>();
        final AtomicLong counter = new AtomicLong();

        public Node(int id) {
            this.id = id;
        }

        /** Stores one record and bumps the node-wide count. */
        public void addData(Data data) {
            userData.computeIfAbsent(data.userId, k -> new CopyOnWriteArrayList<>()).add(data);
            counter.incrementAndGet();
        }

        /**
         * Returns a read-only view of the user's records (empty if none).
         *
         * <p>Fix: previously returned the internal mutable bucket, so a caller
         * could add or remove records without {@code counter} being updated,
         * desyncing {@link #getDataCount()} from reality.
         */
        public List<Data> getUserData(int userId) {
            return Collections.unmodifiableList(userData.getOrDefault(userId, Collections.emptyList()));
        }

        /** Total record count on this node. */
        public long getDataCount() {
            return counter.get();
        }
    }

    /**
     * Consistent-hash router that can grow the cluster and migrate user buckets
     * from overloaded to underloaded nodes.
     *
     * <p>NOTE(review): migration moves a user's existing records but does NOT
     * change the hash ring, so new records for a migrated user still route to
     * the original node — a user's data can end up split across nodes. Confirm
     * whether that is acceptable for the intended workload.
     */
    static class DynamicHashRouter {
        private final List<Node> nodes = new CopyOnWriteArrayList<>();
        private final ConsistentHashRouter router;
        private final int virtualNodeCount;
        private final long loadThreshold;

        /**
         * @param initialNodes     number of nodes to start with (ids 0..n-1)
         * @param virtualNodeCount virtual nodes per physical node on the ring
         * @param loadThreshold    record count above which a node is overloaded
         */
        public DynamicHashRouter(int initialNodes, int virtualNodeCount, long loadThreshold) {
            this.virtualNodeCount = virtualNodeCount;
            this.loadThreshold = loadThreshold;

            // Seed the cluster with sequentially numbered nodes.
            for (int i = 0; i < initialNodes; i++) {
                nodes.add(new Node(i));
            }
            this.router = new ConsistentHashRouter(nodes, virtualNodeCount);
        }

        /** Grows the cluster by one node and places it on the hash ring. */
        public synchronized void addNode() {
            // Node ids are assigned sequentially; nodes are never removed, so
            // the list size is the next free id.
            int newNodeId = nodes.size();
            Node newNode = new Node(newNodeId);
            nodes.add(newNode);
            router.addNode(newNode);
            System.out.println("Added new node: " + newNodeId);
        }

        /** Resolves the node responsible for {@code key} via consistent hashing. */
        public Node getNode(String key) {
            return router.getNode(key);
        }

        /** Nodes holding more than {@code loadThreshold} records. */
        public List<Node> getOverloadedNodes() {
            return nodes.stream()
                    .filter(node -> node.getDataCount() > loadThreshold)
                    .collect(Collectors.toList());
        }

        /** Nodes holding less than half of {@code loadThreshold} records. */
        public List<Node> getUnderloadedNodes() {
            return nodes.stream()
                    .filter(node -> node.getDataCount() < loadThreshold / 2)
                    .collect(Collectors.toList());
        }

        /**
         * Rebalances overloaded nodes onto underloaded ones, adding a fresh
         * node first if no node is underloaded.
         */
        public void balanceLoad() {
            List<Node> overloadedNodes = getOverloadedNodes();
            List<Node> underloadedNodes = getUnderloadedNodes();

            if (overloadedNodes.isEmpty()) return;

            if (underloadedNodes.isEmpty()) {
                addNode();
                underloadedNodes = Collections.singletonList(nodes.get(nodes.size() - 1));
            }

            for (Node overloaded : overloadedNodes) {
                for (Node underloaded : underloadedNodes) {
                    // Stop once this node has dropped back under the threshold.
                    if (overloaded.getDataCount() <= loadThreshold) break;

                    migrateUsers(overloaded, underloaded);
                }
            }
        }

        /** Returns a snapshot of record counts keyed by node id. */
        public Map<Integer, Long> getNodeStats() {
            return nodes.stream()
                    .collect(Collectors.toMap(
                            node -> node.id,
                            Node::getDataCount
                    ));
        }

        /**
         * Moves roughly 10% of the users (at least one) from {@code from} to
         * {@code to}, keeping both nodes' counters in sync.
         */
        private void migrateUsers(Node from, Node to) {
            Set<Integer> userIds = from.userData.keySet();
            int usersToMigrate = Math.max(1, userIds.size() / 10);

            userIds.stream()
                    .limit(usersToMigrate)
                    .forEach(userId -> {
                        List<Data> moved = from.userData.remove(userId);
                        if (moved != null) {
                            // Fix: merge instead of put — a plain put would
                            // silently overwrite records the target already
                            // holds for this user (possible after earlier
                            // migrations), losing data and desyncing counters.
                            to.userData.merge(userId, moved, (existing, incoming) -> {
                                existing.addAll(incoming);
                                return existing;
                            });
                            long count = moved.size();
                            from.counter.addAndGet(-count);
                            to.counter.addAndGet(count);
                        }
                    });

            System.out.printf("Migrated data from Node %d to Node %d%n", from.id, to.id);
        }
    }

    /**
     * Classic consistent-hash ring: each physical node is placed on the ring
     * {@code virtualNodeCount} times to smooth the key distribution; a key maps
     * to the first node clockwise from its hash.
     */
    static class ConsistentHashRouter {
        // Fix: ConcurrentSkipListMap instead of TreeMap. addNode() (triggered by
        // rebalancing) structurally modifies the ring while getNode() may be
        // invoked from the generator's parallel stream; an unsynchronized
        // TreeMap under concurrent structural modification can corrupt or loop.
        private final NavigableMap<Long, Node> circle = new ConcurrentSkipListMap<>();
        private final int virtualNodeCount;

        public ConsistentHashRouter(Collection<Node> nodes, int virtualNodeCount) {
            this.virtualNodeCount = virtualNodeCount;
            nodes.forEach(this::addNode);
        }

        /** Places {@code node} on the ring under all of its virtual positions. */
        public void addNode(Node node) {
            for (int i = 0; i < virtualNodeCount; i++) {
                long hash = hash("Node-" + node.id + "-Virtual-" + i);
                circle.put(hash, node);
            }
        }

        /**
         * Returns the node owning {@code key}: the first ring entry at or after
         * the key's hash, wrapping to the start of the ring; {@code null} only
         * if the ring is empty.
         */
        public Node getNode(String key) {
            if (circle.isEmpty()) return null;
            long hash = hash(key);
            Map.Entry<Long, Node> entry = circle.ceilingEntry(hash);
            if (entry == null) {
                entry = circle.firstEntry(); // wrap around the ring
            }
            return entry.getValue();
        }

        /**
         * 32-bit FNV-1 hash with extra avalanche mixing, masked into an
         * unsigned 32-bit value so ring positions are non-negative.
         */
        private long hash(String key) {
            final int p = 16777619;            // FNV prime
            int hash = (int) 2166136261L;      // FNV offset basis
            for (int i = 0; i < key.length(); i++) {
                hash = (hash ^ key.charAt(i)) * p;
            }
            // Extra shift-mix rounds to improve bit dispersion.
            hash += hash << 13;
            hash ^= hash >> 7;
            hash += hash << 3;
            hash ^= hash >> 17;
            hash += hash << 5;
            return hash & 0xFFFFFFFFL;
        }
    }

    /**
     * Drives batch data generation against a {@link DynamicHashRouter},
     * checking and rebalancing node load between batches. Also hosts the demo
     * {@code main}.
     */
    static class DynamicDataGenerator {
        private final DynamicHashRouter router;
        private final int batchSize;
        private final AtomicLong totalGenerated = new AtomicLong();
        private final AtomicInteger nextUserId = new AtomicInteger(1);

        public DynamicDataGenerator(int initialNodes, int batchSize, long loadThreshold) {
            this.batchSize = batchSize;
            // 100 virtual nodes per physical node keeps the ring reasonably even.
            this.router = new DynamicHashRouter(initialNodes, 100, loadThreshold);
        }

        public static void main(String[] args) {
            // Demo configuration.
            // Fix: the original comments mislabeled these constants by a factor
            // of ten (e.g. 1_000_000 annotated as "10 million").
            int initialNodes = 10;
            int batchSize = 1_000_000;      // 1M records per batch
            long loadThreshold = 1_000_000; // per-node load threshold: 1M records
            long totalCount = 10_000_000;   // 10M records in total

            DynamicDataGenerator generator = new DynamicDataGenerator(
                    initialNodes, batchSize, loadThreshold);

            System.out.println("Starting dynamic data generation...");
            long startTime = System.currentTimeMillis();
            generator.generateBatches(totalCount);
            long duration = System.currentTimeMillis() - startTime;

            // Fix: terminate the summary with %n and separate it from the
            // per-node listing (the original format string ran everything
            // together with no newlines).
            System.out.printf("Generation completed in %,d ms%nFinal stats:%n%s%n",
                    duration,
                    generator.router.getNodeStats().entrySet().stream()
                            .sorted(Map.Entry.comparingByKey())
                            .map(e -> String.format("  Node %2d: %,d", e.getKey(), e.getValue()))
                            .collect(Collectors.joining("\n"))
            );
        }

        /** Generates records in batches until {@code totalCount} is reached. */
        public void generateBatches(long totalCount) {
            while (totalGenerated.get() < totalCount) {
                long currentBatch = Math.min(batchSize, totalCount - totalGenerated.get());
                generateBatch(currentBatch);
                checkAndBalance();
            }
        }

        /**
         * Generates {@code count} records in parallel, each for a fresh user,
         * routed through the consistent-hash ring.
         *
         * <p>(Renamed the parameter from {@code batchSize}, which shadowed the
         * field of the same name.)
         */
        private void generateBatch(long count) {
            System.out.printf("Generating batch of %,d records...%n", count);

            IntStream.range(0, (int) count).parallel().forEach(i -> {
                int userId = nextUserId.getAndIncrement();
                // Record id is the running total; the int cast is safe for the
                // demo's 10M records but would overflow past Integer.MAX_VALUE.
                Data data = new Data((int) totalGenerated.incrementAndGet(), userId);
                Node node = router.getNode("User-" + userId);
                node.addData(data);
            });

            System.out.println("Batch generation completed");
            printStats();
        }

        /** Triggers a rebalancing pass and prints the resulting load stats. */
        private void checkAndBalance() {
            System.out.println("Checking node loads...");
            router.balanceLoad();
            printStats();
        }

        /** Prints min/avg/max node load, each as a share of the total. */
        private void printStats() {
            Map<Integer, Long> stats = router.getNodeStats();
            LongSummaryStatistics summary = stats.values().stream()
                    .mapToLong(Long::longValue)
                    .summaryStatistics();

            // Fix: added the trailing %n the original format string lacked.
            System.out.printf("Node Load Statistics: Nodes: %d  Total: %,d Min: %,d (%.2f%%) Avg: %,.1f (%.2f%%) Max: %,d (%.2f%%)%n",
                    stats.size(),
                    summary.getSum(),
                    summary.getMin(), percent(summary.getMin(), summary.getSum()),
                    summary.getAverage(), percent((long) summary.getAverage(), summary.getSum()),
                    summary.getMax(), percent(summary.getMax(), summary.getSum())
            );
        }

        /** Percentage of {@code total} that {@code value} represents; 0 if total is 0. */
        private double percent(long value, long total) {
            return total == 0 ? 0 : value * 100.0 / total;
        }
    }
}
