package com.xsomnus.Distributed_Architecture.lb.impl;

import com.xsomnus.Distributed_Architecture.lb.AbstractLoadBalancer;
import com.xsomnus.Distributed_Architecture.lb.Node;

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Consistent-hash load balancer.
 *
 * <p>The consistent hashing algorithm was proposed by Karger et al. (MIT, 1997), originally for
 * load balancing in large-scale cache systems. Each cache node is hashed (from its IP or other
 * identifying information) onto a ring covering {@code [0, 2^32 - 1]}. To serve a read or write,
 * the request key is hashed and the first node whose ring position is greater than or equal to
 * that hash handles it, wrapping around to the ring's first node when none is. If a node goes
 * down, its keys simply move clockwise to the next node, so only a small fraction of keys are
 * remapped. Virtual nodes (replicas) spread each physical node over many ring positions to even
 * out the distribution.
 *
 * @author @xsomnus666_xiawenye★
 * @since 2019/7/19 0019 10:55
 */
public class ConsitentHashLoadBalancer extends AbstractLoadBalancer {

    /** Total number of virtual nodes (ring positions) created per physical node. */
    private static final int REPLICA_NUMBER = 160;

    /** Cached ring per URL; rebuilt whenever the invoker list instance changes. */
    private final ConcurrentHashMap<String, ConsistentHashSelector> selectors = new ConcurrentHashMap<>();

    /**
     * Selects a node for {@code url} from the consistent-hash ring, rebuilding the cached ring
     * when the invoker list has changed (detected via the list's identity hash code).
     *
     * @param invokers candidate nodes; assumed non-empty — TODO confirm caller guarantees this
     * @param url      request key, used both as the selector cache key and as the hash input
     * @return the node owning the first ring position at or after the hash of {@code url}
     */
    protected Node doSelect(List<Node> invokers, String url) {
        int identityHashCode = System.identityHashCode(invokers);
        ConsistentHashSelector selector = selectors.get(url);
        // Rebuild when absent or when the invoker list instance changed (stale ring).
        if (selector == null || selector.identityHashCode != identityHashCode) {
            selector = new ConsistentHashSelector(invokers, identityHashCode);
            // Use the locally built selector directly rather than re-reading the map:
            // a concurrent writer could have replaced the entry between put and get.
            selectors.put(url, selector);
        }
        return selector.select(url);
    }

    /** Immutable snapshot of the hash ring built from one invoker list. */
    private static final class ConsistentHashSelector {

        /** Ring: hash position -> node, sorted so ceilingEntry finds the clockwise owner. */
        private final TreeMap<Long, Node> virtualNodes;

        /** Identity hash of the invoker list this ring was built from; detects staleness. */
        private final int identityHashCode;

        ConsistentHashSelector(List<Node> invokers, int identityHashCode) {
            this.virtualNodes = new TreeMap<>();
            this.identityHashCode = identityHashCode;
            for (Node node : invokers) {
                String url = node.getUrl();
                // Each 16-byte MD5 digest yields four 32-bit ring positions, so compute
                // REPLICA_NUMBER / 4 digests per node for REPLICA_NUMBER total positions.
                // (The original looped REPLICA_NUMBER times, producing 4x the intended count.)
                for (int i = 0; i < REPLICA_NUMBER / 4; i++) {
                    byte[] digest = md5(url + i);
                    for (int j = 0; j < 4; j++) {
                        virtualNodes.put(hash(digest, j), node);
                    }
                }
            }
        }

        /** Hashes {@code key} and returns the node owning that position on the ring. */
        private Node select(String key) {
            byte[] digest = md5(key);
            return selectForKey(hash(digest, 0));
        }

        /** Finds the first node at or after {@code hash}, wrapping to the start of the ring. */
        private Node selectForKey(long hash) {
            Map.Entry<Long, Node> entry = virtualNodes.ceilingEntry(hash);
            if (entry == null) {
                entry = virtualNodes.firstEntry();
            }
            return entry.getValue();
        }

        /**
         * Extracts the {@code number}-th (0..3) little-endian 32-bit slice of the 16-byte MD5
         * digest as an unsigned value in a non-negative long.
         */
        private static long hash(byte[] digest, int number) {
            return (((long) (digest[3 + number * 4] & 0xFF) << 24)
                    | ((long) (digest[2 + number * 4] & 0xFF) << 16)
                    | ((long) (digest[1 + number * 4] & 0xFF) << 8)
                    | ((long) (digest[number * 4] & 0xFF)))
                    & 0xFFFFFFFFL;
        }

        /**
         * MD5 digest of the UTF-8 bytes of {@code val}. MD5 is acceptable here — it is used for
         * ring placement, not for security.
         */
        private static byte[] md5(String val) {
            MessageDigest md5;
            try {
                md5 = MessageDigest.getInstance("MD5");
            } catch (NoSuchAlgorithmException e) {
                // Every conforming JVM must supply MD5; this branch is effectively unreachable.
                throw new IllegalStateException(e.getMessage(), e);
            }
            // One-shot digest: a freshly obtained MessageDigest needs no reset().
            return md5.digest(val.getBytes(StandardCharsets.UTF_8));
        }
    }
}
