package com.california.pay.common.id;


import com.california.pay.common.utils.LocalUtils;
import lombok.Getter;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.math.NumberUtils;
import org.redisson.api.RAtomicLong;
import org.redisson.api.RBucket;
import org.redisson.api.RedissonClient;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.FactoryBean;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.Date;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

/**
 * 获取唯一id，最后生成18位整型
 * <p>
 * 长度为64bit,从高位到低位依次为
 * </p>
 * <p>
 * <pre>
 * 1bit   符号位
 * 41bits 时间偏移量从2016年1月1日零点到现在的毫秒数
 * 10bits 工作进程Id
 * 12bits 同一个毫秒内的自增量
 * </pre>
 */
@Slf4j
@Slf4j
public class SnowflakeIdWorker implements IdWorker {

    /**
     * Worker-id bit width for 16-digit ids: 5 bits, i.e. at most 32 workers.
     */
    public static final long MAX_WORK_ID_BITS_16 = 5L;
    /**
     * Worker-id bit width for 18-digit ids: 10 bits, i.e. at most 1024 workers.
     */
    public static final long MAX_WORK_ID_BITS_18 = 10L;

    /** Sequence bit width for 18-digit ids (4096 ids per worker per millisecond). */
    public static final long SEQUENCE_BITS_18 = 12L;
    /** Sequence bit width for 16-digit ids (1024 ids per worker per millisecond). */
    public static final long SEQUENCE_BITS_16 = 10L;

    /** Largest valid worker id in 16-digit mode (31). */
    public static final long MAX_WORK_ID_16 = -1L ^ -1L << MAX_WORK_ID_BITS_16;
    /** Largest valid worker id in 18-digit mode (1023). */
    public static final long MAX_WORK_ID_18 = -1L ^ -1L << MAX_WORK_ID_BITS_18;

    /** Lazily created singleton for single-machine convenience use; see {@link #getInstance()}. */
    private static SnowflakeIdWorker myWorker;
    /** Worker id embedded into every generated id; must differ per machine per application. */
    private long workerId;
    /**
     * Epoch base: 2016/1/1 00:00:00 (ms). Timestamps are stored as offsets from this instant.
     */
    private long twepoch = 1451577600000L;
    /** Per-millisecond sequence counter. */
    private long sequence = 0L;
    /** Layout parameters; defaults are for 18-digit mode and are overwritten in 16-digit mode. */
    private long workerIdBits = MAX_WORK_ID_BITS_18;
    private long maxWorkerId = MAX_WORK_ID_18;
    private long sequenceBits = SEQUENCE_BITS_18;
    private long workerIdShift = sequenceBits;
    private long timestampLeftShift = sequenceBits + workerIdBits;
    private long sequenceMask = -1L ^ -1L << sequenceBits;
    /** Timestamp of the most recently generated id; used to detect clock drift and same-ms bursts. */
    private long lastTimestamp = -1L;
    /** True when this worker generates 16-digit ids instead of the default 18-digit ones. */
    private boolean is16digit = false;
    /**
     * ========================================================
     * =               Named per-purpose workers              =
     * ========================================================
     * Child workers keyed by name for {@link #nextId(String)}; each child shares this
     * worker's workerId and digit mode but keeps its own sequence state.
     */
    private ConcurrentHashMap<String, SnowflakeIdWorker> idWorkers = new ConcurrentHashMap<String, SnowflakeIdWorker>();

    /**
     * Extracts the workId field encoded inside a generated id.
     * <p>Only 16- and 18-character ids are recognized; anything else yields -1.</p>
     *
     * @author zhouwenqing
     * @date 2018/11/29
     * @param id the id string previously produced by a SnowflakeIdWorker
     * @return int the embedded worker id, or -1 if the id is not numeric or has an unexpected length
     */
    @Override
    public int getWorkId(String id) {

        long idLong = NumberUtils.toLong(id, -1L);
        if (idLong == -1L) {
            // Not a parseable long -> cannot carry a workId.
            return -1;
        }
        int workId = -1;
        if (id.length() == 18) {
            workId = (int) ((idLong >> SEQUENCE_BITS_18) & MAX_WORK_ID_18);
        } else if (id.length() == 16) {
            workId = (int) ((idLong >> SEQUENCE_BITS_16) & MAX_WORK_ID_16);
        }
        return workId;
    }

    /**
     * Creates an 18-digit worker.
     *
     * @param workerId worker id; must be unique per machine per application
     */
    public SnowflakeIdWorker(final long workerId) {
        this(workerId, false);
    }

    /**
     * Creates a worker in either 16- or 18-digit mode.
     *
     * @param workerId  worker id; must be unique per machine per application
     * @param is16digit whether to generate 16-digit ids (smaller worker/sequence fields)
     * @throws IllegalArgumentException if workerId is negative or exceeds the mode's maximum
     */
    public SnowflakeIdWorker(final long workerId, final boolean is16digit) {
        super();
        if (is16digit) {
            this.is16digit = true;
            // 16-digit layout: at most 32 machines ...
            workerIdBits = MAX_WORK_ID_BITS_16;
            // ... and at most 1024 ids per millisecond.
            sequenceBits = SEQUENCE_BITS_16;
            maxWorkerId = MAX_WORK_ID_16;
            workerIdShift = sequenceBits;
            timestampLeftShift = sequenceBits + workerIdBits;
            sequenceMask = -1L ^ -1L << sequenceBits;
        }
        if (workerId > maxWorkerId || workerId < 0) {
            throw new IllegalArgumentException(
                String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
        }
        this.workerId = workerId;
    }

    /**
     * Convenience singleton for single-machine use (workerId fixed to 1). For distributed
     * deployments create SnowflakeIdWorker instances manually with a distinct workerId per machine.
     * <p>Synchronized to prevent two threads from racing the lazy initialization and
     * ending up with two distinct singletons.</p>
     *
     * @return the shared single-machine worker
     */
    public static synchronized SnowflakeIdWorker getInstance() {
        if (myWorker == null) {
            myWorker = new SnowflakeIdWorker(1);
        }
        return myWorker;
    }

    /**
     * Ad-hoc demo entry point; not used in production code paths.
     */
    public static void main(String[] args) {
        final SnowflakeIdWorker worker2 = new SnowflakeIdWorker(10, true);
        // NOTE(review): Date(int,int,int) / toLocaleString() are deprecated demo calls;
        // kept as-is since this main is illustrative only.
        System.out.println(new Date(116, 0, 1).getTime());
        System.out.println(new Date(1451577600000L).toLocaleString());

        ExecutorService executorService = Executors.newCachedThreadPool();
        for (int i = 0; i < 100; i++) {
            executorService.submit(new Runnable() {
                @Override
                public void run() {
                    System.out.println(worker2.nextId("a"));
                    System.out.println(worker2.nextId("b"));
                }
            });
        }
        executorService.shutdown();
    }

    /**
     * Generates the next unique id. Thread-safe via method-level synchronization.
     *
     * @return the next id
     */
    @Override
    public synchronized long nextId() {
        return innerNextId();
    }

    /**
     * Core id generation. Must only be called while holding this worker's monitor
     * (see {@link #nextId()}).
     *
     * @return the next id: (timestamp - twepoch) | workerId | sequence, packed per the mode's layout
     */
    private long innerNextId() {
        long timestamp = this.timeGen();
        // Clock moved backwards: refuse to generate (duplicates would result) and wait
        // until the wall clock catches up with the last issued timestamp.
        while (timestamp < this.lastTimestamp) {
            log.warn(String.format("Clock moved backwards. Refusing to generate id for %d milliseconds",
                this.lastTimestamp - timestamp));
            try {
                TimeUnit.MILLISECONDS.sleep(5);
            } catch (InterruptedException e) {
                // Preserve the interrupt flag for the caller; keep waiting for the clock.
                Thread.currentThread().interrupt();
            }
            timestamp = this.timeGen();
        }
        if (this.lastTimestamp == timestamp) {
            // Same millisecond: bump the sequence; on wrap-around spin until the next ms.
            this.sequence = (sequence + 1) & sequenceMask;
            if (this.sequence == 0) {
                timestamp = this.tilNextMillis(this.lastTimestamp);
            }
        } else {
            // New millisecond: restart the sequence.
            this.sequence = 0;
        }

        this.lastTimestamp = timestamp;
        return ((timestamp - twepoch) << timestampLeftShift)
            | (this.workerId << workerIdShift)
            | (this.sequence);
    }

    /**
     * Busy-waits until the clock advances past the given timestamp.
     *
     * @param lastTimestamp the millisecond to move past
     * @return the first timestamp strictly greater than lastTimestamp
     */
    private long tilNextMillis(final long lastTimestamp) {
        long timestamp = this.timeGen();
        while (timestamp <= lastTimestamp) {
            timestamp = this.timeGen();
        }
        return timestamp;
    }

    /** @return current wall-clock time in milliseconds */
    private long timeGen() {
        return System.currentTimeMillis();
    }

    /**
     * 本方法与不带参数的nextId不能混用，本方法是建立多个idWorker实例，每个实例之间用名字区分，
     * 单个实例的id保证唯一，不同的实例会生成相同的id，所以不能在同一个业务中使用不同的名称获取id
     * <p>(Each named child worker has its own independent sequence, so two different names CAN
     * yield the same id — never mix names, or the no-arg nextId, within one business stream.)</p>
     *
     * @param name worker名称
     * @return the next id from the named child worker
     */
    @Override
    public long nextId(String name) {
        // computeIfAbsent gives atomic create-once semantics, replacing the previous
        // hand-rolled double-checked locking around the ConcurrentHashMap.
        SnowflakeIdWorker idWorker = idWorkers.computeIfAbsent(name, key -> {
            SnowflakeIdWorker worker = new SnowflakeIdWorker(this.workerId, this.is16digit);
            log.info("Create id worker " + key + ": " + worker);
            return worker;
        });
        return idWorker.nextId();
    }

    /**
     * Re-assigns the workerId of this worker and of every named child worker.
     *
     * @param workId the new worker id
     */
    public synchronized void updateWorkId(long workId) {
        this.workerId = workId;
        idWorkers.forEach((name, worker) -> worker.updateWorkId(workId));
    }

    /**
     * SnowflakedIdWorker bean工厂类.
     * 使用redis的累加值取模，如果是16位则模{@link SnowflakeIdWorker#MAX_WORK_ID_16}
     * <p>Allocates a cluster-unique workId by claiming a Redis key ("idWorkerName:workId")
     * with a TTL and refreshing it from a background monitor thread.</p>
     *
     * @author zhouwenqing
     * @date 2018/10/8
     */
    public static class RedisSonwflakeIdWorkerFactory implements FactoryBean<SnowflakeIdWorker>, DisposableBean {

        @Autowired
        private RedissonClient redissonClient;

        /**
         * id生成器名称，用来做redis中的key.
         * @author zhouwenqing
         * @date 2018/10/8
         */
        @Setter
        @Getter
        private String idWorkerName = "core:workId";

        /**
         * 是否是16位编码.
         */
        @Setter
        @Getter
        private boolean digit16Flag = false;

        /** 最小的workId*/
        @Setter
        @Getter
        private Long minWorkId = 0L;

        /** 最大的workId. Defaults to the mode's maximum when left null. */
        @Setter
        @Getter
        private Long maxWorkId;

        /** workId的超时时间单位秒. Also drives the monitor thread's refresh interval. */
        @Setter
        @Getter
        private int keyTimeout = 20;

        /** Identity of this process, stored as the claimed key's value: "ip@pid". */
        static String ipPid = LocalUtils.getLocalIP() + "@" + LocalUtils.getPid();

        /** Background thread that keeps the claimed workId key alive. */
        private Thread monitorThread;

        /** The successfully claimed Redis bucket, or null until a workId is acquired. */
        private RBucket<String> workIdBucket;

        /** Shutdown flag for the monitor thread; flipped by {@link #destroy()}. */
        private volatile boolean canRunning = true;

        /**
         * Claims a free workId from Redis and builds the corresponding SnowflakeIdWorker.
         * <p>Tries every candidate id (0..max inclusive); a candidate is owned only when
         * {@code trySet} succeeds. Previously the non-null check on workIdBucket could pass
         * even though every trySet had failed, letting the factory proceed with a workId it
         * did not own — acquisition is now tracked explicitly.</p>
         *
         * @return a new worker bound to the claimed workId
         * @throws RuntimeException if maxWorkId is misconfigured or no workId is free
         */
        @Override
        public SnowflakeIdWorker getObject() throws BeansException {

            final long CUR_MAX_WORK_ID = digit16Flag ? MAX_WORK_ID_16 : MAX_WORK_ID_18;
            if (maxWorkId == null) {
                maxWorkId = CUR_MAX_WORK_ID;
            } else {
                if (maxWorkId > CUR_MAX_WORK_ID) {
                    throw new RuntimeException("maxWorkId配置不正确,不能大于" + CUR_MAX_WORK_ID);
                }
            }

            long myId = 0;
            boolean acquired = false;
            int count = 0;
            // Valid ids are 0..CUR_MAX_WORK_ID inclusive, hence CUR_MAX_WORK_ID + 1 attempts.
            while (count <= CUR_MAX_WORK_ID) {
                count++;
                myId = nextMyId();
                if (myId < minWorkId || myId > maxWorkId) {
                    continue;
                }
                workIdBucket = redissonClient.getBucket(idWorkerName + ":" + myId);
                // trySet only succeeds when the key does not exist yet, i.e. the id is free.
                if (workIdBucket.trySet(ipPid, keyTimeout, TimeUnit.SECONDS)) {
                    acquired = true;
                    break;
                }
            }

            if (!acquired) {
                throw new RuntimeException("所有workId都被占用，无法获取,请稍后再试。minWorkId="
                    + minWorkId + ",maxWorkId=" + maxWorkId);
            }

            monitorThread = new Thread(() -> {
                log.info("启动workId超时监控线程");
                while (canRunning) {
                    try {
                        // Refresh well before expiry; never sleep 0s (busy loop) for small timeouts.
                        TimeUnit.SECONDS.sleep(Math.max(1, keyTimeout / 3));
                        if (!workIdBucket.expire(keyTimeout, TimeUnit.SECONDS)) {
                            // Key vanished (e.g. Redis flush): re-claim it.
                            workIdBucket.trySet(ipPid, keyTimeout, TimeUnit.SECONDS);
                        }
                    } catch (InterruptedException e) {
                        // Interrupt is how destroy() wakes us; the canRunning check exits the loop.
                    } catch (Exception ex) {
                        log.warn("更新workId的超时时间失败:{}", ex.getMessage());
                    }
                }
                log.info("退出workId超时监控线程");
            });
            monitorThread.setName("WORKID_MONITOR_" + myId + "_" + monitorThread.getId());
            // Daemon: the keep-alive thread must never block JVM shutdown on its own.
            monitorThread.setDaemon(true);
            monitorThread.start();

            return new SnowflakeIdWorker(myId, digit16Flag);
        }

        /**
         * Draws the next candidate workId from a shared Redis counter, modulo the
         * current mode's id space.
         *
         * @return a candidate workId in [0, mode max]
         */
        private long nextMyId() {
            RAtomicLong atomicLong = redissonClient.getAtomicLong(idWorkerName);
            long workerCount = atomicLong.incrementAndGet();
            // Use the max of the CURRENT mode; the reset below previously hard-coded the
            // 18-digit max, so in 16-digit mode the counter was not wrapped back in step
            // with the modulo.
            long curMaxWorkId = digit16Flag ? MAX_WORK_ID_16 : MAX_WORK_ID_18;
            long myId = workerCount % (curMaxWorkId + 1);
            if (workerCount > curMaxWorkId) {
                // Best-effort reset so the shared counter cannot grow toward overflow;
                // CAS keeps concurrent allocators from clobbering each other.
                atomicLong.compareAndSet(workerCount, myId);
            }
            return myId;
        }

        @Override
        public Class<? extends SnowflakeIdWorker> getObjectType() {
            return SnowflakeIdWorker.class;
        }

        @Override
        public boolean isSingleton() {
            return true;
        }

        /**
         * Stops the monitor thread and releases the claimed workId key.
         * Null-guarded: getObject() may never have run, or may have failed before
         * the thread/bucket were set up.
         */
        @Override
        public void destroy() throws Exception {
            this.canRunning = false;
            if (monitorThread != null) {
                monitorThread.interrupt();
                monitorThread.join();
            }
            if (workIdBucket != null) {
                workIdBucket.delete();
            }
        }
    }

}
