package com.afdor.rws.core.id.generator.impl.snowflake.redis;


import com.afdor.rws.annotation.extension.Extension;
import com.afdor.rws.core.id.generator.impl.snowflake.SnowflakeIdGenerator;
import com.afdor.rws.core.strategy.IdGeneratorStrategy;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;
import lombok.ToString;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.math.NumberUtils;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.data.redis.core.HashOperations;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.core.ValueOperations;

import java.util.ArrayList;
import java.util.List;

/**
 * 推特雪花片算法 + Redis实现，不能很好的处理workerId丢失问题
 * 一个long类型的数据，64位。以下是每位的具体含义。
 * <br/>
 * snowflake的结构如下(每部分用-分开):
 * <br/>
 * 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000
 * <br/>
 * （1）第一位为未使用
 * （2）接下来的41位为毫秒级时间(41位的长度可以使用69年)
 * （3）然后是5位datacenterId
 * （4）5位workerId
 * （5）最后12位是毫秒内的计数（12位的计数顺序号支持每个节点每毫秒产生4096个ID序号）
 * <br/>
 * 一共加起来刚好64位，为一个Long型。(转换成字符串长度为18)
 *
 * @author 悭梵
 * @date Created in 2018-09-05 16:53
 */
@Deprecated
@ToString
@EqualsAndHashCode
@NoArgsConstructor
@Extension(value = "snowflakeRedisId", order = Extension.NORMAL_ORDER - 90)
public class SnowflakeRedisIdGeneratorStrategyImpl extends SnowflakeIdGenerator implements IdGeneratorStrategy<Long>, DisposableBean {

    /** Redis hash holding one field per claimed worker id slot. */
    private static final String WORKER_ID = "WorkerId/";
    /** Redis string key holding the shared datacenter id. */
    private static final String DATACENTER_ID = "DatacenterId/";
    /** Common key prefix for all coordination keys of this generator. */
    private static final String ROOT_PATH = "/RWS/IdGenerator/";

    private RedisTemplate redisTemplate;

    public SnowflakeRedisIdGeneratorStrategyImpl(final RedisTemplate redisTemplate) {
        init(redisTemplate);
    }

    // ==============================Public Methods==========================================

    /**
     * Initializes the generator: resolves the datacenter id, claims a free worker id
     * slot in Redis, starts the timestamp-sync daemon thread and registers a JVM
     * shutdown hook that releases the claimed worker id.
     *
     * @param redisTemplate Redis accessor used for id coordination
     */
    public synchronized void init(final RedisTemplate redisTemplate) {
        this.redisTemplate = redisTemplate;
        // 1. Resolve the datacenter id shared by this deployment.
        setDatacenterId(redisTemplate);

        // 2. Claim a free worker id slot for this node.
        setWorkerId(redisTemplate);

        // 3. Start the clock-sync daemon. The timestamp is refreshed on a fixed
        //    cadence so id generation stays cheap; adjust the sync strategy in
        //    SyncTimestampWorker if a finer resolution is required.
        Thread syncTimestampThread = new Thread(new SnowflakeIdGenerator.SyncTimestampWorker(), "sync-timestamp");
        syncTimestampThread.setDaemon(true);
        syncTimestampThread.start();

        // Release the claimed worker id slot on JVM exit. destroy() performs the
        // same cleanup for Spring-managed shutdown; the hash delete is idempotent,
        // so running both is harmless.
        Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
            @Override
            public void run() {
                HashOperations<String, String, String> hashOperations = redisTemplate.opsForHash();
                hashOperations.delete(ROOT_PATH + WORKER_ID, String.valueOf(workerId));
            }
        }));
    }

    /** {@inheritDoc} Generates one snowflake id; synchronized with the batch overload. */
    @Override
    public synchronized Long generate() {
        return nextId();
    }

    /** {@inheritDoc} Generates {@code size} snowflake ids in one synchronized sweep. */
    @Override
    public synchronized List<Long> generate(int size) {
        List<Long> idList = new ArrayList<>(size);
        for (int i = 0; i < size; i++) {
            idList.add(nextId());
        }
        return idList;
    }

    /**
     * Claims the lowest free worker id slot ({@code 0..maxWorkerId}) from a Redis hash.
     * Each slot is taken with {@code putIfAbsent}, so concurrently starting nodes can
     * never share an id; when a node loses the race it retries the whole scan.
     *
     * <p>Bug fix: the retry branches now {@code return} after the recursive call.
     * Previously the local {@code workerId} stayed {@code -1} after a successful
     * retry, which made the range check below throw and clobbered the field.
     *
     * @param redisTemplate Redis accessor used for id coordination
     * @throws IllegalArgumentException when no slot can be claimed or Redis access fails
     */
    private void setWorkerId(final RedisTemplate redisTemplate) {
        try {
            long workerId = -1L;
            HashOperations<String, String, String> hashOperations = redisTemplate.opsForHash();
            if (hashOperations.size(ROOT_PATH + WORKER_ID) == 0) {
                // Empty hash: try to claim slot 0 directly.
                // Boolean.TRUE.equals guards against a null Boolean (possible when the
                // template runs inside a pipeline/transaction) that would NPE on unboxing.
                if (Boolean.TRUE.equals(hashOperations.putIfAbsent(ROOT_PATH + WORKER_ID, "0", StringUtils.EMPTY))) {
                    workerId = 0L;
                } else {
                    // Lost the race — another node populated the hash meanwhile; rescan.
                    setWorkerId(redisTemplate);
                    return; // this.workerId was assigned by the recursive call
                }
            } else {
                for (long i = 0L, size = maxWorkerId; i <= size; i++) {
                    String key = String.valueOf(i);
                    if (!hashOperations.hasKey(ROOT_PATH + WORKER_ID, key)) {
                        if (Boolean.TRUE.equals(hashOperations.putIfAbsent(ROOT_PATH + WORKER_ID, key, StringUtils.EMPTY))) {
                            workerId = i;
                        } else {
                            // Slot was grabbed between hasKey and putIfAbsent; rescan.
                            setWorkerId(redisTemplate);
                            return; // this.workerId was assigned by the recursive call
                        }
                        break;
                    }
                }
            }

            // Also triggers when every slot 0..maxWorkerId is already taken.
            if (workerId > this.maxWorkerId || workerId < 0) {
                throw new IllegalArgumentException(String.format("worker id can't be greater than %d or less than 0", this.maxWorkerId));
            }
            this.workerId = workerId;
        } catch (IllegalArgumentException e) {
            // Re-throw our own validation failure untouched.
            throw e;
        } catch (Exception e) {
            // Wrap Redis/connectivity failures, preserving the cause.
            throw new IllegalArgumentException("failed to get worker id from redis", e);
        }
    }

    /**
     * Resolves the datacenter id from Redis, claiming {@code 0} via
     * {@code setIfAbsent} when the key does not exist yet.
     *
     * @param redisTemplate Redis accessor used for id coordination
     * @throws IllegalArgumentException when the stored value is out of range or Redis access fails
     */
    private void setDatacenterId(final RedisTemplate redisTemplate) {
        try {
            ValueOperations<String, String> valueOperations = redisTemplate.opsForValue();
            String datacenterId = valueOperations.get(ROOT_PATH + DATACENTER_ID);
            if (StringUtils.isBlank(datacenterId)) {
                // Boolean.TRUE.equals guards against a null Boolean (pipeline/transaction)
                // that would NPE on unboxing.
                if (Boolean.TRUE.equals(valueOperations.setIfAbsent(ROOT_PATH + DATACENTER_ID, "0"))) {
                    this.datacenterId = 0L;
                } else {
                    // Lost the race — another node set the key meanwhile; re-read it.
                    setDatacenterId(redisTemplate);
                    return; // this.datacenterId was assigned (and validated) by the recursive call
                }
            } else {
                this.datacenterId = NumberUtils.createLong(datacenterId);
            }
            if (this.datacenterId > this.maxDatacenterId || this.datacenterId < 0) {
                throw new IllegalArgumentException(String.format("datacenter id can't be greater than %d or less than 0", this.maxDatacenterId));
            }
        } catch (IllegalArgumentException e) {
            // Re-throw our own validation failure untouched.
            throw e;
        } catch (Exception e) {
            // Wrap Redis/connectivity failures, preserving the cause.
            throw new IllegalArgumentException("failed to get datacenter id from redis", e);
        }
    }

    /**
     * Spring lifecycle cleanup: releases this node's worker id slot so it can be
     * reused. Mirrors the JVM shutdown hook registered in {@link #init}.
     */
    @Override
    public void destroy() throws Exception {
        HashOperations<String, String, String> hashOperations = redisTemplate.opsForHash();
        hashOperations.delete(ROOT_PATH + WORKER_ID, String.valueOf(workerId));
    }
}
