package com.ww.springboot.boot.controller;

import cn.hutool.core.util.IdUtil;
import cn.hutool.core.util.ObjectUtil;
import lombok.AllArgsConstructor;
import org.springframework.data.domain.Range;
import org.springframework.data.redis.connection.stream.*;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;


/**
 * Redis usage demos: a "machine pool" load-balancing exercise, HyperLogLog
 * cardinality counting, and Redis Stream produce/consume with consumer groups.
 *
 * @author ZH500
 */
@RestController
@RequestMapping("redis")
@AllArgsConstructor
public class RedisApplyController {

    // Raw type kept on purpose: the injected bean's serializer configuration is
    // defined elsewhere in the application, and narrowing the generics here
    // could break constructor injection for existing wiring.
    private final RedisTemplate redisTemplate;

    // Pool of available "machines". Shared across worker threads, so every
    // check/remove/add MUST happen inside a synchronized (list) block — the
    // list itself is a plain ArrayList and is not thread-safe.
    public static List<String> list = new ArrayList<>(Arrays.asList("A", "B", "C"));

    // Fixed pool of 8 workers (keepAlive is irrelevant while core == max and
    // core threads never time out). The work queue is unbounded, so submit()
    // never blocks or rejects.
    public static ThreadPoolExecutor executorService = new ThreadPoolExecutor(8, 8, 60
            , TimeUnit.MILLISECONDS, new LinkedBlockingDeque<>());

    /**
     * Three machines, many threads calling in a loop. Each machine may run only
     * one instance at a time; when every machine is busy, the caller is told to
     * wait. A machine must never be handed to two workers at once.
     *
     * @return empty string (demo endpoint)
     * @throws InterruptedException if the pacing sleep is interrupted
     */
    @RequestMapping("balance")
    public String balance() throws InterruptedException {
        System.out.println(list);
        for (int i = 0; i <= 10000; i++) {
            // Sanity check: the pool must never grow beyond the 3 known machines.
            synchronized (list) {
                if (list.size() > 3) {
                    throw new RuntimeException("异常分配:" + list);
                }
            }
            Thread.sleep(3);
            executorService.submit(() -> {
                // Check-then-remove must be atomic; otherwise two workers can
                // race past size() > 0 and one of them removes from an empty
                // list (or both grab the same machine).
                String machine;
                synchronized (list) {
                    machine = list.isEmpty() ? null : list.remove(0);
                }
                if (machine != null) {
                    run(machine);
                } else {
                    System.out.println("无可用机器。。。");
                }
            });
        }
        return "";
    }

    /**
     * Simulates running a task on the given machine for up to ~10 ms, then
     * returns the machine to the shared pool.
     */
    private void run(String str) {
        System.out.println(str + "运行中。。。");
        try {
            Thread.sleep((long) (Math.random() * 10));
        } catch (InterruptedException e) {
            // Restore the interrupt flag so the executor can observe cancellation
            // instead of silently swallowing it.
            Thread.currentThread().interrupt();
        }
        System.out.println(str + "释放。。。");
        // Release must use the same lock as acquisition in balance().
        synchronized (list) {
            list.add(str);
        }
    }


    /**
     * HyperLogLog demo: adds {@code num} random UUIDs to the "mem" key in a
     * single PFADD round trip. Suitable for approximate distinct counts over
     * very large volumes.
     *
     * @param num number of random ids to add; must be non-null
     * @return empty string (demo endpoint)
     */
    @GetMapping("hyperLogLog")
    public String hyperLogLog(Integer num) {
        List<String> list = new ArrayList<>(num);
        for (int i = 0; i < num; i++) {
            list.add(IdUtil.randomUUID());
        }
        // One bulk add instead of num round trips.
        redisTemplate.opsForHyperLogLog().add("mem", list.toArray());
        return "";
    }


    /**
     * Stream producer: appends {@code num} records to the "queue" stream.
     *
     * @param num number of records to append
     * @return empty string (demo endpoint)
     */
    @GetMapping("produce")
    public String produce(Integer num) {
        for (int i = 0; i < num; i++) {
            Map<Object, Object> map = new HashMap<>();
            // Use the loop index so each record carries a distinct payload
            // (the original wrote the constant num into every record and never
            // used i — almost certainly a bug).
            map.put(i, i);
            redisTemplate.opsForStream().add("queue", map);
        }
        return "";
    }

    /**
     * Starts one background consumer per group ("group1", "group2") reading the
     * "queue" stream. Each XREADGROUP blocks up to 2 seconds, so the loops do
     * not busy-spin.
     *
     * <p>NOTE(review): every call to this endpoint spawns a fresh pair of
     * never-terminating threads; daemon status at least keeps them from
     * blocking JVM shutdown.
     *
     * @return empty string (demo endpoint)
     */
    @GetMapping("consumer")
    public String consumer() {
        startConsumer("group1");
        startConsumer("group2");
        return "";
    }

    /** Launches a daemon thread that consumes "queue" on behalf of {@code group}. */
    private void startConsumer(String group) {
        Thread worker = new Thread(() -> {
            while (true) {
                // block(2s) -> the read blocks server-side; it does not hammer Redis.
                List<MapRecord<String, Object, Object>> read = redisTemplate.opsForStream().read(
                        Consumer.from(group, "queue")
                        , StreamReadOptions.empty().count(1).block(Duration.ofSeconds(2))
                        , StreamOffset.create("queue", ReadOffset.lastConsumed()));
                if (ObjectUtil.isNotEmpty(read)) {
                    RecordId id = read.get(0).getId();
                    Map<Object, Object> map = read.get(0).getValue();
                    System.out.println(group + "-----" + id + "-----" + map);
                    // ACK after processing so the entry leaves the group's
                    // pending entries list instead of accumulating forever.
                    redisTemplate.opsForStream().acknowledge("queue", group, id);
                }
            }
        });
        worker.setDaemon(true);
        worker.start();
    }

    /**
     * Resets the consumer groups on the "queue" stream: drops group1/group2 if
     * present, then recreates both reading from the beginning of the stream.
     *
     * @param num unused; kept for backward compatibility with existing callers
     * @return empty string (demo endpoint)
     */
    @GetMapping("streamGroup")
    public String streamGroup(Integer num) {
        for (String group : new String[]{"group1", "group2"}) {
            try {
                // Best-effort cleanup: XGROUP DESTROY errors when the stream or
                // group does not exist yet; that is fine for a reset endpoint.
                redisTemplate.opsForStream().destroyGroup("queue", group);
            } catch (Exception ignored) {
                // Group/stream absent — nothing to destroy.
            }
            redisTemplate.opsForStream().createGroup("queue", ReadOffset.from("0"), group);
        }
        return "";
    }

}
