package com.example.spider.task.limit;

import com.example.spider.task.AbstractSpiderTask;
import com.example.spider.task.SpiderTaskQueue;
import com.example.spider.task.Task;
import com.example.spider.util.NetWorkUtil;
import org.shoulder.core.concurrent.Threads;

import java.time.Duration;
import java.util.Map;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;

/**
 * Bandwidth limiter: caps the machine-wide download speed, e.g. keeps the average
 * download rate around 1 MB/s.
 * <p>
 * Strictly limits the bandwidth used by different crawler types so that the host's
 * network resources are allocated sensibly. Mainly useful when the crawler machine
 * has limited bandwidth, or when downloading high-bandwidth content such as images
 * or videos from different sites.
 * <p>
 * A non-blocking, byte-precise bandwidth limit would require estimating the expected
 * download size of every task up front, which is too costly to be worthwhile. This
 * strategy instead simply checks whether the current local download speed is below
 * the configured threshold.
 *
 * @author lym
 */
public class MaxBytesLimit extends AbstractLimitStrategy {

    /** Lower bound (inclusive, ms) of the random re-queue delay used by {@link #doLimit}. */
    private static final long MIN_DELAY_MILLIS = 1;

    /** Upper bound (exclusive, ms) of the random re-queue delay used by {@link #doLimit}. */
    private static final long MAX_DELAY_MILLIS = 100;

    /**
     * Maximum allowed download speed threshold, in bytes
     * (compared against {@code NetWorkUtil.getCurrentSpeed().down()}).
     */
    private long bytes;

    /**
     * Deserialization-only constructor ("dejson").
     *
     * @param map raw deserialized fields; must contain a {@code "bytes"} entry,
     *            either a {@link Number} or a numeric {@link String}
     * @throws NumberFormatException if {@code "bytes"} is absent or not parseable as a long
     */
    public MaxBytesLimit(Map<String, Object> map) {
        // only for dejson
        super(map);
        Object rawBytes = map.get("bytes");
        // FIX: the original blindly cast to String, which throws ClassCastException when a
        // JSON deserializer supplies a numeric value; accept both Number and String forms.
        this.bytes = rawBytes instanceof Number
                ? ((Number) rawBytes).longValue()
                : Long.parseLong(String.valueOf(rawBytes));
    }

    /**
     * @param limitDuration window over which the limit applies
     * @param bytes         maximum allowed download speed, in bytes
     */
    public MaxBytesLimit(Duration limitDuration, long bytes) {
        super(limitDuration);
        this.bytes = bytes;
    }

    /**
     * @param key           strategy key
     * @param limitDuration window over which the limit applies
     * @param limitScope    scope of the limit
     * @param bytes         maximum allowed download speed, in bytes
     */
    public MaxBytesLimit(String key, Duration limitDuration, LimitScope limitScope, long bytes) {
        super(key, limitDuration, limitScope);
        this.bytes = bytes;
    }

    /**
     * A task must be throttled when the current machine-wide download speed
     * already exceeds the configured threshold.
     */
    @Override
    public boolean needLimit(Task<?> task) {
        return NetWorkUtil.getCurrentSpeed().down() > bytes;
    }

    /**
     * Re-queues the task after a short random delay instead of blocking.
     * Crawler network responses are typically slow, so blocking the worker
     * thread here would waste capacity; the random jitter also avoids all
     * throttled tasks retrying at the same instant.
     */
    @Override
    public void doLimit(AbstractSpiderTask<?> task) {
        long waitTime = ThreadLocalRandom.current().nextLong(MIN_DELAY_MILLIS, MAX_DELAY_MILLIS);
        Threads.delay(() -> SpiderTaskQueue.putTask(task), waitTime, TimeUnit.MILLISECONDS);
    }

    /**
     * No-op: this strategy only samples the live download speed in
     * {@link #needLimit}, so nothing needs recording after a request.
     */
    @Override
    public <T> void afterRequest(Task<T> task, T result, Throwable ex, long endTime) {
    }

    /**
     * Creates a copy of this strategy bound to a new key, keeping the same
     * duration, scope and byte threshold.
     *
     * NOTE(review): the parameter {@code s} looks like it is intended to be the new
     * key, but the copy is built with {@code getKey()} — the argument is ignored.
     * Left unchanged here as other strategies may rely on this behavior; confirm
     * whether {@code s} should be passed instead of {@code getKey()}.
     */
    @Override
    public LimitStrategy copyWithNewKey(String s) {
        return new MaxBytesLimit(getKey(), getLimitDuration(), getLimitScope(), getBytes());
    }

    public long getBytes() {
        return bytes;
    }

    public void setBytes(long bytes) {
        this.bytes = bytes;
    }
}
