package com.gogant.spider.core;

import java.net.MalformedURLException;
import java.net.Proxy;
import java.net.URL;
import java.net.URLConnection;
import java.util.concurrent.TimeUnit;

/**
 * Abstract base class of a crawler. Manages handler registration, the URL
 * queue/cache, connection settings, and a dispatcher thread that feeds queued
 * URLs into a worker pool.
 *
 * <p>Thread-safety: handler lists are {@code ConcurrentList}s; the dispatcher
 * lifecycle is guarded by the {@link SpiderThread} monitor plus volatile flags.
 *
 * @param <Req>  request type produced for each URL
 * @param <Resp> response type produced for each URL
 * @param <Conn> underlying {@link URLConnection} subtype used to connect
 */
public abstract class Spider<Req extends Request, Resp extends Response, Conn extends URLConnection> {
    // Handler registries; ConcurrentList supports concurrent add/remove while
    // workers iterate over snapshot arrays (getArray).
    private final ConcurrentList<URLHandler<Req, Resp>> urlHandlers = new ConcurrentList<URLHandler<Req, Resp>>();
    private final ConcurrentList<RequestHandler<Req>> requestHandlers = new ConcurrentList<RequestHandler<Req>>();
    private final ConcurrentList<ResponseHandler<Resp>> responseHandlers = new ConcurrentList<ResponseHandler<Resp>>();
    private final ConcurrentList<ExceptionHandler> exceptionHandlers = new ConcurrentList<ExceptionHandler>();
    private final QueueWorkerPool<Req, Resp, Conn> workerPool;
    private final URLQueue urlQueue = new PriorityURLQueue();
    private final SpiderThread spiderThread = new SpiderThread();
    private final URLCache urlCache = new URLCache();
    private Proxy proxy = null;

    private PickerConfigurer pickerConfigurer;
    private int readTimeout = Consts.DEFAULT_READ_TIMEOUT;
    private int connectTimeout = Consts.DEFAULT_CONNECT_TIMEOUT;
    private int reconnectAttempts = Consts.DEFAULT_RECONNECT_ATTEMPTS;
    private int fetchMode = Consts.BREADTH_FIRST;

    /**
     * Creates a spider backed by a worker pool of {@code nThreads} threads.
     *
     * @param nThreads number of worker threads in the pool
     * @param factory  factory that creates connectors for this spider
     */
    protected Spider(int nThreads, ConnectorFactory<? extends Spider<Req, Resp, Conn>, Req, Resp, Conn> factory) {
        workerPool = new QueueWorkerPool<Req, Resp, Conn>(this, nThreads, factory);
        workerPool.setIdleTimeout(Consts.DEFAULT_IDLE_TIMEOUT);
    }

    /**
     * Adds a URL-bound handler. When its onFilter returns true for a URL, that
     * URL is processed by this handler: onRequest, onResponse, onException.
     *
     * @param handler the handler to register
     */
    public void addURLHandler(URLHandler<Req, Resp> handler) {
        urlHandlers.add(handler);
    }

    /**
     * Removes a previously added URL-bound handler.
     *
     * @param handler the handler to remove
     */
    public void removeURLHandler(URLHandler<Req, Resp> handler) {
        urlHandlers.remove(handler);
    }

    protected final URLHandler<Req, Resp>[] getURLHandlerArray() {
        return urlHandlers.getArray(URLHandler.class);
    }

    /**
     * Adds a global request handler, invoked (onRequest) before the spider
     * sends the request for every URL.
     *
     * @param handler the handler to register
     */
    public final void addGlobalHandler(RequestHandler<Req> handler) {
        requestHandlers.add(handler);
    }

    /**
     * Removes a global request handler.
     *
     * @param handler the handler to remove
     * @return true if the handler was registered and has been removed
     */
    public final boolean removeGlobalHandler(RequestHandler<Req> handler) {
        return requestHandlers.remove(handler);
    }

    protected final RequestHandler<Req>[] getGlobalRequestHandlerArray() {
        return requestHandlers.getArray(RequestHandler.class);
    }

    /**
     * Adds a global response handler, invoked (onResponse) after the spider
     * receives the response for every URL.
     *
     * @param handler the handler to register
     */
    public final void addGlobalHandler(ResponseHandler<Resp> handler) {
        responseHandlers.add(handler);
    }

    /**
     * Removes a global response handler.
     *
     * @param handler the handler to remove
     * @return true if the handler was registered and has been removed
     */
    public final boolean removeGlobalHandler(ResponseHandler<Resp> handler) {
        return responseHandlers.remove(handler);
    }

    protected final ResponseHandler<Resp>[] getGlobalResponseHandlerArray() {
        return responseHandlers.getArray(ResponseHandler.class);
    }

    /**
     * Adds a global exception handler, invoked (onException) whenever
     * processing of a URL throws.
     *
     * @param handler the handler to register
     */
    public final void addGlobalHandler(ExceptionHandler handler) {
        exceptionHandlers.add(handler);
    }

    /**
     * Removes a global exception handler.
     *
     * @param handler the handler to remove
     * @return true if the handler was registered and has been removed
     */
    public final boolean removeGlobalHandler(ExceptionHandler handler) {
        return exceptionHandlers.remove(handler);
    }

    protected final ExceptionHandler[] getGlobalExceptionHandlerArray() {
        return exceptionHandlers.getArray(ExceptionHandler.class);
    }

    /**
     * Returns the network proxy used for connections, or null if none is set.
     *
     * @return the proxy, or null
     */
    public final Proxy getProxy() {
        return proxy;
    }

    /**
     * Sets the network proxy used for connections.
     *
     * @param proxy the proxy, or null to connect directly
     */
    public final void setProxy(Proxy proxy) {
        this.proxy = proxy;
    }

    /**
     * Enables a URLPicker that extracts follow-up URLs from fetched content
     * during automatic crawling.
     *
     * @param picker the picker to use; must not be null
     * @return the configurer for the installed picker
     * @throws IllegalArgumentException if picker is null
     */
    public final PickerConfigurer enableURLPicker(URLPicker<Resp> picker) {
        if (picker == null) {
            throw new IllegalArgumentException("url picker can not be null");
        }
        this.pickerConfigurer = new PickerConfigurer(picker);
        return this.pickerConfigurer;
    }

    /**
     * Returns the URLPicker configuration, or null if no picker is enabled.
     *
     * @return the picker configurer, or null
     */
    protected final PickerConfigurer getPickerConfigurer() {
        return pickerConfigurer;
    }

    /**
     * Returns the worker pool's idle timeout in milliseconds.
     *
     * @return idle timeout in milliseconds
     */
    public final int getIdleTimeout() {
        return workerPool.getIdleTimeout();
    }

    /**
     * Sets the worker pool's idle timeout in milliseconds; idle worker threads
     * are reclaimed after this timeout.
     *
     * @param timeout idle timeout in milliseconds
     */
    public final void setIdleTimeout(int timeout) {
        workerPool.setIdleTimeout(timeout);
    }

    /**
     * Returns the work interval in milliseconds.
     *
     * @return work interval in milliseconds
     */
    public final int getWorkInterval() {
        return workerPool.getWorkInterval();
    }

    /**
     * Sets the work interval in milliseconds, throttling the crawl rate so that
     * any two requests are separated by at least this interval (this also holds
     * across multiple worker threads).
     *
     * @param interval minimum interval between requests, in milliseconds
     */
    public final void setWorkInterval(int interval) {
        workerPool.setWorkInterval(interval);
    }

    /**
     * Returns the connect timeout in milliseconds.
     *
     * @return connect timeout in milliseconds
     */
    public final int getConnectTimeout() {
        return connectTimeout;
    }

    /**
     * Sets the connect timeout in milliseconds.
     *
     * @param millis connect timeout in milliseconds
     */
    public final void setConnectTimeout(int millis) {
        this.connectTimeout = millis;
    }

    /**
     * Returns the read timeout in milliseconds.
     *
     * @return read timeout in milliseconds
     */
    public final int getReadTimeout() {
        return readTimeout;
    }

    /**
     * Sets the read timeout in milliseconds. After a connection succeeds, it is
     * closed if reading does not complete within this timeout.
     *
     * @param millis read timeout in milliseconds
     */
    public final void setReadTimeout(int millis) {
        this.readTimeout = millis;
    }

    /**
     * Returns the number of reconnect attempts after a connection failure.
     *
     * @return reconnect attempt count
     */
    public int getReconnectAttempts() {
        return reconnectAttempts;
    }

    /**
     * Sets the number of reconnect attempts after a connection failure; values
     * &lt;= 0 disable reconnecting.
     *
     * @param reconnectAttempts reconnect attempt count
     */
    public void setReconnectAttempts(int reconnectAttempts) {
        this.reconnectAttempts = reconnectAttempts;
    }

    /**
     * Returns the fetch mode.
     *
     * @return fetch mode: 0 = breadth-first, 1 = depth-first
     */
    public final int getFetchMode() {
        return fetchMode;
    }

    /**
     * Sets the fetch mode: 0 = breadth-first, 1 = depth-first. Defaults to
     * breadth-first.
     *
     * @param fetchMode the fetch mode
     */
    public final void setFetchMode(int fetchMode) {
        this.fetchMode = fetchMode;
    }

    /**
     * Returns whether the spider runs as a daemon thread.
     *
     * @return true if the dispatcher thread is a daemon
     */
    public final boolean isDaemon() {
        return spiderThread.isDaemon();
    }

    /**
     * Sets whether the spider runs as a daemon thread. Defaults to non-daemon;
     * has no effect once the spider has started working.
     *
     * @param daemon true to mark the dispatcher thread as a daemon
     */
    public final void setDaemon(boolean daemon) {
        spiderThread.setDaemon(daemon);
    }

    /**
     * Clears the URL cache, allowing previously fetched URLs to be fetched
     * again.
     */
    public final void clearURLCache() {
        this.urlCache.clear();
    }

    /**
     * Fetches a URL with depth 0 and no caching.
     *
     * @param url the URL string to fetch
     * @throws SpiderException if the URL is malformed or the spider cannot
     *                         accept work
     */
    public final void fetch(String url) throws SpiderException {
        fetch(url, 0, 0, TimeUnit.MILLISECONDS);
    }

    /**
     * Fetches a URL. The given URL has depth 0; URLs picked from its content
     * have depth current+1, and so on, never exceeding maxDepth. When expire is
     * greater than 0 the URL is cached and not fetched again until it expires.
     *
     * @param url      the URL string to fetch
     * @param maxDepth maximum crawl depth
     * @param expire   cache duration
     * @param unit     unit of the cache duration
     * @throws SpiderException if the URL is malformed or the spider cannot
     *                         accept work
     */
    public final void fetch(String url, int maxDepth, int expire, TimeUnit unit) throws SpiderException {
        try {
            fetch(new URL(url), maxDepth, expire, unit);
        } catch (MalformedURLException e) {
            // NOTE(review): the cause is dropped here; if SpiderException has a
            // (String, Throwable) constructor, it should be chained.
            throw new SpiderException("illegal url: " + url);
        }
    }

    /**
     * Fetches a URL with depth 0 and no caching.
     *
     * @param url the URL to fetch
     * @throws SpiderException if the spider cannot accept work
     */
    public final void fetch(URL url) throws SpiderException {
        // Unit is irrelevant for expire == 0; MILLISECONDS keeps this overload
        // consistent with fetch(String).
        fetch(url, 0, 0, TimeUnit.MILLISECONDS);
    }

    /**
     * Fetches a URL. The given URL has depth 0; URLs picked from its content
     * have depth current+1, and so on, never exceeding maxDepth. When expire is
     * greater than 0 the URL is cached and not fetched again until it expires.
     *
     * @param url      the URL to fetch
     * @param maxDepth maximum crawl depth
     * @param expire   cache duration
     * @param unit     unit of the cache duration
     * @throws SpiderException if the spider cannot accept work
     */
    public final void fetch(URL url, int maxDepth, int expire, TimeUnit unit) throws SpiderException {
        Context context = new Context(url);
        context.setMaxDepth(maxDepth < 0 ? 0 : maxDepth);
        context.setExpire(expire < 0 ? 0 : unit.toMillis(expire));
        addFetchContext(context);
    }

    /**
     * Fetches a URL bound to the given handler. The handler's onFilter method
     * is ignored; onRequest, onResponse and onException handle the fetch
     * stages. Global handlers also apply.
     *
     * @param url     the URL to fetch
     * @param handler the handler bound to this fetch
     * @throws SpiderException if the spider has been destroyed
     */
    public final void syncFetch(URL url, URLHandler<Req, Resp> handler) throws SpiderException {
        if (spiderThread.isDestroyed()) {
            throw new SpiderException("spider is destroyed");
        }
        Context context = new Context(url);
        context.addHandler(handler);
        workerPool.execute(context, 0);
    }

    /**
     * Fetches a URL bound to the given handler. The handler's onFilter method
     * is ignored; onRequest, onResponse and onException handle the fetch
     * stages. Global handlers also apply.
     *
     * @param url     the URL string to fetch
     * @param handler the handler bound to this fetch
     * @throws SpiderException if the URL is malformed or the spider has been
     *                         destroyed
     */
    public final void syncFetch(String url, URLHandler<Req, Resp> handler) throws SpiderException {
        try {
            syncFetch(new URL(url), handler);
        } catch (MalformedURLException e) {
            // NOTE(review): the cause is dropped here; if SpiderException has a
            // (String, Throwable) constructor, it should be chained.
            throw new SpiderException("illegal url: " + url);
        }
    }

    /**
     * Adds a URL context to the pending queue, starting the dispatcher thread
     * if necessary.
     *
     * @param context the fetch context to enqueue
     * @throws SpiderException if the spider is destroyed or the calling thread
     *                         is interrupted while enqueuing
     */
    protected final void addFetchContext(Context context) throws SpiderException {
        if (!spiderThread.isRunning()) {
            synchronized (this) {
                spiderThread.startup();
            }
        }
        // put() returns false when the URL is still cached (fetched within its
        // expire window); such URLs are skipped.
        if (!urlCache.put(context.getURL(), context.getExpire())) {
            return;
        }
        try {
            urlQueue.put(context);
        } catch (InterruptedException e) {
            // Restore the interrupt status so callers can still observe it.
            Thread.currentThread().interrupt();
            throw new SpiderException("operation is interrupted");
        }
    }

    /**
     * Returns the number of URLs waiting in the pending queue.
     *
     * @return pending URL count
     */
    public final int getRemainURLCount() {
        return urlQueue.size();
    }

    /**
     * Returns the number of URLs currently being processed.
     *
     * @return in-flight URL count
     */
    public final int getWorkingURLCount() {
        return workerPool.getWorkingCount();
    }

    /**
     * Returns the number of URLs currently being processed.
     *
     * @return in-flight URL count
     * @deprecated misspelled; use {@link #getWorkingURLCount()} instead. Kept
     *             for backward compatibility.
     */
    @Deprecated
    public final int getWoringURLCount() {
        return getWorkingURLCount();
    }

    /**
     * Destroys the spider: stops the dispatcher thread, clears the pending
     * queue and URL cache, and shuts down the worker pool. The spider cannot be
     * restarted afterwards.
     */
    public final void destroy() {
        synchronized (this) {
            spiderThread.destroy();
            urlQueue.clear();
            urlCache.clear();
            workerPool.destroy();
        }
    }

    /**
     * Dispatcher: takes contexts off the URL queue and submits them to the
     * worker pool. Lifecycle flags are volatile because they are written under
     * this object's monitor (startup/destroy) but read without it by
     * addFetchContext, syncFetch and the run loop.
     */
    private class SpiderThread implements Runnable {
        private volatile boolean isRunning = false;
        private volatile boolean isDaemon = false;
        private volatile boolean isDestroyed = false;
        private Thread runnerThread = null;

        public boolean isRunning() {
            return isRunning;
        }

        public void setDaemon(boolean daemon) {
            // Silently ignored once the thread exists: Thread.setDaemon cannot
            // be applied to a live thread.
            if (runnerThread == null) {
                isDaemon = daemon;
            }
        }

        public boolean isDaemon() {
            return isDaemon;
        }

        public boolean isDestroyed() {
            return isDestroyed;
        }

        public synchronized void startup() throws SpiderException {
            if (isDestroyed) {
                throw new SpiderException("spider is destroyed");
            }
            if (isRunning) {
                return;
            }
            isRunning = true;
            runnerThread = new Thread(this, "SpiderThread");
            runnerThread.setDaemon(isDaemon);
            runnerThread.start();
        }

        public synchronized void destroy() {
            // Mark destroyed even if the thread never started, so a destroyed
            // spider cannot be revived by a later fetch()/startup().
            isDestroyed = true;
            if (!isRunning) {
                return;
            }
            isRunning = false;
            if (runnerThread != null) {
                runnerThread.interrupt();
                try {
                    runnerThread.join();
                } catch (InterruptedException e) {
                    // Restore the caller's interrupt status instead of
                    // swallowing it.
                    Thread.currentThread().interrupt();
                }
                runnerThread = null;
            }
        }

        public void run() {
            while (isRunning) {
                Context context = null;
                try {
                    context = urlQueue.take();
                } catch (Throwable e) {
                    // Best effort: destroy() interrupts this thread after
                    // clearing isRunning, so the loop condition handles
                    // shutdown; any other failure simply yields no context.
                }
                if (context == null) {
                    continue;
                }
                try {
                    workerPool.submit(context);
                } catch (Throwable e) {
                    for (ExceptionHandler handler : getGlobalExceptionHandlerArray()) {
                        try {
                            handler.onException(context, e);
                        } catch (Throwable ignored) {
                            // A failing handler must not kill the dispatcher.
                        }
                    }
                }
            }
        }
    }
}
