package com.spider.moudle.pool;

import com.google.common.collect.Maps;
import com.spider.moudle.api.Crawler;
import com.spider.moudle.api.realize.AbstractParamCrawler;
import com.spider.moudle.entity.Setting;
import com.spider.moudle.entity.SpeechBean;
import com.spider.moudle.entity.Task;
import com.spider.moudle.factory.ExecutorServiceFactoy;
import com.spider.moudle.factory.NamedThreadFactory;
import com.spider.moudle.util.CommonUtil;
import com.spider.moudle.util.SettingsUtil;

import java.util.*;
import java.util.concurrent.*;

/**
 * Created by Linbo Hu on 2017/12/19.
 * crawler pool
 */
public class CrawlerPool {

    /** Eagerly initialized singleton. */
    private static final CrawlerPool instance = new CrawlerPool();

    public static CrawlerPool getInstance() {
        return instance;
    }

    /**
     * site -> pool of idle crawlers. Each LinkedList doubles as the monitor
     * for wait/notify between {@link #fetchCrawler} and {@link #releaseCrawler},
     * so a registered pool must NEVER be replaced in the map — threads blocked
     * on the old list would be stranded. Declared as ConcurrentMap so
     * putIfAbsent is atomic.
     */
    private final ConcurrentMap<String, LinkedList<Crawler>> crawlersMap = Maps.newConcurrentMap();

    /**
     * Pre-registers an empty pool for every site whose spider is enabled, so
     * fetchCrawler can block-wait on it before initialization has completed.
     */
    private CrawlerPool() {
        List<Setting> settingList = SettingsUtil.getAllSettingList();
        for (Setting setting : settingList) {
            // Boolean.TRUE.equals guards against a null flag (auto-unboxing NPE).
            if (Boolean.TRUE.equals(setting.getSpider_is_worked())) {
                crawlersMap.put(setting.getSite(), new LinkedList<Crawler>());
            }
        }
    }

    /**
     * Returns the pool registered for {@code site}, atomically creating and
     * registering an empty one when absent. putIfAbsent guarantees that a
     * concurrently registered pool is never overwritten (the original
     * get-check-put sequence could replace a pool other threads were waiting on).
     */
    private LinkedList<Crawler> getOrCreatePool(String site) {
        LinkedList<Crawler> pool = crawlersMap.get(site);
        if (pool == null) {
            LinkedList<Crawler> fresh = new LinkedList<>();
            LinkedList<Crawler> existing = crawlersMap.putIfAbsent(site, fresh);
            pool = (existing != null) ? existing : fresh;
        }
        return pool;
    }

    /**
     * Appends freshly built crawlers to the site's pool and wakes every thread
     * blocked in {@link #fetchCrawler}. A null/empty build result is ignored
     * (the original initialize() path NPE'd on a null list when build failed).
     */
    private void addToPool(String site, LinkedList<Crawler> built) {
        LinkedList<Crawler> pool = getOrCreatePool(site);
        if (CommonUtil.isNotNull(built)) {
            synchronized (pool) {
                pool.addAll(built);
                pool.notifyAll();
            }
        }
    }

    /**
     * Builds the crawler pools of all enabled settings in parallel — some
     * crawlers log in during construction and are slow, hence one worker per
     * setting — and blocks until every result has been merged into the map.
     *
     * @param settingList settings to initialize; disabled ones are skipped
     */
    public void initialize(List<Setting> settingList) {
        ExecutorService executorService = null;
        List<FutureTask<Map<String, Object>>> futureTaskList = new ArrayList<>();
        try {
            executorService = ExecutorServiceFactoy.buildExecutorService(settingList.size(), "初始化crawler pool", new NamedThreadFactory("初始化crawler pool", "初始化crawler pool"));
            for (Setting setting : settingList) {
                if (Boolean.TRUE.equals(setting.getSpider_is_worked())) {
                    FutureTask<Map<String, Object>> futureTask = new FutureTask<>(new CrawlerInitialThread(setting));
                    futureTaskList.add(futureTask);
                    executorService.submit(futureTask);
                }
            }
            for (FutureTask<Map<String, Object>> futureTask : futureTaskList) {
                Map<String, Object> result = futureTask.get();
                String site = (String) result.get("site");
                @SuppressWarnings("unchecked") // "list" is only ever written as LinkedList<Crawler> in call()
                LinkedList<Crawler> built = (LinkedList<Crawler>) result.get("list");
                // Null-safe merge: build failures leave "list" absent (see CrawlerInitialThread).
                addToPool(site, built);
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // preserve the interrupt status
            e.printStackTrace();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (executorService != null) {
                executorService.shutdown();
            }
        }
    }

    /**
     * Builds the pool of crawlers described by {@code setting}: either one
     * AbstractParamCrawler instance supplying the whole pool via initial(),
     * or crawler_num no-arg instances. Per-instance failures are logged and
     * skipped, never propagated.
     *
     * @param setting source of the crawler class path and pool size
     * @return the built pool; may be null when the param-style initial() failed
     * @throws ClassNotFoundException when the configured class path is unknown
     */
    private LinkedList<Crawler> build(Setting setting) throws ClassNotFoundException, IllegalAccessException, InstantiationException {
        String classPath = setting.getCrawler_class_path();
        Class<?> clazz = Class.forName(classPath);
        // Decide the construction style from the type itself; the original
        // called newInstance() once just for an instanceof test, running
        // constructor side effects (e.g. a login) on a throwaway object.
        boolean paramStyle = AbstractParamCrawler.class.isAssignableFrom(clazz);
        System.out.println(setting.getName() + " crawler构造方式：" + (paramStyle ? "有参" : "无参"));
        LinkedList<Crawler> pool = null;
        if (paramStyle) {
            try {
                AbstractParamCrawler paramCrawler = (AbstractParamCrawler) clazz.newInstance();
                pool = paramCrawler.initial();
            } catch (Exception e) {
                e.printStackTrace();
                System.err.println("初始化 " + classPath + " 错误，可能为无参构造方式");
            }
        } else {
            int initNum = setting.getCrawler_num();
            pool = new LinkedList<>();
            for (int i = 0; i < initNum; i++) {
                try {
                    Crawler crawler = (Crawler) clazz.newInstance();
                    if (CommonUtil.isNotNull(crawler)) {
                        pool.add(crawler);
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                    System.err.println("初始化 " + classPath + " 错误，可能为有参构造方式");
                }
            }
        }
        return pool;
    }

    /**
     * Builds and registers the crawlers for a single setting, merging them
     * into any pool already registered for the site.
     *
     * @param setting setting describing the site and crawler class
     */
    public void initialItem(Setting setting) throws IllegalAccessException, InstantiationException, ClassNotFoundException {
        addToPool(setting.getSite(), build(setting));
    }

    /**
     * Builds one site's crawlers on a worker thread (login-based crawlers are
     * slow to construct, so each site is built concurrently).
     * Result map: "site" -> site key, "list" -> built pool; the "list" entry
     * is OMITTED when build() produced nothing, so consumers must null-check.
     */
    private class CrawlerInitialThread implements Callable<Map<String, Object>> {

        private final Setting setting;

        public CrawlerInitialThread(Setting setting) {
            this.setting = setting;
        }

        @Override
        public Map<String, Object> call() throws Exception {
            Map<String, Object> result = new HashMap<>();
            result.put("site", setting.getSite());
            LinkedList<Crawler> pool = build(setting);
            if (CommonUtil.isNotNull(pool)) {
                result.put("list", pool);
            }
            return result;
        }
    }

    /**
     * Borrows a crawler for {@code site}, blocking until one is available.
     *
     * @param site  pool key; an empty pool is registered on first use
     * @param mills maximum wait in milliseconds; a value <= 0 waits forever
     * @return a crawler, or null when the timeout elapsed with the pool still empty
     * @throws InterruptedException when interrupted while waiting
     */
    public Crawler fetchCrawler(String site, long mills) throws InterruptedException {
        LinkedList<Crawler> pool = getOrCreatePool(site);
        synchronized (pool) {
            if (mills <= 0) {
                // Untimed wait: loop against spurious wakeups until a crawler exists.
                while (pool.isEmpty()) {
                    pool.wait();
                }
                return pool.removeFirst();
            }
            // Timed wait: recompute the remaining budget after every wakeup.
            long deadline = System.currentTimeMillis() + mills;
            long remaining = mills;
            while (pool.isEmpty() && remaining > 0) {
                pool.wait(remaining);
                remaining = deadline - System.currentTimeMillis();
            }
            return pool.isEmpty() ? null : pool.removeFirst();
        }
    }

    /**
     * Returns a borrowed crawler to its site's pool and wakes all waiting
     * fetchers. A null crawler or an unknown site is logged and ignored.
     *
     * @param site    pool key the crawler was fetched from
     * @param crawler crawler to give back; ignored when null
     */
    public void releaseCrawler(String site, Crawler crawler) {
        LinkedList<Crawler> pool = crawlersMap.get(site);
        if (pool == null) {
            System.err.println(site + " 下不存在对应的pool");
        } else if (crawler != null) {
            synchronized (pool) {
                pool.addLast(crawler);
                pool.notifyAll();
            }
        }
    }

}
