package com.trytech.mongoocrawler.server;

import com.trytech.mongoocrawler.common.bean.MonitorData;
import com.trytech.mongoocrawler.common.enums.CrawlerStatus;
import com.trytech.mongoocrawler.server.common.db.CrawlerDataSource;
import com.trytech.mongoocrawler.server.transport.tcp.NettyTcpServer;
import com.trytech.mongoocrawler.server.xml.XmlConfigBean;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;

import java.io.File;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ThreadLocalRandom;

/**
 * 爬虫运行环境
 */
@Component
public abstract class CrawlerContext {
    protected final static CountDownLatch countDownLatch = new CountDownLatch(1);
    //Session Thread Map
    protected static ConcurrentHashMap<String, Thread> sessionThreadMap = new ConcurrentHashMap<String, Thread>();
    protected static ConcurrentHashMap<String, CrawlerSession> sessionMap = new ConcurrentHashMap<String, CrawlerSession>();
    protected static CrawlerConfig config;
    //爬虫服务器
    protected static NettyTcpServer crawlerServer;
    private static CrawlerContext appContext;

    public static CrawlerConfig getConfig() {
        return config;
    }

    public static CrawlerContext getCrawlerContext() {
        if(appContext == null) {
            //获取爬虫配置
            initConfig();
            XmlConfigBean xmlConfigBean = config.getConfigBean();
            CrawlerConfig.CrawlerMode crawlerMode = xmlConfigBean.getModeConfigBean().getCrawlerMode();
            //更新爬虫服务器的状态
            if (crawlerMode != null && crawlerMode.equals(CrawlerConfig.CrawlerMode.LOCAL_MODE)) {
                MonitorData.getInstance().getServerConfig().setCrawlerCount(1);
            }
            MonitorData.getInstance().getServerConfig().setMode(crawlerMode.getValue());
            MonitorData.getInstance().getServerConfig().setModeLabel(crawlerMode.getLabel());
            MonitorData.getInstance().getServerConfig().setRunStatus(CrawlerStatus.RUNNING.getCode());
            MonitorData.getInstance().getServerConfig().setRunStatusLabel(CrawlerStatus.RUNNING.getValue());
            //启动爬虫线程
            if (crawlerMode.equals(CrawlerConfig.CrawlerMode.DISTRIBUTED_MODE)) {
                appContext = new DistributedCrawlerContext();
                return appContext;
            }
            appContext = new LocalCrawlerContext();

        }
        return appContext;
    }


    private static void initConfig() {
        //加载配置文件
        try {
            config = CrawlerConfig.newInstance(System.getProperty("user.dir") + File.separator + "config.xml");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public void start() {
        //加载session启动爬虫
        doInitSession();
        //启动爬虫服务器的初始化逻辑
        doInitServer();
        System.out.println("爬取结束");
    }


    protected abstract void doInitSession();

    protected abstract void doInitServer();

    public void destory() {
        //关闭数据源
        if (config != null) {
            for (CrawlerDataSource dataSource : XmlConfigBean.getAllDataSources()) {
                dataSource.destory();
            }
        }
        sessionMap.clear();
        sessionMap = null;
        countDownLatch.countDown();
    }

    public void removeSession(String sessionId) {
        sessionMap.remove(sessionId);
    }

    public void registerSession(CrawlerSession session) {
        sessionMap.put(session.getSessionId(), session);
    }

    public void checkStatus() {
        for (CrawlerSession session : sessionMap.values()) {
            if (session.isPaused()) {
                sessionThreadMap.get(session.getSessionId()).interrupt();
            }
            if (!session.isDestoryed()) {
                return;
            }
        }
        destory();
    }

    public CountDownLatch getCountDownLatch() {
        return countDownLatch;
    }

    public void interruptSession(String sessionId) {
        sessionThreadMap.get(sessionId).interrupt();
    }

    public CrawlerSession getSession(String sessionId) {
        if (StringUtils.isEmpty(sessionId)) {
            return (CrawlerSession) sessionMap.values().toArray()[Math.round((float) Math.random() * 100) % sessionMap.values().size()];
        }
        return sessionMap.get(sessionId);
    }
}
