package com.jms.crawler.context;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.conditions.update.UpdateWrapper;
import com.jms.base.entity.basicEntity.Param;
import com.jms.base.service.ParamService;
import com.jms.base.utils.DateUtils;
import com.jms.crawler.entity.basicEnity.CrawlTask;
import com.jms.crawler.entity.basicEnity.CrawlTaskStatus;
import com.jms.crawler.entity.basicEnity.CrawlTaskType;
import com.jms.crawler.mapper.CrawlTaskMapper;
import com.jms.crawler.service.DataCrawlService;
import com.jms.crawler.service.WeiboDataCrawlService;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.transaction.interceptor.TransactionAspectSupport;

import javax.annotation.PostConstruct;
import java.io.File;
import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

@Component
public class DataCrawlContext {

    private final static Logger LOGGER = LogManager.getLogger(DataCrawlContext.class);

    // Maximum number of concurrent crawl services; default 5, overridable via the "maxCrawlNumber" param.
    private int maxCrawlNumber = 5;
    // Thread pool that executes crawl tasks.
    private ExecutorService crawlTasks;
    // Currently running crawl services, keyed by task name: {"taskName": CrawlService}.
    // Wrapped with Collections.synchronizedMap because pool worker threads write to it
    // while callers read/remove concurrently.
    private Map<String, DataCrawlService> crawlServiceList;
    private CrawlTaskMapper crawlTaskMapper;
    private ParamService paramService;
    // Number of currently running crawl services. All mutation happens in
    // synchronized methods of this class so it stays consistent.
    private int taskNumber;

    @Value("${jms.crawl.path}")
    private String crawlPath;

    @Autowired
    public void setParamService(ParamService paramService) {
        this.paramService = paramService;
    }

    @Autowired
    public void setCrawlTaskMapper(CrawlTaskMapper crawlTaskMapper) {
        this.crawlTaskMapper = crawlTaskMapper;
    }

    /**
     * Creates the crawl output directory, reads the configured pool size and
     * builds the fixed thread pool. Runs once after dependency injection.
     */
    @PostConstruct
    public void init() {
        if (crawlPath != null) {
            // Ensure the crawl output directory exists.
            File file = new File(crawlPath);
            if (!file.exists() && !file.mkdirs()) {
                // Previously the mkdirs() result was ignored; surface the failure.
                LOGGER.warn("Failed to create crawl path: {}", crawlPath);
            }
        }
        if (taskNumber == 0) {
            // Read the configured maximum number of crawl services, if present.
            Param param = paramService.getParamInfoByCode("maxCrawlNumber");
            if (param != null && param.getChildren() != null && !param.getChildren().isEmpty()) {
                try {
                    maxCrawlNumber = Integer.parseInt(param.getChildren().get(0).getContent());
                } catch (NumberFormatException e) {
                    // Keep the default rather than failing startup on a malformed param value.
                    LOGGER.warn("Invalid maxCrawlNumber param value, using default {}", maxCrawlNumber, e);
                }
            }
            crawlTasks = Executors.newFixedThreadPool(maxCrawlNumber);
            crawlServiceList = Collections.synchronizedMap(new HashMap<>());
        }
    }

    /**
     * Looks up a running crawl service by task name.
     *
     * @param taskName name the service was registered under
     * @return the service, or null if no such task is running
     */
    public DataCrawlService getService(String taskName) {
        return crawlServiceList.get(taskName);
    }

    /**
     * Removes a finished crawl service, marks its task as ended and, when this
     * just freed a slot in a previously-full pool, promotes the oldest WAITING
     * task to RUNNING and resubmits it. Any failure rolls back the transaction.
     *
     * @param taskName name of the task whose service has finished
     */
    @Transactional
    public synchronized void removeService(String taskName) {
        try {
            // Drop the service from the running list.
            crawlServiceList.remove(taskName);
            // Mark the task as finished.
            UpdateWrapper<CrawlTask> updateWrapper = new UpdateWrapper<>();
            updateWrapper.set("taskStatus", CrawlTaskStatus.END.name())
                    .set("alterTime", DateUtils.getFormatDate(DateUtils.PATTERN_ONE))
                    .eq("taskName", taskName);
            crawlTaskMapper.update(null, updateWrapper);
            // A slot was freed only if we just dropped from a full pool to one below capacity.
            if (--taskNumber == maxCrawlNumber - 1) {
                // Look for tasks waiting for a free slot, oldest first.
                QueryWrapper<CrawlTask> queryWrapper = new QueryWrapper<>();
                queryWrapper.eq("taskStatus", CrawlTaskStatus.WAITING.name())
                        .orderByAsc("createTime");
                List<CrawlTask> waitList = crawlTaskMapper.selectList(queryWrapper);
                // Promote the earliest-created waiting task, if any.
                if (waitList != null && !waitList.isEmpty()) {
                    CrawlTask crawlTask = waitList.get(0);
                    JSONObject content = JSON.parseObject(crawlTask.getTaskContent());
                    // Flip the promoted task to RUNNING before submitting it.
                    updateWrapper = new UpdateWrapper<>();
                    updateWrapper.set("taskStatus", CrawlTaskStatus.RUNNING.name())
                            .set("alterTime", DateUtils.getFormatDate(DateUtils.PATTERN_ONE))
                            .eq("taskName", crawlTask.getTaskName());
                    crawlTaskMapper.update(null, updateWrapper);
                    // Submit the task to the pool.
                    if (CrawlTaskType.WeiBo.name().equals(crawlTask.getTaskType())) {
                        executeWeiboTask(convertJSONObjectToMap(content));
                    }
                }
            }
        } catch (Exception e) {
            LOGGER.error(e.getMessage(), e);
            TransactionAspectSupport.currentTransactionStatus().setRollbackOnly();
        }
    }

    /**
     * NOTE(review): despite the name, this returns {@code true} when the pool is
     * FULL (i.e. there is NO idle tasker). Callers appear to depend on this
     * inverted meaning, so the behavior is preserved — verify all call sites
     * before renaming or flipping it.
     *
     * @return true when all crawl slots are in use
     */
    public boolean haveUnusedTasker() {
        return taskNumber >= maxCrawlNumber;
    }

    /**
     * Submits a Weibo crawl task to the pool.
     *
     * @param taskContent task data shaped like {taskName: "", params: {...}}
     */
    public void executeWeiboTask(Map<String, Object> taskContent) {
        // Submit the Runnable directly. The previous code wrapped it in a
        // never-started Thread whose name was silently discarded by the
        // executor, which runs Runnables on its own pool threads.
        crawlTasks.execute(new WeiboDataCrawlThread(taskContent));
    }

    /**
     * Registers a running crawl service and bumps the running-task counter.
     * Synchronized so the counter and map stay consistent with removeService(),
     * which mutates the same state under the same lock.
     */
    private synchronized void registerService(String taskName, DataCrawlService crawlService) {
        crawlServiceList.put(taskName, crawlService);
        ++taskNumber;
    }

    /**
     * Converts the JSON task content stored in the database into the map shape
     * the crawl services consume.
     *
     * @param jsonObject stored task content
     * @return map with "taskName" plus a "params" map; JSON arrays become
     *         String[] and Long values become java.util.Date — presumably
     *         serialized epoch-millis timestamps (TODO confirm against writer)
     */
    private Map<String, Object> convertJSONObjectToMap(JSONObject jsonObject) {
        Map<String, Object> taskContent = new HashMap<>();
        taskContent.put("taskName", jsonObject.get("taskName"));
        Map<String, Object> params = new HashMap<>();
        JSONObject jsonParams = jsonObject.getJSONObject("params");
        for (Map.Entry<String, Object> entry : jsonParams.entrySet()) {
            Object value = entry.getValue();
            if (value instanceof JSONArray) {
                params.put(entry.getKey(), ((JSONArray) value).toArray(new String[0]));
            } else if (value instanceof Long) {
                params.put(entry.getKey(), new Date((Long) value));
            } else {
                params.put(entry.getKey(), value);
            }
        }
        taskContent.put("params", params);
        return taskContent;
    }

    /**
     * Runnable that creates a Weibo crawl service, registers it in the running
     * list and executes the crawl.
     */
    class WeiboDataCrawlThread implements Runnable {
        private final Map<String, Object> taskContent;

        public WeiboDataCrawlThread(Map<String, Object> taskContent) {
            this.taskContent = taskContent;
        }

        @Override
        public void run() {
            try {
                // step 1: build the crawl service
                String taskName = (String) taskContent.get("taskName");
                WeiboDataCrawlService crawlService = new WeiboDataCrawlService(taskContent);
                // step 2: register the service so it can be looked up / removed later
                // (synchronized helper fixes the previously unsynchronized ++taskNumber)
                registerService(taskName, crawlService);
                // step 3: run the crawl
                crawlService.startCrawl();
            } catch (Exception e) {
                // Log via the class logger instead of printStackTrace().
                LOGGER.error("Weibo crawl task failed: {}", taskContent.get("taskName"), e);
            }
        }
    }
}
