package com.piece.module.spider.service;

import com.piece.core.framework.enums.StatusType;
import com.piece.core.framework.support.convert.Convert;
import com.piece.core.framework.util.file.FileUtil;
import com.piece.core.framework.util.schedule.ScheduleJob;
import com.piece.core.framework.util.schedule.ScheduleUtil;
import com.piece.core.framework.util.string.JsonUtil;
import com.piece.core.framework.util.string.StringUtil;
import com.piece.core.framework.util.time.DateUtil;
import com.piece.core.jpa.repository.BaseService;
import com.piece.core.mongo.entity.MongoJobLog;
import com.piece.core.mongo.service.MongoJobLogService;
import com.piece.module.spider.concurrent.SpiderJob;
import com.piece.module.spider.executor.Spider;
import com.piece.module.spider.model.SpiderFlow;
import com.piece.module.spider.response.FlowNoticeRepository;
import com.piece.module.spider.response.SpiderFlowRepository;
import lombok.extern.slf4j.Slf4j;
import org.quartz.*;
import org.quartz.spi.OperableTrigger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import javax.annotation.PostConstruct;
import javax.annotation.Resource;
import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.stream.Collectors;

/**
 * Service for spider flow definitions: CRUD, Quartz (cron) scheduling,
 * on-disk XML history snapshots, and execution bookkeeping.
 *
 * <p>Workspace layout (relative to an installation root):
 * {@code <workspace>/<flowId>/xmls/<millis>.xml} for history versions and
 * {@code <workspace>/<flowId>/logs/} for run logs.
 */
@Slf4j
@Service
@Transactional
public class SpiderFlowService extends BaseService<SpiderFlow, Long> {

    /** Quartz job-group prefix so spider jobs cannot collide with other scheduled jobs. */
    private final static String KEY_PREFIX = "SPIDER_";

    @Resource
    private JdbcTemplate jdbcTemplate;

    @Autowired
    private SpiderFlowRepository spiderFlowRepository;

    @Autowired
    private FlowNoticeRepository flowNoticeRepository;

    @Autowired
    private MongoJobLogService mongoJobLogService;

    @Autowired
    private Scheduler scheduler;

    @Autowired
    private SpiderJob spiderJob;

    /** Wires this service's generic base repository after dependency injection. */
    @PostConstruct
    public void initRepository() {
        super.setRepository(spiderFlowRepository);
    }

    @Override
    public SpiderFlow insert(SpiderFlow spiderFlow) throws Exception {
        return insertOrUpdate(spiderFlow);
    }

    /**
     * Inserts or updates a flow, (re)registers its Quartz job when it is enabled and
     * has a cron expression, and snapshots the flow XML to disk as a history version.
     *
     * @param spiderFlow the flow to persist; a null id means insert
     * @return the persisted flow (with id and nextExecuteTime populated where applicable)
     */
    @Override
    public SpiderFlow insertOrUpdate(SpiderFlow spiderFlow) throws Exception {
        if (null != spiderFlow.getId()) {
            spiderFlowRepository.update(spiderFlow);
            // Drop the old trigger; it is re-created below if the flow is still enabled.
            ScheduleUtil.deleteJob(scheduler, spiderFlow.getName(), KEY_PREFIX + spiderFlow.getId());
        } else {
            spiderFlow.setEnabled(StatusType.NORMAL.getCode());
            spiderFlow = spiderFlowRepository.insert(spiderFlow);
        }

        if (StatusType.NORMAL.getCode().equals(spiderFlow.getEnabled()) && StringUtil.isNotEmpty(spiderFlow.getCron())) {
            Date nextExecuteTime = ScheduleUtil.createScheduleJob(scheduler, transToScheduleJob(spiderFlow));
            if (null != nextExecuteTime) {
                spiderFlow.setNextExecuteTime(nextExecuteTime);
                update(spiderFlow);
            }
        }

        try {
            // Snapshot the XML as <workspace>/<id>/xmls/<millis>.xml so
            // historyList()/readHistory() can enumerate and load versions later.
            String basePath = FileUtil.getStoragePath(
                    spiderJob.workspace + spiderFlow.getId() + File.separator + "xmls");
            String serverPath = basePath + File.separator + System.currentTimeMillis() + ".xml";
            // Encode explicitly as UTF-8; getBytes() without a charset depends on the
            // platform default and readHistory() decodes with UTF-8.
            FileUtil.writeByteArrayToFile(new File(serverPath),
                    spiderFlow.getXml().getBytes(StandardCharsets.UTF_8));
            spiderFlow.setXmlFilePath(serverPath);
            spiderFlowRepository.update(spiderFlow);
        } catch (Exception e) {
            // Best effort: losing one history snapshot must not fail the save itself.
            log.error("保存爬虫历史记录出错：{}", e.getMessage());
        }
        return spiderFlow;
    }

    /**
     * Deletes a flow together with its Quartz job, on-disk XML/log files,
     * and its notice configuration.
     */
    @Override
    public void delete(Serializable id) throws Exception {
        SpiderFlow spiderFlow = findById(id);
        if (null != spiderFlow) {
            // BUGFIX: clean up disk artifacts BEFORE the row is deleted — the helper
            // re-reads the entity to obtain xmlFilePath, which is gone after delete.
            deleteXmlAndLogs(Convert.toLong(id));
            spiderFlowRepository.delete(id);
            ScheduleUtil.deleteJob(scheduler, spiderFlow.getName(), KEY_PREFIX + spiderFlow.getId());
            flowNoticeRepository.delete(id);
        }
    }

    /**
     * Removes the contents of the per-flow "xmls" and "logs" directories.
     * No-op when the flow is unknown or has no recorded XML file path.
     */
    private void deleteXmlAndLogs(Long id) {
        SpiderFlow spiderFlow = findById(id);
        if (null != spiderFlow && StringUtil.isNotEmpty(spiderFlow.getXmlFilePath())) {
            String root = workspaceRoot(spiderFlow.getXmlFilePath());
            deleteChildren(new File(root + spiderJob.workspace, id + File.separator + "xmls"));
            deleteChildren(new File(root + spiderJob.workspace, id + File.separator + "logs"));
        }
    }

    /** Deletes every file directly inside {@code directory}; no-op when empty or missing. */
    private void deleteChildren(File directory) {
        File[] files = directory.listFiles();
        if (null != files) {
            for (File file : files) {
                // BUGFIX: the original used Arrays.stream(files).map(f -> f.delete())
                // with no terminal operation — the lazy pipeline never ran, so nothing
                // was ever deleted. A plain loop actually performs the deletion.
                if (!file.delete()) {
                    log.warn("Failed to delete file {}", file.getAbsolutePath());
                }
            }
        }
    }

    /** Returns the installation-root prefix preceding the workspace segment of the path. */
    private String workspaceRoot(String xmlFilePath) {
        return xmlFilePath.substring(0, xmlFilePath.indexOf(spiderJob.workspace));
    }

    /**
     * Converts a flow into the generic {@link ScheduleJob} descriptor used by
     * {@link ScheduleUtil}; the flow itself travels as a JSON parameter and is
     * dispatched back into {@link #dealScheduleJob(ScheduleJob)}.
     */
    private ScheduleJob transToScheduleJob(SpiderFlow spiderFlow) {
        ScheduleJob scheduleJob = new ScheduleJob(KEY_PREFIX + spiderFlow.getId(), spiderFlow.getName(), spiderFlow.getCron(),
                "0", JsonUtil.toJson(spiderFlow));
        scheduleJob.setTarget(SpiderFlowService.class);
        scheduleJob.setMethod("dealScheduleJob");
        scheduleJob.setStatus(StatusType.RUN.getCode());
        scheduleJob.setRetryCount(0);
        scheduleJob.setRetryInterval(0);
        return scheduleJob;
    }

    /**
     * Registers scheduled crawl jobs on application startup: clears every flow's
     * nextExecuteTime, then re-creates Quartz jobs for all enabled flows with a cron.
     */
    public void initJobs() throws Exception {
        // Clear all stale next-execution times left over from the previous run.
        resetNextExecuteTime();
        spiderFlowRepository.flush();
        // Load flows whose cron scheduling is enabled.
        // NOTE(review): this filters on SUCCESS while insertOrUpdate() marks enabled
        // flows as NORMAL — confirm the intended "enabled" code; they look inconsistent.
        List<SpiderFlow> spiderFlows = spiderFlowRepository.findByCondition("enabled", StatusType.SUCCESS.getCode(), null);
        if (null != spiderFlows && !spiderFlows.isEmpty()) {
            for (SpiderFlow spiderFlow : spiderFlows) {
                if (StringUtil.isNotEmpty(spiderFlow.getCron())) {
                    Date nextExecuteTime = ScheduleUtil.createScheduleJob(scheduler, transToScheduleJob(spiderFlow));
                    if (null != nextExecuteTime) {
                        spiderFlow.setNextExecuteTime(nextExecuteTime);
                        update(spiderFlow);
                    }
                }
            }
        }
    }

    /** Clears nextExecuteTime for every flow (bulk HQL update). */
    public void resetNextExecuteTime() {
        String hql = "update SpiderFlow set nextExecuteTime = null";
        spiderFlowRepository.executeHql(hql);
    }

    /**
     * Quartz callback target (see {@link #transToScheduleJob}): deserializes the flow
     * from the job's JSON parameter and runs it asynchronously on the spider executor,
     * then bumps the execution counters.
     */
    public void dealScheduleJob(ScheduleJob scheduleJob) {
        SpiderFlow spiderFlow = JsonUtil.toBean(scheduleJob.getParam(), SpiderFlow.class);
        MongoJobLog jobLog = mongoJobLogService.findOne(scheduleJob.getLogId());
        Spider.executorInstance.submit(() -> {
            try {
                spiderJob.run(spiderFlow, jobLog);
                executeCountIncrement(spiderFlow.getId(), new Date(), spiderFlow.getNextExecuteTime());
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        });
    }

    /**
     * Lists all flows (id and name only), optionally excluding the given id
     * (used e.g. when picking a sub-flow so a flow cannot reference itself).
     */
    public List<SpiderFlow> selectFlows(String id) {
        String sql = "select id, name from sp_flow";
        Object[] args = {};
        if (StringUtil.isNotBlank(id)) {
            // Parameterize instead of concatenating the value into the SQL string.
            sql += " where id != ?";
            args = new Object[]{Convert.toLong(id)};
        }
        return jdbcTemplate.query(sql, (rs, rowNum) -> {
            SpiderFlow spiderFlow = new SpiderFlow();
            spiderFlow.setId(rs.getLong("id"));
            spiderFlow.setName(rs.getString("name"));
            return spiderFlow;
        }, args);
    }

    /**
     * Reads one historical XML version of a flow.
     *
     * @param id        the flow id
     * @param timestamp millisecond file-name stem (without ".xml") as returned by {@link #historyList}
     * @return the XML content, or null when the flow or its history path is unknown
     * @throws Exception when the snapshot file cannot be read
     */
    public String readHistory(Long id, String timestamp) throws Exception {
        SpiderFlow spiderFlow = findById(id);
        if (null != spiderFlow && StringUtil.isNotEmpty(spiderFlow.getXmlFilePath())) {
            String path = workspaceRoot(spiderFlow.getXmlFilePath())
                    + spiderJob.workspace + spiderFlow.getId() + File.separator + "xmls"
                    + File.separator + timestamp + ".xml";
            // try-with-resources replaces the original manual close() in finally.
            try (InputStream is = new FileInputStream(path)) {
                return new String(FileUtil.toByte(is), StandardCharsets.UTF_8);
            } catch (IOException e) {
                log.error("读取爬虫历史版本出错", e);
                throw new Exception(e);
            }
        }
        return null;
    }

    /**
     * Lists the millisecond timestamps of all saved XML versions for a flow,
     * sorted ascending; empty when the flow or its history directory is unknown.
     */
    public List<Long> historyList(Long id) {
        SpiderFlow spiderFlow = findById(id);
        if (null != spiderFlow && StringUtil.isNotEmpty(spiderFlow.getXmlFilePath())) {
            String root = workspaceRoot(spiderFlow.getXmlFilePath());
            File directory = new File(root + spiderJob.workspace, id + File.separator + "xmls");
            if (directory.exists() && directory.isDirectory()) {
                File[] files = directory.listFiles((dir, name) -> name.endsWith(".xml"));
                if (null != files && files.length > 0) {
                    return Arrays.stream(files)
                            .map(f -> Long.parseLong(f.getName().replace(".xml", "")))
                            .sorted()
                            .collect(Collectors.toList());
                }
            }
        }
        return Collections.emptyList();
    }

    /**
     * Enables cron scheduling for a flow and registers its Quartz job.
     */
    public void start(long id) throws Exception {
        SpiderFlow spiderFlow = findById(Convert.toLong(id));
        ScheduleUtil.deleteJob(scheduler, spiderFlow.getName(), KEY_PREFIX + spiderFlow.getId());
        // NOTE(review): start() marks the flow DELETE while insertOrUpdate() treats
        // NORMAL as "enabled" and stop() sets NORMAL — these codes look swapped or at
        // least inconsistent; confirm StatusType semantics before relying on them.
        spiderFlow.setEnabled(StatusType.DELETE.getCode());
        Date nextExecuteTime = ScheduleUtil.createScheduleJob(scheduler, transToScheduleJob(spiderFlow));
        if (null != nextExecuteTime) {
            spiderFlow.setNextExecuteTime(nextExecuteTime);
            update(spiderFlow);
        }
    }

    /**
     * Disables cron scheduling for a flow and removes its Quartz job.
     */
    public void stop(long id) throws Exception {
        SpiderFlow spiderFlow = findById(Convert.toLong(id));
        // NOTE(review): see start() — setting NORMAL here conflicts with
        // insertOrUpdate(), where NORMAL means "enabled". Confirm intended code.
        spiderFlow.setEnabled(StatusType.NORMAL.getCode());
        spiderFlow.setNextExecuteTime(null);
        ScheduleUtil.deleteJob(scheduler, spiderFlow.getName(), KEY_PREFIX + spiderFlow.getId());
        update(spiderFlow);
    }

    /** Runs a flow once, immediately and asynchronously, outside the cron schedule. */
    public void run(long id) {
        Spider.executorInstance.submit(() -> {
            try {
                spiderJob.run(findById(id), null);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        });
    }

    /**
     * Increments a flow's execution counter and records its last (and, when known,
     * next) execution time via a direct SQL update.
     */
    public void executeCountIncrement(long id, Date lastExecuteTime, Date nextExecuteTime) {
        String sql = "update sp_flow set execute_count = ifnull(execute_count,0) + 1,last_execute_time = ?0 where id = ?1";
        if (null == nextExecuteTime) {
            spiderFlowRepository.executeSql(sql, new Object[]{lastExecuteTime, id});
        } else {
            sql = "update sp_flow set execute_count = ifnull(execute_count,0) + 1,last_execute_time = ?0,next_execute_time = ?1 where id = ?2";
            spiderFlowRepository.executeSql(sql, new Object[]{lastExecuteTime, nextExecuteTime, id});
        }
    }

    /**
     * Replaces a flow's cron expression and reschedules its Quartz job.
     */
    public void resetCornExpression(long id, String cron) {
        SpiderFlow spiderFlow = findById(id);
        spiderFlow.setCron(cron);
        Date nextExecuteTime = ScheduleUtil.updateScheduleJob(scheduler, transToScheduleJob(spiderFlow));
        spiderFlow.setNextExecuteTime(nextExecuteTime);
        spiderFlowRepository.update(spiderFlow);
    }

    /**
     * Computes the next {@code numTimes} fire times for a cron expression, formatted
     * as yyyy-MM-dd HH:mm:ss strings; on an invalid expression returns a single-element
     * list describing the error.
     */
    public List<String> getRecentTriggerTime(String cron, int numTimes) {
        List<String> list = new ArrayList<>();
        CronTrigger trigger;
        try {
            trigger = TriggerBuilder.newTrigger()
                    .withSchedule(CronScheduleBuilder.cronSchedule(cron))
                    .build();
        } catch (Exception e) {
            // cronSchedule wraps the underlying ParseException as the cause,
            // which carries the human-readable reason.
            list.add("cron表达式 " + cron + " 有误：" + e.getCause());
            return list;
        }
        List<Date> dates = TriggerUtils.computeFireTimes((OperableTrigger) trigger, null, numTimes);
        for (Date date : dates) {
            list.add(DateUtil.formatDate(date, DateUtil.FORMAT_YYYY_MM_DD_HH_MM_SS));
        }
        return list;
    }

    /**
     * Returns the logId of the most recent mongo job log for the flow,
     * or "" when the flow is unknown or has no logs yet.
     */
    public String getFlowMaxLogId(long flowId) {
        SpiderFlow spiderFlow = findById(flowId);
        if (null != spiderFlow) {
            MongoJobLog probe = new MongoJobLog();
            probe.setJobName(spiderFlow.getName());
            probe.setJobGroup(spiderFlow.getId().toString());
            // BUGFIX: the original called Optional.get() unconditionally and threw
            // NoSuchElementException for a flow that had never run.
            return mongoJobLogService.findList(probe).stream()
                    .max((o1, o2) -> DateUtil.compareTo(o1.getStartTime(), o2.getStartTime()))
                    .map(MongoJobLog::getLogId)
                    .orElse("");
        }
        return "";
    }
}
