package com.ruoyi.web.controller.timer;

import com.google.common.collect.Maps;
import com.ruoyi.system.domain.SysWhitePaperOutline;
import com.ruoyi.system.domain.SysWhitePaperSurveyResult;
import com.ruoyi.system.domain.SysWhitePaperSurveyTask;
import com.ruoyi.system.service.ISysWhitePaperOutlineService;
import com.ruoyi.system.service.ISysWhitePaperSurveyResultService;
import com.ruoyi.system.service.ISysWhitePaperSurveyTaskService;
import com.ruoyi.system.webcrawler.CrawlRequest;
import com.ruoyi.system.webcrawler.CrawlResult;
import com.ruoyi.system.webcrawler.WebCrawlerService;
import com.ruoyi.web.controller.common.CommonController;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

import java.util.List;
import java.util.Map;

@Component
public class WebsiteCrawlTimer {
    @Autowired
    private ISysWhitePaperSurveyTaskService sysWhitePaperSurveyTaskService;
    @Autowired
    private ISysWhitePaperSurveyResultService sysWhitePaperSurveyResultService;
    @Autowired
    private ISysWhitePaperOutlineService sysWhitePaperOutlineService;
    @Autowired
    private WebCrawlerService crawlerService;
    private static final Logger log = LoggerFactory.getLogger(WebsiteCrawlTimer.class);

    /**
     * 轮询爬取网站数据
     * <p>
     * Runs every 10 minutes. For each survey task currently in the "running" state:
     * crawls the URL stored in the task's instruction workflow field, persists the
     * parsed title/content as a survey result row, and appends the content to the
     * associated outline's AI analysis result. A failure on one task is logged and
     * does not stop the remaining tasks.
     */
    @Scheduled(cron = "0 0/10 * * * ?")
    public void websiteCraw() {
        log.info("start timer tasks");
        // 查询所有的执行中的网站爬取任务
        List<SysWhitePaperSurveyTask> runTaskList = sysWhitePaperSurveyTaskService.listSurveyTaskOfRun();
        log.info("tasks number = {}", runTaskList.size());

        if (CollectionUtils.isNotEmpty(runTaskList)) {

            for (SysWhitePaperSurveyTask runTask : runTaskList) {
                try {
                    CrawlRequest request = new CrawlRequest();
                    request.setDataType("html");
                    // NOTE(review): the task's instructionWorkflow field is used as the crawl URL
                    request.setUrl(runTask.getInstructionWorkflow());
                    CrawlResult crawlResult = crawlerService.crawlWebsite(request);

                    // 解析网站爬取结果
                    Map<String, String> websiteMap = this.getWebsiteContent(crawlResult);
                    String websiteContent = websiteMap.get("websiteContent");
                    String websiteTitle = websiteMap.get("websiteTitle");

                    // 将爬取结果保存至调研结果表中
                    SysWhitePaperSurveyResult sysWhitePaperSurveyResult = new SysWhitePaperSurveyResult();
                    sysWhitePaperSurveyResult.setProjectId(runTask.getProjectId());
                    sysWhitePaperSurveyResult.setTaskId(runTask.getId());
                    sysWhitePaperSurveyResult.setSurveyResultTitle(websiteTitle);
                    sysWhitePaperSurveyResult.setSurveyResult(websiteContent);
                    sysWhitePaperSurveyResultService.insertSysWhitePaperSurveyResult(sysWhitePaperSurveyResult);

                    // 根据大纲ID查询大纲信息
                    SysWhitePaperOutline whitePaperOutline = sysWhitePaperOutlineService.selectSysWhitePaperOutlineById(runTask.getOutlineId());
                    if (whitePaperOutline == null) {
                        // Guard: a missing outline row should not abort with an NPE
                        log.warn("outline not found, outlineId={}, taskId={}", runTask.getOutlineId(), runTask.getId());
                        continue;
                    }

                    // 更新大纲表的AI调研分析结果
                    SysWhitePaperOutline whitePaperOutlineForUpdate = new SysWhitePaperOutline();
                    whitePaperOutlineForUpdate.setId(whitePaperOutline.getId());
                    whitePaperOutlineForUpdate.setAiAnalyseResultTitle(websiteTitle);
                    // Avoid prepending the literal "null;" when no prior analysis result exists
                    String previousResult = whitePaperOutline.getAiAnalyseResult();
                    if (StringUtils.isNotEmpty(previousResult)) {
                        whitePaperOutlineForUpdate.setAiAnalyseResult(previousResult + ";" + websiteContent);
                    } else {
                        whitePaperOutlineForUpdate.setAiAnalyseResult(websiteContent);
                    }
                    sysWhitePaperOutlineService.updateSysWhitePaperOutline(whitePaperOutlineForUpdate);
                } catch (Exception e) {
                    // Pass the exception as the last argument so SLF4J logs the stack trace;
                    // a {} placeholder consumes nothing when the final arg is a Throwable.
                    log.error("爬取数据失败，原因：", e);
                }
            }
        }
    }

    /**
     * 解析网站爬取结果
     * <p>
     * Flattens the crawl result's data maps into a single comma-joined content
     * string built from the h1, h2, title and meta_description entries, and
     * captures the last non-missing "title" value as the website title.
     *
     * @param crawlResult crawl response whose data is a list of key/value maps
     * @return map with keys "websiteTitle" and "websiteContent" (never null values
     *         for the keys themselves; title may be the empty string)
     */
    private Map<String, String> getWebsiteContent(CrawlResult crawlResult) {
        Map<String, String> resultMap = Maps.newHashMap();
        String websiteTitle = "";
        StringBuilder websiteContentBuilder = new StringBuilder();
        List<Map<String, String>> dataList = crawlResult.getData();
        if (CollectionUtils.isNotEmpty(dataList)) {
            for (Map<String, String> dataMap : dataList) {
                String h1 = dataMap.get("h1");
                if (StringUtils.isNotEmpty(h1)) {
                    websiteContentBuilder.append(h1).append(",");
                }

                String h2 = dataMap.get("h2");
                if (StringUtils.isNotEmpty(h2)) {
                    websiteContentBuilder.append(h2).append(",");
                }

                String title = dataMap.get("title");
                if (StringUtils.isNotEmpty(title)) {
                    websiteContentBuilder.append(title).append(",");
                }

                String metaDescription = dataMap.get("meta_description");
                // Fixed copy-paste bug: previously tested `title` here, which appended
                // the literal "null" when title was set but meta_description was absent,
                // and dropped the description when title was absent.
                if (StringUtils.isNotEmpty(metaDescription)) {
                    websiteContentBuilder.append(metaDescription);
                }

                websiteTitle = title;
            }
        }

        resultMap.put("websiteTitle", websiteTitle);
        resultMap.put("websiteContent", websiteContentBuilder.toString());
        return resultMap;
    }


}
