package com.maistars.spider.service.extract;

import com.alibaba.fastjson.JSON;
import com.maistars.spider.common.exception.TopicSpiderException;
import com.maistars.spider.service.domain.capture.CapturedPage;
import com.maistars.spider.service.domain.rule.FieldRule;
import com.maistars.spider.service.domain.rule.PageRule;
import com.maistars.spider.service.domain.task.CrawlerTaskInst;
import com.maistars.spider.service.parser.FieldRuleParser;
import lombok.extern.slf4j.Slf4j;

import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.stream.Collectors;

/**
 * Extracts configured fields from captured pages and writes the resulting
 * records to the task instance's result store.
 *
 * <p>Extraction is driven by the {@link FieldRule} list of the supplied
 * {@link PageRule}: each field rule becomes one output column, and each
 * captured page yields one record (a list of JSON-serialized value lists,
 * one entry per column, in column order).
 *
 * @author dhr
 * @date 2021/1/17 10:38 PM
 */
@Slf4j
public class ExtractEngine implements AutoCloseable {

    // Worker pool sized by the constructor's threadNum.
    // NOTE(review): currently unused by any method in this class — pages are
    // processed sequentially in executeWithPage; confirm intent before removing.
    // It is now released via close() so the threads no longer leak.
    private final ExecutorService executorService;

    // Task instance whose result store receives the column names and records.
    private final CrawlerTaskInst crawlerTaskInst;

    // Page-level rule supplying the content type and the field rules.
    private final PageRule pageRule;

    // Field name -> output column position; populated once by writeColumnNames.
    private final Map<String, Integer> columnIndex = new HashMap<>();

    // Ordered output column names, derived from the field rules on first use.
    private List<String> columnNames = new ArrayList<>();

    // Extraction rules taken from pageRule; one per output column.
    private final List<FieldRule> fieldRules;

    /**
     * @param crawlerTaskInst task instance providing the result store to write to
     * @param pageRule        rule describing how to extract fields from a page
     * @param threadNum       size of the internal worker pool
     */
    public ExtractEngine(CrawlerTaskInst crawlerTaskInst, PageRule pageRule, int threadNum) {
        this.crawlerTaskInst = crawlerTaskInst;
        this.executorService = Executors.newFixedThreadPool(threadNum);
        this.pageRule = pageRule;
        this.fieldRules = pageRule.getFieldRules();
    }

    /**
     * Extracts fields from every captured page and writes one record set per
     * page to the result store. A failure on a single page is logged and does
     * not abort processing of the remaining pages.
     *
     * @param capturedPages pages to extract fields from
     * @throws TopicSpiderException if the column names cannot be written
     */
    public void executeWithPage(List<CapturedPage> capturedPages) throws TopicSpiderException {
        writeColumnNames(fieldRules);

        for (CapturedPage capturedPage : capturedPages) {
            try {
                List<String> records = generateRecords(fieldRules, columnIndex, capturedPage);
                crawlerTaskInst.getTaskInstResultStore().getExtractedFieldStore()
                        .writeRecords(capturedPage.getCurrentUrl(), records);
            } catch (TopicSpiderException e) {
                // Previous message said "executeForLazy" (copy-paste residue);
                // name the real method and the failing page so failures are traceable.
                log.warn("executeWithPage failed for url {}", capturedPage.getCurrentUrl(), e);
            }
        }
    }

    /**
     * Parses one page with the field rules and returns the extracted values as
     * JSON strings, ordered by the column index of each field.
     *
     * @param fieldRules  extraction rules, one per output column
     * @param columnIndex field name -> column position mapping
     * @param capturedPage page whose content is parsed
     * @return one JSON-serialized value list per field, in column order
     */
    private List<String> generateRecords(List<FieldRule> fieldRules, Map<String, Integer> columnIndex, CapturedPage capturedPage) {
        // TreeMap keys on the column position so values() iterates in column order.
        Map<Integer, String> recordMap = new TreeMap<>();
        FieldRuleParser fieldRuleParser = new FieldRuleParser(fieldRules, capturedPage.getPageContent(), pageRule.getPageContentType());
        Map<FieldRule, List<String>> fieldRuleListMap = fieldRuleParser.parse();

        for (Map.Entry<FieldRule, List<String>> entry : fieldRuleListMap.entrySet()) {
            recordMap.put(columnIndex.get(entry.getKey().getFieldName()), JSON.toJSONString(entry.getValue()));
        }
        return new ArrayList<>(recordMap.values());
    }

    /**
     * Derives the column names (and the name -> index map) from the field rules
     * on first invocation, then writes the column names to the result store.
     * Note: the store write happens on every call, not only the first.
     *
     * @param fieldRules rules whose field names become the columns
     * @throws TopicSpiderException if the store rejects the column names
     */
    private void writeColumnNames(List<FieldRule> fieldRules) throws TopicSpiderException {
        if (columnNames.isEmpty()) {
            columnNames = fieldRules.stream()
                    .map(FieldRule::getFieldName)
                    .collect(Collectors.toList());

            for (int i = 0; i < columnNames.size(); i++) {
                columnIndex.put(columnNames.get(i), i);
            }
        }
        crawlerTaskInst.getTaskInstResultStore().getExtractedFieldStore().writeColumnNames(columnNames);
    }

    /**
     * Shuts down the internal worker pool. Without this the fixed thread pool
     * created in the constructor would keep its threads alive indefinitely.
     */
    @Override
    public void close() {
        executorService.shutdown();
    }

}
