package com.chaosj.webmagic.scrape;

import com.chaosj.webmagic.IScrapeHandler;
import com.chaosj.webmagic.dto.SearchRequestDto;
import com.chaosj.webmagic.dto.SearchResponseDto;
import com.chaosj.webmagic.process.SearchProcess;
import us.codecraft.webmagic.Spider;
import us.codecraft.webmagic.pipeline.ResultItemsCollectorPipeline;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;

/**
 * @className: SearchScrapeHandler
 * @description: Runs a WebMagic spider for a search request and aggregates the collected results into a single string.
 * @author: caoyangjie
 * @date: 2021/8/11
 **/
public class SearchScrapeHandler implements IScrapeHandler<SearchRequestDto, String> {

    /** Number of worker threads used by each spider run. */
    private static final int THREAD_COUNT = 5;

    /**
     * Per-request pipelines that accumulate scraped items; keyed by the originating request.
     * NOTE(review): entries are never removed, so this map grows unboundedly over the handler's
     * lifetime — consider evicting after {@link #getResult}. Also assumes SearchRequestDto
     * implements equals/hashCode consistently for use as a map key — TODO confirm.
     */
    private final ConcurrentHashMap<SearchRequestDto, ResultItemsCollectorPipeline> collectorPipelineMap =
            new ConcurrentHashMap<>();

    /**
     * Creates and runs a spider for the given request, collecting result items into a
     * request-scoped pipeline. Blocks until the crawl completes ({@code run()} is synchronous).
     *
     * @param request the search request; its target URL is derived via
     *                {@code request.holdRequest(request.getEnums().getTarget())}
     */
    @Override
    public void scrape(SearchRequestDto request) {
        Spider.create(new SearchProcess(request))
                .addUrl(request.holdRequest(request.getEnums().getTarget()))
                .addPipeline(collectorPipelineMap.computeIfAbsent(request, req -> new ResultItemsCollectorPipeline()))
                .thread(THREAD_COUNT)
                .run();
    }

    /**
     * Concatenates the {@code buffer()} output of every collected {@link SearchResponseDto}
     * for the given request.
     *
     * @param requestDto the request previously passed to {@link #scrape}
     * @return the concatenated result text, or an empty string if {@link #scrape} was never
     *         invoked for this request (the original code threw a NullPointerException here)
     */
    @Override
    public String getResult(SearchRequestDto requestDto) {
        // Guard against getResult() being called before scrape(): map lookup may be null.
        ResultItemsCollectorPipeline pipeline = collectorPipelineMap.get(requestDto);
        if (pipeline == null) {
            return "";
        }
        List<SearchResponseDto> responses = pipeline.getCollected().stream()
                .filter(ri -> ri.get("data") != null)
                .map(ri -> (SearchResponseDto) ri.get("data"))
                .collect(Collectors.toList());
        // StringBuilder: this aggregation is single-threaded, so StringBuffer's locking is wasted.
        StringBuilder builder = new StringBuilder();
        responses.forEach(srd -> builder.append(srd.buffer()));
        return builder.toString();
    }
}
