package com.choudou5.spider.service;

import cn.hutool.core.util.StrUtil;
import com.choudou5.solr.framework.constants.SolrConsts;
import com.choudou5.solr.framework.holder.RequestContextHolder;
import com.choudou5.solr.util.JsonUtil;
import com.choudou5.solr.util.LogDeBugUtil;
import com.choudou5.solr.util.cache.CacheFactory;
import com.choudou5.solr.util.cache.impl.LRUCache;
import com.choudou5.solr.util.http.HttpUtils;
import com.choudou5.spider.config.SpiderConfigUtil;
import com.choudou5.spider.config.TaskBean;
import com.choudou5.spider.util.SpiderTaskUtil;
import org.apache.commons.collections.CollectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.net.URLEncoder;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * 爬虫服务 (crawler service): fetches remote pages for configured spider tasks,
 * parses the row data, and caches results per task and session.
 *
 * @author choudou5
 * @since 2018-08-05
 */
public class SpiderService {

    private static final Logger logger = LoggerFactory.getLogger(SpiderService.class);

    /** Cache entry time-to-live: 20 minutes, in milliseconds. */
    private static final int SESSION_TIMEOUT = 20 * 60 * 1000;

    /** LRU cache keyed by (taskId + session token) -> crawled page row list; max 50 entries. */
    private static final LRUCache<String, List<Map<String, String>>> CACHE_MAP = CacheFactory.newLRUCache(50, SESSION_TIMEOUT);

    /** Utility class — all members are static; no instances. */
    private SpiderService() {
    }

    /**
     * Crawls the first result page of the task identified by {@code taskId}.
     *
     * @param taskId  id of the configured spider task
     * @param keyword raw (un-encoded) search keyword
     * @return parsed page row data, as returned by {@link SpiderTaskUtil#parsePageRowDataList}
     *         (may be null or empty when nothing was crawled)
     */
    public static List<Map<String, String>> pageCrawl(String taskId, String keyword) {
        TaskBean taskBean = SpiderConfigUtil.getTaskBean(taskId);
        return pageCrawl(taskBean, keyword);
    }

    /**
     * Crawls the first result page for the given task and caches non-empty results
     * under (taskId + session token).
     *
     * @param taskBean resolved task configuration
     * @param keyword  raw (un-encoded) search keyword
     * @return parsed page row data (may be null or empty)
     */
    private static List<Map<String, String>> pageCrawl(final TaskBean taskBean, String keyword) {
        LogDeBugUtil.debug("------------------------taskId:{}, {} 开始分页请求.", taskBean.getId(), taskBean.getTitle());
        final String taskId = taskBean.getId();
        // Keep the raw keyword for the parser; only the request URL needs the encoded form.
        // (The original code encoded and then URL-decoded it back — a needless round-trip.)
        String respBody = fetchPage(taskBean, keyword);
        List<Map<String, String>> pageRowDataList = SpiderTaskUtil.parsePageRowDataList(taskBean, respBody, keyword);
        if (CollectionUtils.isNotEmpty(pageRowDataList)) {
            // Cache per task + session token so different sessions don't see each other's data.
            CACHE_MAP.put(taskId + RequestContextHolder.getToken(), pageRowDataList);
            // Was System.out.println — route debug dumps through the debug utility instead.
            LogDeBugUtil.debug(JsonUtil.toStr(pageRowDataList));
        } else {
            logger.warn("第 1 页 爬取 无数据");
            LogDeBugUtil.debug(respBody);
        }
        LogDeBugUtil.debug("------------------------taskId:{}, {} 结束分页请求.", taskId, taskBean.getTitle());
        return pageRowDataList;
    }

    /**
     * Builds the task URL with the keyword URL-encoded in the task's configured
     * encoding, then fetches the page body.
     *
     * @param taskBean task configuration (supplies the base URL and encoding)
     * @param keyword  raw search keyword
     * @return raw HTTP response body
     */
    private static String fetchPage(TaskBean taskBean, String keyword) {
        String encodedKeyword = keyword;
        if (keyword != null) {
            try {
                encodedKeyword = URLEncoder.encode(keyword, taskBean.getEncoding());
            } catch (UnsupportedEncodingException e) {
                // Misconfigured task encoding — log it (was printStackTrace) and fall back
                // to the raw keyword, matching the original best-effort behavior.
                logger.error("taskId:{} 不支持的编码:{}", taskBean.getId(), taskBean.getEncoding(), e);
            }
        }
        String url = SpiderTaskUtil.checkUrlConstParam(taskBean, encodedKeyword);
        LogDeBugUtil.debug(url);
        return HttpUtils.get(url, taskBean.getEncoding());
    }

    /**
     * Crawls one page for the task and extracts short dictionary-word candidates
     * from the keywords column of each row.
     *
     * @param taskId  id of the configured spider task
     * @param keyword raw (un-encoded) search keyword
     * @return distinct words of length &lt;= 4 split from the '|'-separated
     *         keywords field; empty set when the crawl yields no rows
     */
    public static Set<String> pageCrawlDicWord(String taskId, String keyword) {
        TaskBean taskBean = SpiderConfigUtil.getTaskBean(taskId);
        String task = taskId + "_" + keyword;
        LogDeBugUtil.debug("------------------------task:{}, {} 开始分页请求.", task, taskBean.getTitle());
        String respBody = fetchPage(taskBean, keyword);
        List<Map<String, String>> pageRowDataList = SpiderTaskUtil.parsePageRowDataList(taskBean, respBody, keyword);
        Set<String> dicWords = new HashSet<>();
        if (CollectionUtils.isEmpty(pageRowDataList)) {
            logger.warn("{} 第 1 页 爬取 无数据", task);
            return dicWords;
        }
        for (Map<String, String> row : pageRowDataList) {
            String keywords = row.get(SolrConsts.KEY_KEYWORDS);
            if (StrUtil.isNotBlank(keywords)) {
                for (String word : StrUtil.splitToArray(keywords, '|')) {
                    // Only short tokens (<= 4 chars) qualify as dictionary words.
                    if (word.length() <= 4) {
                        dicWords.add(word);
                    }
                }
            }
        }
        return dicWords;
    }

    /**
     * Returns the cached crawl result for the given task in the current session,
     * or null when absent/expired.
     *
     * @param taskId id of the spider task whose cached data is wanted
     * @return cached page row data, or null if not cached
     */
    public static List<Map<String, String>> getCacheSpiderData(String taskId) {
        return CACHE_MAP.get(taskId + RequestContextHolder.getToken());
    }

}
