package com.xiezc.service;

import com.mzlion.easyokhttp.HttpClient;
import com.mzlion.easyokhttp.response.callback.CallbackAdaptor;
import com.mzlion.easyokhttp.response.handle.DataHandler;
import com.mzlion.easyokhttp.response.handle.StringDataHandler;
import com.xiezc.dao.StoryContentRepository;
import com.xiezc.dao.StoryRepository;
import com.xiezc.dto.Spider;
import com.xiezc.entity.Story;
import com.xiezc.entity.StoryContent;
import com.xiezc.util.XException;
import com.xiezc.util.XUtil;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.annotation.Resource;
import java.lang.reflect.Method;
import java.util.List;

/**
 * Abstract crawler service.
 *
 * <p>Implements a three-stage pipeline backed by Redis lists:
 * <ol>
 *   <li>{@code urlXQueue}  — URLs waiting to be fetched ({@link #fetchThread()})</li>
 *   <li>{@code htmlXQueue} — fetched HTML waiting to be parsed ({@link #praseThread()})</li>
 *   <li>{@code dataXQueue} — parsed entities waiting to be persisted ({@link #saveThread()})</li>
 * </ol>
 * Subclasses provide parse callbacks that {@link #parseHtml(Spider)} invokes
 * reflectively by the name stored in {@code Spider#getCallback()}.
 */
public abstract class Crawler {

    private static final Logger log = LoggerFactory.getLogger(Crawler.class);

    /** Redis list of URLs waiting to be fetched. */
    private static final String URL_QUEUE = "urlXQueue";
    /** Redis list of fetched pages waiting to be parsed. */
    private static final String HTML_QUEUE = "htmlXQueue";
    /** Redis list of parsed entities waiting to be saved. */
    private static final String DATA_QUEUE = "dataXQueue";
    /** Redis set of URLs already scheduled, used for de-duplication. */
    private static final String FETCHED_SET = "fetchedUrl";
    /** Blocking-pop timeout (seconds) used by the worker loops. */
    private static final int POP_TIMEOUT_SECONDS = 30;
    /** Politeness delay between fetches, in milliseconds. */
    private static final long FETCH_DELAY_MILLIS = 1000L;

    @Resource
    private StoryContentRepository storyContentRepository;
    @Resource
    private StoryRepository storyRepository;
    @Resource
    private RedisService redisService;

    /**
     * Fetches the page at {@code spider.getUrl()} asynchronously and, on
     * success, stores the HTML on the spider and pushes it onto the HTML
     * queue for parsing.
     *
     * @param spider work item carrying the URL to fetch
     * @throws InterruptedException if interrupted during the politeness delay
     */
    public void fetchHtml(Spider spider) throws InterruptedException {
        // Politeness delay so we do not hammer the target site.
        Thread.sleep(FETCH_DELAY_MILLIS);
        String url = spider.getUrl();
        HttpClient.get(url).execute(new CallbackAdaptor<String>() {
            /** Use the plain string handler: the raw page body is all we need. */
            @Override
            public DataHandler<String> getDataHandler() {
                return StringDataHandler.create();
            }

            /** Push the fetched page body straight onto the HTML queue. */
            @Override
            public void onSuccess(String data) {
                spider.setHtml(data);
                redisService.lPush(HTML_QUEUE, spider);
            }
            // NOTE(review): HTTP failures are silently dropped here while the URL
            // stays in the fetchedUrl set, so it is never retried. Consider
            // overriding the adaptor's failure hook to log and/or re-queue.
        });
    }

    /**
     * Parses the fetched HTML by reflectively invoking the callback method
     * named by {@code spider.getCallback()} on this (sub)class. The callback
     * must be public with signature {@code (Document, String)}.
     *
     * @param spider work item carrying the HTML and the callback method name
     * @return the spider with its story or story-content populated, or
     *         {@code null} when the callback produced neither a {@link Story}
     *         nor a {@link StoryContent}
     * @throws XException wrapping any reflection or parse failure
     */
    public Spider parseHtml(Spider spider) {
        try {
            Document doc = Jsoup.parse(spider.getHtml());
            Method method = getClass().getMethod(spider.getCallback(), Document.class, String.class);
            Object result = method.invoke(this, doc, spider.getUrl());
            if (result instanceof Story) {
                spider.setStory((Story) result);
                return spider;
            }
            if (result instanceof StoryContent) {
                spider.setStoryContent((StoryContent) result);
                return spider;
            }
        } catch (Exception e) {
            throw new XException(e);
        }
        // Callback returned something we do not persist; drop the work item.
        return null;
    }

    /**
     * Persists whatever entity the parse step produced, skipping URLs that
     * already exist in the corresponding repository.
     *
     * @param spider work item carrying a parsed story and/or story content
     */
    public void saveData(Spider spider) {
        Story story = spider.getStory();
        if (story != null && storyRepository.findByUrl(story.getUrl()).isEmpty()) {
            log.info("saveData;  url:{}", spider.getUrl());
            storyRepository.save(story);
        }
        StoryContent content = spider.getStoryContent();
        if (content != null && storyContentRepository.findByUrl(content.getUrl()).isEmpty()) {
            log.info("saveData;  url:{}", spider.getUrl());
            storyContentRepository.save(content);
        }
    }

    /**
     * Schedules a URL for crawling with no referring page.
     *
     * @param url        the URL to crawl
     * @param methodName name of the public parse callback on this (sub)class
     */
    public void crawler(String url, String methodName) {
        this.crawler(url, null, methodName);
    }

    /**
     * Normalizes the URL against its referrer, de-duplicates it against the
     * Redis fetched-set, and enqueues a new work item on the URL queue.
     *
     * <p>NOTE(review): the membership check and the add are two separate Redis
     * calls, so two threads can race and enqueue the same URL twice; the
     * repository existence check in {@link #saveData(Spider)} keeps the stored
     * data consistent regardless.
     *
     * @param url         the URL to crawl (may be relative to previousUrl)
     * @param previousUrl the referring page's URL, or {@code null}
     * @param methodName  name of the public parse callback on this (sub)class
     */
    public void crawler(String url, String previousUrl, String methodName) {
        url = XUtil.dealUrl(url, previousUrl);
        if (redisService.setIsMember(FETCHED_SET, url)) {
            return;  // already scheduled once; never fetch the same URL twice
        }
        redisService.setAdd(FETCHED_SET, url);
        Spider spider = new Spider();
        spider.setUrl(url);
        spider.setCallback(methodName);
        spider.setPreviousUrl(previousUrl);
        log.info("urlXQueue:  {}   ;add to urlXQueue: {}+++++++++++++++++++",
                redisService.lLen(URL_QUEUE), spider.getUrl());
        redisService.lPush(URL_QUEUE, spider);
    }

    /** One pipeline step applied to each spider popped from a queue; may throw. */
    @FunctionalInterface
    private interface SpiderStep {
        void accept(Spider spider) throws Exception;
    }

    /**
     * Submits a worker task that blocking-pops spiders from {@code queueName}
     * until {@code XUtil.stop} flips, applying {@code step} to each one.
     * Failures are logged instead of killing the worker; interruption restores
     * the thread's interrupt status and exits the loop.
     */
    private void startWorker(String queueName, SpiderStep step) {
        XUtil.getThreadPool().submit(() -> {
            while (!XUtil.stop) {
                try {
                    Spider spider = (Spider) redisService.lPop(queueName, POP_TIMEOUT_SECONDS);
                    if (spider == null) {
                        continue;  // pop timed out; re-check the stop flag
                    }
                    step.accept(spider);
                } catch (InterruptedException e) {
                    // Restore interrupt status and stop this worker cleanly.
                    Thread.currentThread().interrupt();
                    return;
                } catch (Exception e) {
                    // Log (not printStackTrace) so failures reach the app log.
                    log.error("worker failure on queue {}", queueName, e);
                }
            }
        });
    }

    /** Starts the fetch worker: urlXQueue -&gt; fetchHtml -&gt; htmlXQueue. */
    public void fetchThread() {
        startWorker(URL_QUEUE, spider -> {
            log.info("htmlXQueue.size:{}     fetchHtml;  {}",
                    redisService.lLen(HTML_QUEUE), spider.getUrl());
            this.fetchHtml(spider);
        });
    }

    /**
     * Starts the parse worker: htmlXQueue -&gt; parseHtml -&gt; dataXQueue.
     * (Method name keeps its original misspelling — "prase" — for caller
     * compatibility; renaming would break the public interface.)
     */
    public void praseThread() {
        startWorker(HTML_QUEUE, spider -> {
            log.info("dataXQueue.size: {}      parseHtml;  url:{}",
                    redisService.lLen(DATA_QUEUE), spider.getUrl());
            Spider parsed = this.parseHtml(spider);
            if (parsed != null) {
                redisService.lPush(DATA_QUEUE, parsed);
            }
        });
    }

    /** Starts the save worker: dataXQueue -&gt; saveData. */
    public void saveThread() {
        startWorker(DATA_QUEUE, this::saveData);
    }
}
