package com.nlp.visualization.service.impl;

import com.nlp.visualization.common.CONSTANTS;
import com.nlp.visualization.crawler.pipeline.JsonAsFilePipeline;
import com.nlp.visualization.crawler.processor.CommonProcessor;
import com.nlp.visualization.service.ICrawlerService;
import com.nlp.visualization.utils.MD5Util;
import com.nlp.visualization.utils.PropertyUtil;
import com.sun.webkit.network.URLs;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import us.codecraft.webmagic.Spider;
import us.codecraft.webmagic.pipeline.JsonFilePipeline;

import java.io.File;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
import java.util.Random;

/**
 * Implementation of the crawler service: fetches web pages with WebMagic
 * and persists each page's extracted content as a JSON file on disk.
 *
 * @author LXM
 * @Title: CrawlerServiceImpl
 * @Description: implementation of the crawler interface
 * @date 2018/3/14下午2:50
 */
@Service
public class CrawlerServiceImpl implements ICrawlerService {

    private static final Logger logger = LoggerFactory.getLogger(CrawlerServiceImpl.class);

    /** Base context path under which all crawler output lives. */
    private final String CONTEXT = CONSTANTS.CONTEXT;

    /** Root save directory, read from config: crawler.savepath (relative to CONTEXT). */
    private final String SAVE_PATH = CONTEXT + PropertyUtil.getInstance("config").getProperty("crawler.savepath");

    // DateTimeFormatter is immutable and thread-safe; the previous SimpleDateFormat
    // fields were shared across threads from this singleton bean, which is unsafe.
    private static final DateTimeFormatter MONTH_FORMAT = DateTimeFormatter.ofPattern("yyyyMM");

    /**
     * Crawls the given pages and persists each one as a JSON file.
     * <p>
     * One {@link Spider} run is started per URL; each result is written to a
     * uniquely named file under a per-month subdirectory of the save path.
     *
     * @param URLs links of the pages to crawl
     * @return the list of files the crawled content was written to
     * @throws Exception if any crawl or file operation fails (original cause attached)
     */
    @Override
    public List<File> crawlAsFiles(String... URLs) throws Exception {

        List<File> websList = new LinkedList<>();

        for (String url : URLs) {
            // Per-month target directory for this run.
            String filePath = getFilePath();
            try {
                File uploadDir = new File(filePath);
                // Create the directory tree if it does not exist yet; mkdirs()
                // returning false with a still-missing directory is a real failure.
                if (!uploadDir.exists() && !uploadDir.mkdirs() && !uploadDir.isDirectory()) {
                    throw new IOException("无法创建目录: " + filePath);
                }
                // Hash the URL together with the timestamp: hashing the timestamp
                // alone collides (and overwrites files) when two iterations fall
                // into the same millisecond.
                File file = new File(filePath, MD5Util.MD5(url + System.currentTimeMillis()) + ".json");
                logger.info("爬虫爬取链接：" + url);
                logger.info("爬虫存取路径：" + file);
                // Start one crawl task for the current URL only.
                // (Previously addUrl(URLs) passed the whole array, so every URL
                // was re-crawled once per loop iteration.)
                Spider.create(new CommonProcessor())
                        .addUrl(url)
                        .addPipeline(new JsonAsFilePipeline(file))
                        // crawl with 2 threads
                        .thread(2)
                        // run synchronously so the file exists before we return it
                        .run();
                websList.add(file);
            } catch (Exception e) {
                // Preserve the stack trace and the original cause instead of swallowing them.
                logger.error("未知异常", e);
                throw new Exception("未知异常", e);
            }
        }

        return websList;
    }

    /**
     * Crawls the given pages and returns each page's smartContent as a string.
     *
     * @param URLs links of the pages to crawl
     * @return currently always {@code null} — not implemented yet
     */
    @Override
    public List<String> crawlContentAsStrings(String... URLs) {
        // TODO(review): unimplemented stub; kept returning null to preserve the
        // existing contract — callers may rely on null meaning "unsupported".
        return null;
    }

    /**
     * Builds the save directory for the current month so output files are
     * partitioned by month (e.g. {@code <savePath>/201803}).
     *
     * @return the month-qualified save path
     */
    private String getFilePath() {
        return SAVE_PATH + MONTH_FORMAT.format(LocalDate.now());
    }

}
