package pri.lt.tasks;

import edu.uci.ics.crawler4j.crawler.CrawlConfig;
import edu.uci.ics.crawler4j.crawler.CrawlController;
import edu.uci.ics.crawler4j.fetcher.PageFetcher;
import edu.uci.ics.crawler4j.robotstxt.RobotstxtConfig;
import edu.uci.ics.crawler4j.robotstxt.RobotstxtServer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import pri.lt.MyCrawler;

import java.io.File;
import java.text.SimpleDateFormat;
import java.time.LocalTime;
import java.time.format.DateTimeFormatter;
import java.util.Date;

@Component
public class SogouWeixinScheduledTasks {

    private static final Logger log = LoggerFactory.getLogger(SogouWeixinScheduledTasks.class);

    // DateTimeFormatter is thread-safe and safe to cache statically, unlike
    // SimpleDateFormat (which must not be shared across threads).
    private static final DateTimeFormatter TIME_FORMAT = DateTimeFormatter.ofPattern("HH:mm:ss");

    // Seed index pages are pc_1.html .. pc_28.html on weixin.sogou.com.
    private static final int SEED_COUNT = 28;

    private static final int NUMBER_OF_CRAWLERS = 7;

    /**
     * Runs every 10 minutes: wipes the previous run's "result" output directory
     * and crawler4j's "frontier" state directory, then performs a fresh crawl of
     * the sogou weixin index pages (depth 2) with {@link MyCrawler}.
     * Blocks until the crawl finishes.
     */
    @Scheduled(fixedRate = 600000)
    public void reportCurrentTime() {
        log.info("The time is now {}", TIME_FORMAT.format(LocalTime.now()));

        // Recreate an empty "result" directory for this run's output.
        File resultDir = new File("result");
        if (resultDir.exists()) {
            DeleteAll(resultDir);
        }
        resultDir.mkdir();

        // Remove the frontier DB so each run starts over from the seeds
        // despite resumable crawling being enabled.
        File frontier = new File("frontier");
        if (frontier.exists()) {
            DeleteAll(frontier);
        }

        CrawlConfig config = new CrawlConfig();
        config.setCrawlStorageFolder("./");
        config.setMaxDepthOfCrawling(2);
        config.setIncludeHttpsPages(true);
        config.setResumableCrawling(true);
        // Mobile user agent so sogou serves the mobile article pages.
        config.setUserAgentString("Mozilla/5.0 (Linux; U; Android 8.0.0; zh-cn; MIX 2S Build/OPR1.170623.032) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/57.0.2987.132 MQQBrowser/8.9 Mobile Safari/537.36");

        //config.setProxyHost("192.168.199.144");
        //config.setProxyPort(8888);

        PageFetcher pageFetcher = new PageFetcher(config);
        RobotstxtConfig robotstxtConfig = new RobotstxtConfig();
        robotstxtConfig.setEnabled(false);
        RobotstxtServer robotstxtServer = new RobotstxtServer(robotstxtConfig, pageFetcher);
        try {
            CrawlController controller = new CrawlController(config, pageFetcher, robotstxtServer);
            /*
             * Seed URLs: these are fetched first, and the crawler then follows
             * links found in these pages.
             */
            for (int i = 1; i <= SEED_COUNT; i++) {
                controller.addSeed("https://weixin.sogou.com/pcindex/pc/pc_" + i + "/pc_" + i + ".html");
            }

            /*
             * Start the crawl. This is a blocking operation: control returns
             * only when crawling is finished.
             */
            controller.start(MyCrawler.class, NUMBER_OF_CRAWLERS);
        } catch (Exception e) {
            log.error("Sogou weixin crawl failed", e);
        }
    }

    /**
     * Recursively deletes {@code dir}, which may be a regular file or a
     * directory tree. Logs each path together with whether its deletion
     * succeeded; failures are logged but not treated as errors.
     *
     * @param dir file or directory to delete
     */
    public static void DeleteAll(File dir) {
        if (dir.isDirectory()) {
            File[] children = dir.listFiles();
            // listFiles() returns null on I/O error or lack of permission;
            // guard against the NPE and fall through to the delete attempt.
            if (children != null) {
                for (File child : children) {
                    DeleteAll(child);
                }
            }
        }
        log.info("{} : {}", dir, dir.delete());
    }
}
