package com.ly.crawl;

import com.ly.crawl.util.Utils;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;

import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.net.URL;
import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

/**
 * A small breadth-first crawler built on Jsoup.
 *
 * <p>Seeded with one start URL, it repeatedly takes the oldest queued URL,
 * parses it (online fetch, raw HTML string, or local file), optionally
 * discovers new links via a URL capturer, and extracts a data item via the
 * data capturer. The crawl stops when the queue is empty, {@code size} items
 * have been collected, or the progress listener requests a stop.
 *
 * <p>Not thread-safe; intended for single-threaded use.
 *
 * @param <T> the type of data item extracted from each page
 */
public class Crawler<T> {

    /** Extracts follow-up links from each fetched page; optional. */
    private Capturer<Set<String>> urlsCapturer;
    /** Extracts a data item from each fetched page; required. */
    private Capturer<T> dataCapturer;
    /** Optional persistence sink for captured items. */
    private DataDao<T> dao;
    /** URLs still to be visited, in discovery order. */
    private Set<String> urls = new LinkedHashSet<>();
    /** URLs crawled successfully. */
    private Set<String> ends = new LinkedHashSet<>();
    /** URLs that threw during crawling. */
    private Set<String> errs = new LinkedHashSet<>();
    /** Maximum number of data items to collect before stopping. */
    private int size = 100;
    /** Number of data items collected so far. */
    private int nums = 0;
    /** Stream for progress/report output; defaults to {@code System.out}. */
    private PrintStream out;
    /** Accumulated result, populated only when {@link #returnData} is set. */
    private T data;
    /** Whether captured data should be retained and returned by {@link #start()}. */
    private boolean returnData;
    private ProgressListener progressListener;
    /** Reused event object passed to the progress listener. */
    private ProgressEvent pe = new ProgressEvent();

    /**
     * Creates a crawler seeded with a single source.
     *
     * @param url          start source: an http(s) URL, a raw HTML string, or
     *                     a local file path (see {@link #parse(String)})
     * @param dataCapturer extracts one data item per fetched document
     */
    public Crawler(String url, Capturer<T> dataCapturer) {
        this.dataCapturer = dataCapturer;
        urls.add(url);
        out = System.out;
    }

    /**
     * Runs the crawl loop until the URL queue is empty, {@code size} items
     * have been collected, or the listener requests a stop.
     *
     * @return the accumulated data when {@link #setReturnData(boolean)} was
     *         enabled, otherwise {@code null}
     * @throws IOException declared for API compatibility; per-URL failures are
     *         caught, logged, and recorded in the error set instead
     */
    public T start() throws IOException {
        while (!urls.isEmpty() && nums < size) {
            String url = urls.iterator().next();
            out.println("====================== 开始采集: " + url + " ======================");
            try {
                Document doc = parse(url);
                if (urlsCapturer != null) {
                    for (String newurl : urlsCapturer.capture(url, doc)) {
                        String realURL = Utils.getRealURL(url, newurl);
                        // Skip URLs already crawled or already failed. Without the
                        // errs check, a persistently failing URL re-discovered by
                        // the capturer would be re-queued forever, since failures
                        // never advance nums toward the size limit.
                        if (!ends.contains(realURL) && !errs.contains(realURL)) {
                            urls.add(realURL); // Set.add already dedupes against queued URLs
                        }
                    }
                }
                if (!save(url, dataCapturer.capture(url, doc))) {
                    // Use the configured stream (was System.out, bypassing setOut()).
                    out.println("采集中断!");
                    break;
                }
                ends.add(url);
            } catch (Exception e) {
                e.printStackTrace();
                errs.add(url);
            } finally {
                urls.remove(url);
            }
        }
        if (dao != null) {
            dao.after();
        }
        report();
        if (progressListener != null) {
            // Final notification: no URL, 100% regardless of how the loop ended.
            progressListener.onCaptured(pe.setProgress(null, nums, 100));
        }
        return data;
    }

    /**
     * Resolves a source string to a Jsoup document: online fetch for http(s)
     * URLs, direct parsing for raw HTML, or a local UTF-8 file otherwise.
     */
    private Document parse(String url) throws IOException {
        if (url.startsWith("http")) {
            return Jsoup.parse(new URL(url), 5000);   // online fetch, 5s timeout
        } else if (url.matches("(?s)\\s*<.+")) {      // (?s) so multi-line HTML is recognized
            return Jsoup.parse(url, "");              // raw HTML string
        } else {
            return Jsoup.parse(new File(url), "utf-8"); // local file
        }
    }

    /**
     * Persists one captured item, accumulates it if requested, and notifies
     * the progress listener.
     *
     * @param url  source URL of the item, forwarded to the listener
     * @param data captured item; {@code null} is silently skipped
     * @return {@code false} when the listener asks to stop the crawl,
     *         otherwise {@code true}
     */
    private boolean save(String url, T data) {
        if (data == null) {
            return true;
        }
        if (dao != null) {
            dao.save(data);
        }
        // A Collection counts as multiple items, anything else as one.
        nums += data instanceof Collection ? ((Collection<?>) data).size() : 1;
        if (returnData) {
            // Accumulate successive List results into one list. Checking
            // this.data with instanceof (instead of != null) avoids a
            // ClassCastException when the accumulated value is not a List.
            if (this.data instanceof List && data instanceof List) {
                @SuppressWarnings("unchecked") // both sides verified via instanceof
                List<Object> acc = (List<Object>) this.data;
                acc.addAll((List<?>) data);
            } else {
                this.data = data;
            }
        }
        if (progressListener != null) {
            // Guard against setSize(0) causing division by zero.
            int rate = size > 0 ? nums * 100 / size : 100;
            if (!progressListener.onCaptured(pe.setProgress(url, nums, rate))) {
                return false;
            }
        }
        return true;
    }

    /** Prints a summary of visited, successful, and failed URLs. */
    private void report() {
        out.println("============== 全部的链接 ==== 共 :\t" + (ends.size() + errs.size()) + "\t条 ==========");
        out.println("============== 成功的链接 ==== 共 :\t" + ends.size() + "\t条 ==========");
        out.println("============== 失败的链接 ==== 共 :\t" + errs.size() + "\t条 ==========");
        out.println("============== 采集的数据 ==== 共 :\t" + nums + "\t条 ==========");
    }

    /** Sets the capturer that extracts follow-up links from each page. */
    public Crawler<T> setUrlsCapturer(Capturer<Set<String>> urlsCapturer) {
        this.urlsCapturer = urlsCapturer;
        return this;
    }

    /** Sets the persistence sink for captured items. */
    public Crawler<T> setDao(DataDao<T> dao) {
        this.dao = dao;
        return this;
    }

    /** Sets the maximum number of data items to collect (default 100). */
    public Crawler<T> setSize(int size) {
        this.size = size;
        return this;
    }

    /** Pre-sets the collected-item counter (e.g. when resuming a crawl). */
    public Crawler<T> setNums(int nums) {
        this.nums = nums;
        return this;
    }

    /** Redirects progress/report output away from {@code System.out}. */
    public Crawler<T> setOut(PrintStream out) {
        this.out = out;
        return this;
    }

    /** Enables retaining captured data for return from {@link #start()}. */
    public Crawler<T> setReturnData(boolean returnData) {
        this.returnData = returnData;
        return this;
    }

    /** Sets the per-item progress callback. */
    public Crawler<T> setProgressListener(ProgressListener progressListener) {
        this.progressListener = progressListener;
        return this;
    }

    /** Returns the accumulated data (only populated when returnData is set). */
    public T getData() {
        return data;
    }

    /** Callback invoked after each captured item; return {@code false} to stop the crawl. */
    @FunctionalInterface
    public interface ProgressListener {
        boolean onCaptured(ProgressEvent pe);
    }

    /** Mutable progress snapshot reused across listener invocations. */
    public static class ProgressEvent {
        private String url;
        private int nums;
        private int rate;

        /**
         * Updates this event in place.
         *
         * @param url  URL just processed, or {@code null} for the final event
         * @param nums total items collected so far
         * @param rate completion percentage (0-100)
         * @return this event, for chaining
         */
        public ProgressEvent setProgress(String url, int nums, int rate) {
            this.url = url;
            this.nums = nums;
            this.rate = rate;
            return this;
        }

        public String getUrl() {
            return url;
        }

        public int getNums() {
            return nums;
        }

        public int getRate() {
            return rate;
        }
    }
}

