package cn.net.withub.dataCollector.web.collector;

import cn.edu.hfut.dmic.webcollector.model.CrawlDatum;
import cn.edu.hfut.dmic.webcollector.model.CrawlDatums;
import cn.edu.hfut.dmic.webcollector.model.Page;
import cn.edu.hfut.dmic.webcollector.plugin.berkeley.BreadthCrawler;
import cn.net.withub.dataCollector.common.editor.DateUtil;
import cn.net.withub.dataCollector.common.model.TCollectorContent;
import cn.net.withub.dataCollector.common.model.TCollectorData;
import cn.net.withub.dataCollector.common.utils.CreateNewKey;
import cn.net.withub.dataCollector.common.utils.FileUtil;
import cn.net.withub.dataCollector.web.collector.HttpClientUtils;
import cn.net.withub.dataCollector.common.utils.JsonHelper;
import cn.net.withub.dataCollector.web.service.CollectorService;
import org.apache.log4j.Logger;
import org.jsoup.nodes.Document;
import org.jsoup.select.Elements;
import org.springframework.web.context.WebApplicationContext;

import javax.annotation.Resource;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Crawler that harvests news items from an intranet news-list ajax endpoint,
 * downloads the linked Word/PDF documents, and persists their metadata
 * ({@code TCollectorData}) and extracted text ({@code TCollectorContent})
 * through the Spring-managed {@code CollectorService}.
 *
 * Created by yuanjie on 2018/5/3.
 */
public class CollectorDocUtils extends BreadthCrawler {

    private final Logger log = Logger.getLogger(this.getClass());

    /**
     * Date pattern for the "fbsj" (publish date) field.
     * NOTE: SimpleDateFormat is NOT thread-safe and WebCollector calls
     * visit() from multiple worker threads, so a formatter is created
     * per use instead of being shared as an instance field.
     */
    private static final String DATE_PATTERN = "yyyy-MM-dd";

    private CollectorService collectorService;

    private WebApplicationContext webAppliction;

    /**
     * @param crawlPath WebCollector working directory for crawl state
     * @param autoParse whether WebCollector should auto-extract links
     * @param wac       Spring context used to look up "collectorService";
     *                  may be null, in which case persistence is skipped
     */
    public CollectorDocUtils(String crawlPath, boolean autoParse, WebApplicationContext wac) {
        super(crawlPath, autoParse);

        webAppliction = wac;
        if (webAppliction != null) {
            collectorService = (CollectorService) webAppliction.getBean("collectorService");
        }

        // The list page is an ajax endpoint returning JSON; seed it directly
        // instead of relying on link auto-discovery.
        this.addSeed("http://149.0.160.10/news/newsList1.shtml;jsessionid=E89FEAEE44B202700CB536E27C71CCA6?page=1&pageSize=5&categoryid=186");
    }

    /**
     * Dispatches each fetched page: list pages enqueue detail pages that
     * have not been collected yet; detail pages are downloaded and saved.
     */
    public void visit(Page page, CrawlDatums crawlDatums) {
        if (collectorService == null) {
            // Constructor tolerates a null context; without the service we
            // can neither deduplicate nor persist, so skip the page.
            log.warn("collectorService unavailable, skipping: " + page.getUrl());
            return;
        }
        // 过滤ajax请求地址，并获取请求数据
        if (page.matchUrl("http://149.0.160.10/news/newsList1.shtml.*")) {
            visitNewsList(page, crawlDatums);
        } else if (page.matchUrl("http://149.0.0.150/information/InformationDisplay.asp\\?newsid=.*")) {
            visitNewsDetail(page);
        }
    }

    /**
     * Parses the JSON news list and enqueues a detail-page CrawlDatum for
     * every entry whose (fbsj, title) pair has not been stored yet.
     */
    private void visitNewsList(Page page, CrawlDatums crawlDatums) {
        log.info("--------------------------html:\n" + page.getHtml());
        Map<String, Object> map = JsonHelper.jsonToMap(page.getHtml());
        if (map == null) {
            return;
        }
        List<Map<String, Object>> list = (List<Map<String, Object>>) map.get("data");
        if (list == null) {
            // Defensive: endpoint answered but without the expected "data" array.
            log.warn("news list response has no 'data' field: " + page.getUrl());
            return;
        }
        // Local formatter: SimpleDateFormat is not thread-safe (see DATE_PATTERN).
        SimpleDateFormat sdf = new SimpleDateFormat(DATE_PATTERN);
        for (Map<String, Object> m : list) {
            String newsid = m.get("NewsID") == null ? "" : m.get("NewsID").toString();
            String title = m.get("title") == null ? "" : m.get("title").toString();
            Object rawDate = m.get("addeddate");
            if (rawDate == null) {
                // Previously this NPE'd and aborted the whole list; skip the entry instead.
                log.warn("entry without addeddate skipped, newsid=" + newsid);
                continue;
            }
            long addeddate;
            try {
                addeddate = Long.parseLong(rawDate.toString());
            } catch (NumberFormatException nfe) {
                log.warn("unparseable addeddate '" + rawDate + "' skipped, newsid=" + newsid);
                continue;
            }
            String newUrl = "http://149.0.0.150/information/InformationDisplay.asp?newsid=" + newsid;
            String fbsj = sdf.format(new Date(addeddate));
            // 判断已抓取过的最大发布日期，根据标题和发布时间进行对比，未抓取过的进行抓取
            // Single quotes in the title are doubled so they cannot break
            // (or inject into) the concatenated query condition.
            String condition = " fbsj='" + fbsj + "' and title='" + title.replace("'", "''") + "'";
            TCollectorData tCollectorData = (TCollectorData) collectorService.load(TCollectorData.class, condition);
            if (tCollectorData == null) {
                log.info("fbsj: " + fbsj + "抓取");
                crawlDatums.add(
                        new CrawlDatum(newUrl)
                                .meta("title", title)
                                .meta("fbsj", fbsj)
                );
            }
        }
    }

    /**
     * Downloads a Word/PDF detail page, extracts its text via
     * HttpClientUtils, and saves both the metadata row and the content row.
     */
    private void visitNewsDetail(Page page) {
        String url = page.getUrl();
        String contentType = page.getResponse().getContentType();
        log.info("contentType:     \n" + contentType + "     meta:" + page.meta("title"));
        try {
            String title = page.meta("title");
            String fbsj = page.meta("fbsj");
            Map<String, Object> map = null;
            // File-format tag persisted in wjgs; previously hard-coded to
            // "doc" even for PDFs — that was a bug.
            String fileType = "doc";
            //获取word文件
            if (contentType != null && contentType.contains("msword")) {
                map = HttpClientUtils.getWord(url);
            }
            //获取pdf文件
            else if (contentType != null && contentType.contains("pdf")) {
                map = HttpClientUtils.getPdf(url);
                fileType = "pdf";
            }
            String content = "";
            String serverFilePath = "";
            if (map != null) {
                content = map.get("content") == null ? "" : map.get("content").toString();
                serverFilePath = map.get("serverFilePath") == null ? "" : map.get("serverFilePath").toString();
            }

            Date date = new Date();
            String id = new CreateNewKey().createId();
            TCollectorData tCollectorData = new TCollectorData();
            tCollectorData.setId(id);
            tCollectorData.setTitle(title);
            // Local formatter for thread safety (see DATE_PATTERN).
            tCollectorData.setFbsj(new SimpleDateFormat(DATE_PATTERN).parse(fbsj));
            tCollectorData.setWjgs(fileType);
            tCollectorData.setUrl(url);
            tCollectorData.setWjlj(serverFilePath);
            tCollectorData.setSystime(date);
            collectorService.save(tCollectorData);

            // Content row references the metadata row via dataId.
            TCollectorContent tCollectorContent = new TCollectorContent();
            tCollectorContent.setId(new CreateNewKey().createId());
            tCollectorContent.setContent(content);
            tCollectorContent.setSystime(date);
            tCollectorContent.setDataId(id);
            collectorService.save(tCollectorContent);
        } catch (Exception e) {
            // Log through the class logger instead of printStackTrace so
            // failures end up in the application log with context.
            log.error("failed to process detail page: " + url, e);
        }
    }
}
