package com.spider.silence.downloader;

import com.alibaba.fastjson.JSONObject;
import com.spider.silence.abstarct.downloader.AbstractCommonDownLoader;
import com.spider.silence.task.ChinaNewsForSiChuanTask;
import com.spider.silence.task.InformantCenterForSiChuanTask;
import com.spider.silence.task.manager.CommonTaskManager;
import com.spider.silence.utils.DateUtils;
import com.spider.silence.utils.FileUtils;
import com.spider.silence.utils.MD5Utils;
import com.spider.silence.utils.PropertiesUtil;
import org.apache.log4j.Logger;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import us.codecraft.webmagic.Page;

import java.io.File;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Created by admin on 2017/7/3.
 * Downloader for the Sichuan Internet Reporting Center (四川互联网举报中心).
 */
public class InformantCenterForSiChuanDownloader extends AbstractCommonDownLoader {

    // FIX: was Logger.getLogger(ChinaNewsForSiChuanDownloader.class) — a copy-paste
    // from another downloader that attributed all log output to the wrong class.
    // Also made static final per the usual logger convention.
    private static final Logger logger = Logger.getLogger(InformantCenterForSiChuanDownloader.class);

    // Raw HTML snapshots of every page fetched by crawlItem(), keyed by the
    // generated file name. Re-created on each successful crawl; null until then.
    private Map<String,File> files = null;

    /**
     * Returns the HTML snapshot files produced by the most recent successful
     * {@link #crawlItem(String)} call, or {@code null} if none has run yet.
     */
    public Map<String, File> getFiles() {
        return files;
    }

    // Site root, used to turn relative article hrefs into absolute URLs.
    private final String index_site = "http://www.scjb.gov.cn";

    // Listing sections crawled by the generic loop in generateTask().
    // The "accept" section uses a different page layout and is handled separately.
    private final String[] section = {
            //"http://www.scjb.gov.cn/accept",// acceptance status (special layout, handled above)
            "http://www.scjb.gov.cn/policy",// policies & regulations
            "http://www.scjb.gov.cn/notice"// safe internet use

    };

    /**
     * Scans the site's listing pages and registers one crawl task per article
     * link under the "InformantCenterForSiChuan" task group.
     * A failure on one section is logged and the remaining sections are still
     * processed.
     */
    public void generateTask(){
        try {
            // The "accept" (受理情况) listing uses a different DOM layout than
            // the sections in `section`, so it is parsed separately here.
            Page shouliPage = getHtml("http://www.scjb.gov.cn/accept");
            if(shouliPage != null && shouliPage.getHtml() != null) {
                Document document = shouliPage.getHtml().getDocument();
                Elements elements = document.select("div[id=\"main\"] div[class=\"main_l\"] > div[class=\"main_l2\"] > div[class=\"main_l_m\"] > div[class=\"aqsw_main\"] > ul > li");
                for(int j = 0; j < elements.size(); j++) {
                    String url = index_site + elements.get(j).select("div[class=\"aqsw_lb_bt\"] a").attr("href");
                    CommonTaskManager.add("InformantCenterForSiChuan", new InformantCenterForSiChuanTask(url));
                }
            }
        } catch (Exception e) {
            // FIX: was printStackTrace() only — now logged like the section loop
            // below, so failures here are visible in the same place.
            logger.error(e.getMessage(), e);
        }

        for(int i = 0; i < section.length; i++) {
            try {
                Page page = getHtml(section[i]);
                if(page != null && page.getHtml() != null) {
                    Document document = page.getHtml().getDocument();
                    Elements elements = document.select("div[id=\"main\"] div[class=\"main_l\"] > div[class=\"main_l2\"] > div[class=\"main_l_m\"] > div[class=\"zcfg_main\"] > div[class=\"zcfg_main_m\"] > ul > li");
                    for(int j = 0; j < elements.size(); j++) {
                        String url = index_site + elements.get(j).select("a").attr("href");
                        CommonTaskManager.add("InformantCenterForSiChuan", new InformantCenterForSiChuanTask(url));
                    }
                }
            } catch (Exception e) {
                // Log and move on to the next section rather than aborting the scan.
                logger.error(e.getMessage(), e);
            }
        }
    }

    /**
     * Crawls a single article page (and any follow-up pages it paginates into),
     * extracting title, publish time, text content and image URLs.
     * Retries on any error until the "InformantCenterForSiChuan.timeOut"
     * property (milliseconds) elapses.
     *
     * @param url the article URL to crawl
     * @return a JSONObject with a single "data" entry holding the extracted fields
     * @throws Exception if the page could not be crawled within the timeout
     */
    public JSONObject crawlItem(String url) throws Exception {
        JSONObject item = new JSONObject();
        JSONObject data;
        // Marks whether the crawl completed successfully within the timeout.
        boolean success = false;
        long startTime = System.currentTimeMillis();
        long nowTime = startTime;
        // FIX: Long.parseLong avoids the needless boxing of Long.valueOf.
        while (nowTime - startTime < Long.parseLong(PropertiesUtil.getVal("InformantCenterForSiChuan.timeOut"))) {
            // On any failure we retry from scratch, so reset the payload each pass.
            data = new JSONObject();
            data.put("url", url);
            nowTime = System.currentTimeMillis();
            try {
                Page page = getHtml(url,true);
                if(page != null && page.getStatusCode() == 200) {
                    Document document = page.getHtml().getDocument();
                    Elements main = document.select("div[class=\"aqswnr_main\"]");
                    String title = main.select("div[class=\"aqswnr_bt\"]").text();
                    data.put("title", title);
                    String temp = main.select("div[class=\"aqswnr_ly\"]").text();
                    // The last 17 characters of the source line carry the publish date.
                    String publish_time = temp.substring(temp.length()-17);
                    // NOTE(review): the replace(" ", " ") below looks like a no-op —
                    // it may originally target a non-breaking or full-width space;
                    // verify against the source file's encoding before changing it.
                    publish_time = (publish_time.replace(" ", " ")+":00").replace("年", "-").replace("月", "-").replace("日", "");
                    publish_time = DateUtils.strToFormatStr(publish_time);
                    data.put("publish_time", publish_time); // article publish time
                    data.put("spider_time", new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()));// crawl time

                    // Each <p> is either an image (collected into img_urls) or text
                    // (appended to content).
                    Elements content_img_elements = document.select("div[class=\"aqswnr_nr\"] > p");
                    List<String> img_urls = new ArrayList<>();
                    StringBuilder content = new StringBuilder();
                    for(int i = 0; i < content_img_elements.size(); i++) {
                        Element item_element = content_img_elements.get(i);
                        if(item_element.select("img").size() > 0) {
                            img_urls.add(item_element.select("img").attr("src"));
                        } else {
                            content.append(item_element.text().trim());
                        }
                    }

                    // Naming scheme for the saved HTML snapshots:
                    // InformantCenterForSiChuan_<publishMillis>_<md5(url without fragment)>
                    String fileName = "InformantCenterForSiChuan" + "_" + DateUtils.dateToTimestamp( DateUtils.parseDate(publish_time)).getTime() + "_" + MD5Utils.getHash3(url.replaceAll("#.*", ""), "MD5");
                    // NOTE(review): replace("-","-") also looks like a no-op — likely a
                    // full-width dash substitution originally; verify before removing.
                    String publishDate = DateUtils.formatDate(DateUtils.parseDate(publish_time)).replace("-","-");
                    String all_fileName = "四川互联网举报中心_speeches_" + publishDate + "_" + fileName;
                    // Snapshot the first page's HTML.
                    File file = FileUtils.createTempFile(all_fileName + "_pageNo_1", document.html());
                    files = new ConcurrentHashMap<>();
                    files.put(all_fileName + "_pageNo_1", file);
                    // Crawl any follow-up pages of the article.
                    Elements page_elements = document.select("div[class=\"page\"] > div");
                    // NOTE(review): the pager apparently holds 5 fixed controls, so
                    // size()-5 is the number of extra pages — confirm against the site.
                    if(page_elements.size() > 5) {
                        for(int i = 0; i < page_elements.size()-5; i++){
                            Thread.sleep(5000); // sleep 5s between pages to avoid hammering the server
                            Document later_document = getHtml(url+"&page=" + (i+2),true).getHtml().getDocument();
                            Elements later_content_img_elements = later_document.select("div[class=\"aqswnr_nr\"] > p");
                            for(int j = 0; j < later_content_img_elements.size(); j++) {
                                Element later_item_element = later_content_img_elements.get(j);
                                if(later_item_element.select("img").size() > 0) {
                                    img_urls.add(later_item_element.select("img").attr("src"));
                                } else {
                                    content.append(later_item_element.text().trim());
                                }
                            }
                            // Snapshot this page's HTML as well.
                            File later_file = FileUtils.createTempFile(all_fileName + "_pageNo_" + (2+i), later_document.html());
                            files.put(all_fileName + "_pageNo_" + (i+2), later_file);
                        }
                    }
                    if(img_urls.size() > 0) {
                        data.put("img_urls", img_urls.toArray());
                    }
                    // FIX: was data.put("content", content) — storing the raw
                    // StringBuilder; store the String explicitly so the JSON value
                    // does not depend on how fastjson serializes a StringBuilder.
                    data.put("content", content.toString());

                    item.put("data", data);
                    success = true;
                    break;
                }
            } catch (Exception e) {
                // Back off 1.5s, then let the while-loop retry until timeout.
                Thread.sleep(1500);
                logger.info(e);
                System.err.println("爬虫过程出现异常, 尝试重新爬取");
            }
        }
        if(success) {
            return item;
        } else {
            throw new Exception(url+"爬取失败");
        }
    }
}
