package com.coderfans.htmlpaser.model;

import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.BeanWrapper;
import org.springframework.beans.BeanWrapperImpl;
import us.codecraft.webmagic.Page;
import us.codecraft.webmagic.Site;
import us.codecraft.webmagic.processor.PageProcessor;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Created by fanchunshuai on 2016-11-2.
 *
 * @version: V1.0
 * @Desc:  单页爬取类  ----实时在线爬取模式
 * @Copyright (c) 2016 58到家-版权所有
 */
public class SinglePageModel<T> {

    private final Logger logger = Logger.getLogger(this.getClass());

    /**
     * Retry count for failed fetches.
     * NOTE(review): currently unused — the inner Site configs hard-code 3 retries;
     * either wire this field into Site.me().setRetryTimes(...) or remove it.
     */
    private int retryTimes;

    /**
     * Sleep time between requests, in milliseconds.
     * NOTE(review): currently unused — the inner Site configs hard-code 1000 ms.
     */
    private int sleepTimes;

    /**
     * URL of the single page to crawl; set by the run/download entry points.
     */
    private String spiderUrl;

    /**
     * Extraction rules.
     * key: name of the extracted field (e.g. a div's title, an anchor's href);
     * value: the XPath expression used to extract that field.
     */
    private Map<String, String> spiderModelMap;

    /**
     * Extraction results accumulated by the online crawler, keyed by the
     * rule names from {@link #spiderModelMap}.
     */
    private Map<String, Object> resultModelMap = new HashMap<>();

    /** Root directory under which downloaded pages are stored. */
    private String rootFilePath;
    /** Sub-directory key inserted between the root path and the file name. */
    private String pathKey;
    /** File name for the downloaded page; ".html" is appended automatically. */
    private String fileName;

    /**
     * Online-crawl mode: data is extracted in real time according to the
     * given XPath rules.
     *
     * @param spiderModelMap extraction rules, field name -&gt; XPath expression
     */
    public SinglePageModel(Map<String, String> spiderModelMap) {
        this.spiderModelMap = spiderModelMap;
    }

    /**
     * Download mode: the fetched page is stored as an HTML file for
     * offline analysis.
     *
     * @param rootPath root directory for downloaded files
     * @param pathKey  sub-directory key inside the root
     * @param fileName file name without extension (".html" is appended)
     */
    public SinglePageModel(String rootPath, String pathKey, String fileName) {
        this.rootFilePath = rootPath;
        this.pathKey = pathKey;
        this.fileName = fileName;
    }

    /**
     * Crawls the given URL and returns the extracted results as a map.
     * The caller decides how to consume the key/value results.
     *
     * @param spiderUrl URL of the page to crawl
     * @param threadNum number of crawler threads; tune to the host machine
     * @return the extracted results, keyed by the names in the rule map
     * @throws ClassNotFoundException never thrown; kept for binary/source
     *         compatibility with existing callers that catch it
     * @throws IllegalAccessException never thrown; kept for compatibility
     * @throws InstantiationException never thrown; kept for compatibility
     */
    public Map<String, Object> runSpider(String spiderUrl, int threadNum)
            throws ClassNotFoundException, IllegalAccessException, InstantiationException {
        this.spiderUrl = spiderUrl;
        us.codecraft.webmagic.Spider.create(new Spider())
                .addUrl(spiderUrl)
                .thread(threadNum).run();
        return this.resultModelMap;
    }

    /**
     * Crawls the given URL and binds the extracted results onto a freshly
     * instantiated bean of the given class (rule-map keys must match the
     * bean's property names).
     *
     * @param spiderUrl URL of the page to crawl
     * @param threadNum number of crawler threads; tune to the host machine
     * @param clazz     class of the result bean to populate
     * @return the populated bean, or {@code null} if instantiation/binding failed
     */
    public T runSpider(String spiderUrl, int threadNum, Class<T> clazz) {
        this.spiderUrl = spiderUrl;
        us.codecraft.webmagic.Spider.create(new Spider())
                .addUrl(spiderUrl)
                .thread(threadNum).run();
        // Relies on Spring's bean-wrapper support to map result keys onto
        // bean properties by name.
        T obj = null;
        try {
            obj = BeanUtils.instantiateClass(clazz);
            BeanWrapper beanWrapper = new BeanWrapperImpl(obj);
            beanWrapper.setPropertyValues(this.resultModelMap);
        } catch (Exception e) {
            logger.error("bean 包装出错", e);
        }
        return obj;
    }

    /**
     * Downloads the page at the given URL and stores it under
     * {@code rootFilePath/pathKey/fileName.html}.
     * NOTE(review): method name has a typo ("dowload") but is public API
     * and therefore kept unchanged for caller compatibility.
     *
     * @param downloadUrl URL of the page to download
     * @param threadNum   number of crawler threads
     */
    public void dowloadSpider(String downloadUrl, int threadNum) {
        this.spiderUrl = downloadUrl;
        us.codecraft.webmagic.Spider.create(new DownloadSpider())
                .addUrl(spiderUrl)
                .thread(threadNum).run();
    }

    /**
     * Online analysis processor: extracts data from the fetched page
     * according to the enclosing instance's XPath rule map.
     */
    private class Spider implements PageProcessor {

        private Site site = Site.me().setRetryTimes(3).setSleepTime(1000);

        @Override
        public void process(Page page) {
            // NOTE(review): spiderUrl is embedded into a regex unescaped;
            // URL metacharacters such as '?' and '.' are interpreted as
            // regex syntax — consider Pattern.quote(...) if that matters.
            page.addTargetRequests(page.getHtml().links()
                    .regex("(" + SinglePageModel.this.spiderUrl + ")").all());

            for (Map.Entry<String, String> entry : SinglePageModel.this.spiderModelMap.entrySet()) {
                String key = entry.getKey();
                String xpathValue = entry.getValue();
                // Keys ending in "List"/"list" collect all matches;
                // everything else takes the first match only.
                if (key.endsWith("List") || key.endsWith("list")) {
                    List<String> resultValue = page.getHtml().xpath(xpathValue).all();
                    SinglePageModel.this.resultModelMap.put(key, resultValue);
                } else {
                    String resultValue = page.getHtml().xpath(xpathValue).get();
                    SinglePageModel.this.resultModelMap.put(key, resultValue);
                }
            }
        }

        @Override
        public Site getSite() {
            return site;
        }
    }

    /**
     * Download processor: saves the raw page HTML to disk for offline
     * extraction. The file is written only if it does not exist yet.
     */
    private class DownloadSpider implements PageProcessor {

        private Site site = Site.me().setRetryTimes(3).setSleepTime(1000);

        @Override
        public void process(Page page) {
            page.addTargetRequests(page.getHtml().links()
                    .regex("(" + SinglePageModel.this.spiderUrl + ")").all());
            if (StringUtils.isNotBlank(page.getHtml().get())) {
                String filepath = SinglePageModel.this.rootFilePath + "/"
                        + SinglePageModel.this.pathKey + "/"
                        + SinglePageModel.this.fileName + ".html";
                logger.info("filePath -----------------:" + filepath);
                File file = new File(filepath);
                File parent = file.getParentFile();
                if (!parent.exists() && !parent.mkdirs()) {
                    logger.error("无法创建目录: " + parent);
                    return;
                }
                try {
                    if (!file.exists()) {
                        // try-with-resources guarantees the writers are closed
                        // even if write() throws (the original leaked them on
                        // failure). FileWriter creates the file itself, so the
                        // explicit createNewFile() call was redundant.
                        // NOTE(review): FileWriter uses the platform default
                        // charset; consider an explicit UTF-8 writer.
                        try (BufferedWriter bw = new BufferedWriter(new FileWriter(file, true))) {
                            bw.write(page.getHtml().get());
                        }
                    }
                } catch (IOException e) {
                    logger.error("文件名、目录名或卷标语法不正确", e);
                }
            }
        }

        @Override
        public Site getSite() {
            return site;
        }
    }
}
