package com.likai.crawler;

import java.io.*;
import java.util.Date;
import java.util.LinkedList;
import java.util.Queue;
import java.util.UUID;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.atomic.AtomicInteger;

import com.likai.dao.IJarMapper;
import com.likai.pojo.JarBean;
import com.likai.util.SqlSessionFactoryUtil;
import org.apache.http.HttpEntity;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;
import org.apache.ibatis.session.SqlSession;
import org.apache.log4j.Logger;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;

/**
 * create by likai 2018-10-10
 * 爬虫起始类
 */
public class StartCrawler {

    // Class-wide logger.
    private static final Logger logger = Logger.getLogger(StartCrawler.class);

    // URL suffixes that must be skipped (metadata, checksums, archives, parent dir).
    public static String[] excludeUrl = new String[]{
            ".pom",
            ".xml",
            ".md5",
            ".sha1",
            ".asc",
            ".gz",
            ".zip",
            "../"
    };

    // Queue of URLs waiting to be crawled. A concurrent implementation is required
    // because up to 10 worker threads poll it while also enqueuing discovered links.
    public static Queue<String> waitForCrawlerUrls = new ConcurrentLinkedQueue<String>();

    // Number of ".jar" targets discovered so far; updated from worker threads.
    private static final AtomicInteger total = new AtomicInteger(0);

    // Keeps the dispatcher loop running; volatile so the shutdown flip is visible
    // across threads.
    private static volatile boolean exeFlag = true;

    /**
     * Parses one directory-listing page: every {@code <a>} link ending in ".jar"
     * is persisted to the database, every other non-excluded link is treated as a
     * sub-directory and pushed back onto the crawler queue.
     *
     * @param webContent raw HTML of the page; null/empty content is ignored
     * @param realPath   URL of the page, used as the prefix for its relative links
     */
    public static void parseWebPage(String webContent, String realPath) {
        // Guard against null as well as empty — the original NPEd on null input.
        if (webContent == null || "".equals(webContent)) {
            return;
        }
        SqlSession sqlSession = SqlSessionFactoryUtil.openSqlSession();
        try {
            IJarMapper jarMapper = sqlSession.getMapper(IJarMapper.class);
            Document document = Jsoup.parse(webContent);
            // Walk every anchor tag on the page.
            for (Element element : document.select("a")) {
                String hrefValue = element.attr("href");
                if (isExcluded(hrefValue)) {
                    continue;
                }
                if (hrefValue.endsWith(".jar")) {
                    saveJar(sqlSession, jarMapper, hrefValue, realPath);
                } else {
                    // Looks like a sub-directory (e.g. "0.3-3/"): queue it for crawling.
                    logger.info("爬虫队列新增url:" + realPath + hrefValue);
                    addUrl(realPath + hrefValue, "解析网页");
                }
            }
        } finally {
            // Always release the session, even if parsing or the mapper throws.
            try {
                sqlSession.close();
            } catch (Exception e) {
                logger.error("Exception", e);
            }
        }
    }

    /**
     * Returns true when the href ends with one of the excluded suffixes.
     */
    private static boolean isExcluded(String hrefValue) {
        for (String suffix : excludeUrl) {
            if (hrefValue.endsWith(suffix)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Persists one discovered jar link unless a row with the same name already
     * exists; commits the transaction on successful insert.
     */
    private static void saveJar(SqlSession sqlSession, IJarMapper jarMapper,
                                String hrefValue, String realPath) {
        logger.info("发现第" + total.incrementAndGet() + "个目标: " + realPath + hrefValue);
        // Skip the insert if the jar name is already stored.
        JarBean bean = jarMapper.selectJarByName(hrefValue);
        if (bean != null) {
            logger.info("[" + hrefValue + "] 已经存在数据库中");
            return;
        }
        JarBean jar = new JarBean();
        jar.setUuid(UUID.randomUUID().toString());
        jar.setName(hrefValue);
        jar.setPath(realPath + hrefValue);
        jar.setUpdateDate(new Date());
        // Classify by filename suffix: javadoc jar, sources jar, or plain jar.
        if (hrefValue.endsWith("javadoc.jar")) {
            jar.setType("javadoc");
        } else if (hrefValue.endsWith("sources.jar")) {
            jar.setType("sources");
        } else {
            jar.setType("jar");
        }
        int state = jarMapper.insertDynamic(jar);
        if (state == 1) {
            // Commit the transaction for the successful insert.
            sqlSession.commit();
            logger.info("[" + hrefValue + "] 添加成功");
        } else {
            logger.info("[" + hrefValue + "] 添加失败");
        }
    }

    /**
     * Adds a URL to the crawler queue unless it is null, empty, or already queued.
     *
     * @param url  URL to enqueue
     * @param info short label describing where the URL came from (used in the log)
     */
    public static void addUrl(String url, String info) {
        if (url == null || "".equals(url)) {
            return;
        }
        // contains + add must be atomic, otherwise two worker threads can
        // enqueue the same URL simultaneously.
        synchronized (waitForCrawlerUrls) {
            if (!waitForCrawlerUrls.contains(url)) {
                logger.info("[" + info + "] " + url + " 添加到爬虫队列");
                waitForCrawlerUrls.add(url);
            }
        }
    }

    /**
     * Dispatcher loop: hands queued URLs to a fixed 10-thread pool and shuts the
     * pool down once the queue is empty and no worker is still active.
     */
    public static void parseUrl() {
        ExecutorService executorService = Executors.newFixedThreadPool(10);
        while (exeFlag) {
            if (!waitForCrawlerUrls.isEmpty()) {
                executorService.execute(new Runnable() {
                    @Override
                    public void run() {
                        // Take the next URL off the queue.
                        String url = waitForCrawlerUrls.poll();
                        if (url == null || "".equals(url)) {
                            return;
                        }
                        crawl(url);
                    }
                });
            } else if (((ThreadPoolExecutor) executorService).getActiveCount() == 0) {
                // Queue drained and no worker running: the crawl is finished.
                executorService.shutdown();
                exeFlag = false;
                logger.info("爬虫任务已经完成");
            }
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                logger.error("线程休眠报错", e);
                // Restore the interrupt status so callers can observe it.
                Thread.currentThread().interrupt();
            }
        }
    }

    /**
     * Fetches one URL and, if the response is an HTML page, parses it for links.
     * Failed URLs are re-queued. All HTTP resources are closed exactly once —
     * the original code closed the response twice and NPEd on the timeout path.
     */
    private static void crawl(String url) {
        logger.info("执行解析: " + url);
        CloseableHttpClient httpClient = HttpClients.createDefault();
        CloseableHttpResponse response = null;
        try {
            HttpGet httpGet = new HttpGet(url);
            RequestConfig config = RequestConfig.custom()
                    .setSocketTimeout(100000)   // read timeout: 100 s
                    .setConnectTimeout(5000)    // connect timeout: 5 s
                    .build();
            httpGet.setConfig(config);
            try {
                // ClientProtocolException is a subclass of IOException.
                response = httpClient.execute(httpGet);
            } catch (IOException e) {
                logger.error("IOException", e);
                addUrl(url, "由于异常");
            }
            if (response != null) {
                HttpEntity entity = response.getEntity();
                // getContentType() is null when the server omits the header —
                // guard before dereferencing it.
                if (entity != null && entity.getContentType() != null
                        && "text/html".equals(entity.getContentType().getValue())) {
                    try {
                        String webPageContent = EntityUtils.toString(entity, "utf-8");
                        parseWebPage(webPageContent, url);
                    } catch (IOException e) {
                        logger.error("IOException", e);
                        addUrl(url, "由于异常");
                    }
                }
            } else {
                logger.info("连接超时");
                addUrl(url, "由于异常");
            }
        } finally {
            // Close response (if any) and client exactly once.
            try {
                if (response != null) {
                    response.close();
                }
                httpClient.close();
            } catch (IOException e) {
                logger.error("IOException", e);
            }
        }
    }

    /**
     * Reads seed URLs (one per line) from D:\crawler.txt, queues them, then
     * starts the crawl loop.
     */
    private static void init() {
        logger.info("开始读取爬虫配置文件");
        // try-with-resources: the original hand-rolled finally block threw an NPE
        // when the file did not exist (the readers were never assigned).
        try (FileInputStream fis = new FileInputStream("D:" + File.separator + "crawler.txt");
             InputStreamReader isr = new InputStreamReader(fis);
             BufferedReader br = new BufferedReader(isr)) {
            String str;
            while ((str = br.readLine()) != null) {
                addUrl(str, "初始化");
            }
        } catch (FileNotFoundException e) {
            logger.error("FileNotFoundException", e);
        } catch (IOException e) {
            logger.error("IOException", e);
        }
        logger.info("完成读取爬虫配置文件");
        parseUrl();
    }

    public static void main(String[] args) {
        logger.info("开始执行爬虫任务");
        init();
    }
}
