package com.carl.main;

import com.carl.mapper.ZhiHuMapper;
import com.carl.model.ZhiHu;
import com.carl.utils.MybatisHelper;
import com.carl.utils.RedisUtils;
import org.apache.ibatis.session.SqlSession;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;

import java.io.IOException;
import java.util.Date;
import java.util.Timer;
import java.util.TimerTask;

public class Day04_ZhiHuCrawler {

     /**
      * Scheduled crawler for Zhihu Daily (http://daily.zhihu.com).
      *
      * Jsoup selectors used:
      *      whole item list : div.row div.box
      *      per item :
      *           href  : a[href]   (relative; prefixed with the site root)
      *           pic   : a img[src]
      *           title : span text
      *
      * Scheduling:
      *      A java.util.Timer drives the crawl via a TimerTask.
      *           run()  --> the crawl task body
      *           delay  --> initial delay before the first run
      *           period --> interval between subsequent runs
      */
     public static void main(String[] args) throws Exception {
          Timer timer = new Timer();

          // First crawl after 1 second, then repeat every 30 seconds.
          timer.schedule(new TimerTask() {
               @Override
               public void run() {
                    try {
                         crawler_zhihu();
                    } catch (IOException e) {
                         // Swallow so one failed fetch does not kill the timer;
                         // the next scheduled run will retry.
                         e.printStackTrace();
                         System.out.println(" exception -- crawler_zhihu ");
                    }
               }
          }, 1000, 1000 * 30);

     }

     /**
      * Performs one crawl pass: fetches the Zhihu Daily front page, extracts
      * each article (url/pic/title), skips articles already seen (Redis key =
      * article URL), and inserts new ones via MyBatis.
      *
      * @throws IOException if the HTTP fetch of the front page fails
      */
     protected static void crawler_zhihu() throws IOException {
          System.out.println("抓取开始 !!! : " + new Date());

          String crawlerUrl = "http://daily.zhihu.com";
          // Fetch BEFORE opening the SqlSession so a network failure (the
          // common failure mode here) can never leak an unclosed session.
          Document document = Jsoup.connect(crawlerUrl).get();

          SqlSession sqlSession = MybatisHelper.getSqlSessionLocal();
          try {
               ZhiHuMapper zhiHuMapper = sqlSession.getMapper(ZhiHuMapper.class);

               // Jsoup's select() returns an empty Elements, never null,
               // so no null check is needed.
               Elements elements = document.select("div.row div.box");
               for (Element element : elements) {
                    String dataTitle = element.select("span").text();
                    String dataUrl = crawlerUrl + element.select("a").attr("href");

                    // Dedup across runs: the article URL is the Redis key.
                    if (RedisUtils.exitsKey(dataUrl)) {
                         System.out.println("title : " + dataTitle + "  existed !");
                         continue;
                    }

                    RedisUtils.insertKV(dataUrl, "1");

                    ZhiHu zhiHu = new ZhiHu();
                    zhiHu.setPic(element.select("a img").attr("src"));
                    zhiHu.setUrl(dataUrl);
                    zhiHu.setTitle(dataTitle);
                    zhiHu.setCrawlerTime(new Date());
                    System.out.println("\t title : " + dataTitle + "  入库中");

                    try {
                         zhiHuMapper.insert(zhiHu);
                         sqlSession.commit();
                    } catch (Exception e) {
                         // Insert failed: remove the dedup marker so this
                         // article can be retried on the next scheduled run.
                         RedisUtils.deleteKey(dataUrl);
                         e.printStackTrace();
                    }
               }
          } finally {
               // Always release the session, even if extraction throws.
               sqlSession.close();
          }
          System.out.println("抓取结束 !!! ");
     }


}
