package com.wzgl.core.webmagic.processor;

import java.io.Serializable;
import java.util.Date;
import java.util.List;

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;

import com.jfinal.plugin.redis.Redis;
import com.wzgl.core.CacheConsts;
import com.wzgl.core.model.TfinishUrl;
import com.wzgl.core.model.Tgrid;
import com.wzgl.core.server.FinishurlService;
import com.wzgl.core.utils.DateUtil;
import com.wzgl.core.utils.WZGLUtils;
import com.wzgl.core.vo.GameSkyVo;
import com.wzgl.core.webmagic.pipeline.TgridsImgPipeLine;
import com.wzgl.core.webmagic.utils.HrefUtils;
import com.wzgl.core.webmagic.utils.PicUtiles;

import us.codecraft.webmagic.Page;
import us.codecraft.webmagic.Site;
import us.codecraft.webmagic.Spider;
import us.codecraft.webmagic.processor.PageProcessor;

/**
 * Crawls beauty-photo galleries from 27270.com (e.g. http://www.27270.com/tag/441.html,
 * roughly 150 images per day). Runs standalone via {@link #main} or as a scheduled
 * Quartz {@link Job} via {@link #execute}.
 *
 * <p>Tag/list pages are parsed into {@link Tgrid} records which are cached and queued
 * as detail-page requests; detail pages (URLs under "/ent/") have their article body
 * extracted, their images localized, and the cached {@link Tgrid} enriched in place.
 *
 * <p>NOTE(review): the static mutable fields ({@code t}, {@code s}) and the instance
 * counter assume a single spider thread ({@code thread(1)}); this class is not safe
 * for concurrent spiders.
 *
 * @author Administrator
 */
public class HanGuoImgProcessor extends GameSkyVo implements PageProcessor, Serializable, Job {

	private static final long serialVersionUID = 1L;

	/** Persists already-crawled URLs to the database. */
	private static FinishurlService me = FinishurlService.me;
	/** Scratch grid record for the list item currently being parsed. */
	private static Tgrid t = null;
	/** The running spider instance. */
	private static Spider s;
	/** 1-based counter of detail pages processed; used only in log text. */
	private int size = 1;
	/** Crawler configuration: 1 retry, 1s delay, disabled content encoding. */
	private Site site = Site.me().setRetryTimes(1).setSleepTime(1000).addHeader("Accept-Encoding", "/");
	/** Site root; also used as the suffix of the crawl-log cache key. */
	private String pic_ = "http://www.27270.com/";
	/** Carries the fixed describe/type/id metadata copied onto every grid record. */
	private GameSkyVo tf;

	/**
	 * Standalone entry point: crawls a single tag page with one spider thread.
	 */
	public static void main(String[] args) {
		System.out.println("美女--------------------------开始\t"+"时间：" +DateUtil.getTime());
		s = Spider.create(new HanGuoImgProcessor()).addUrl("http://www.27270.com/tag/782.html").thread(1);
		s.run();
		System.out.println("美女--------------------------执行完毕\t"+"时间：" +DateUtil.getTime());
	}

	/**
	 * Quartz entry point: crawls the full set of tag pages with one spider thread
	 * and sends results through {@link TgridsImgPipeLine}.
	 *
	 * @param context Quartz execution context (unused)
	 * @throws JobExecutionException never thrown directly; declared by {@link Job}
	 */
	@Override
	public void execute(JobExecutionContext context) throws JobExecutionException {
		System.out.println("27270美女--------------------------开始\t"+"时间：" +DateUtil.getTime());
		s = Spider.create(new HanGuoImgProcessor()).addUrl(new String[]{"http://www.27270.com/tag/441.html",
				"http://www.27270.com/tag/782.html","http://www.27270.com/tag/384.html","http://www.27270.com/tag/320.html",
				"http://www.27270.com/tag/288.html","http://www.27270.com/tag/35.html","http://www.27270.com/tag/860.html",
				"http://www.27270.com/tag/424.html","http://www.27270.com/tag/370.html","http://www.27270.com/tag/131.html",
				"http://www.27270.com/tag/274.html","http://www.27270.com/tag/875.html"}).thread(1);
		s.addPipeline(new TgridsImgPipeLine());
		s.run();
		System.out.println("27270美女--------------------------执行完毕\t"+"时间：" +DateUtil.getTime());
	}

	/**
	 * Dispatches a fetched page: URLs under "/ent/" are article detail pages,
	 * everything else is treated as a tag/list page.
	 */
	@Override
	public void process(Page page) {
		if (page.getUrl().regex("http://www.27270.com/ent/").match()) {
			processDetailPage(page);
		} else {
			processListPage(page);
		}
	}

	/**
	 * Extracts the article body of a detail page (following the article's own
	 * pagination), downloads its images, and stores the enriched {@link Tgrid}
	 * back into the cache.
	 */
	private void processDetailPage(Page page) {
		// Grid record cached while the list page was parsed.
		Tgrid tgrid = (Tgrid) WZGLUtils.getCache(CacheConsts.CACHE_MAGIC_GIRD_NAME, page.getUrl());
		if (tgrid == null) {
			// Defensive: cache entry expired or was never written — nothing to enrich.
			page.setSkip(true);
			return;
		}

		// Source site
		String ffrom = "27270.com";
		System.out.println(ffrom);
		tgrid.setFfrom(ffrom);

		// Article body
		String cont = "";
		String pic_text = page.getHtml().xpath("//div[@id='picBody']/p").toString();
		if (pic_text != null && !pic_text.isEmpty()) {
			int end = pic_text.lastIndexOf("</p>");
			// Keep everything up to and including the last closing </p>.
			// If no </p> is present, keep the raw extract rather than taking a
			// bogus substring (lastIndexOf == -1 would otherwise yield substring(0,3)).
			cont = (end >= 0) ? pic_text.substring(0, end + 4) : pic_text;
		}
		cont = cont + "<br/><br/>";

		// Follow the article's internal pagination links, if any.
		List<String> hrefs = page.getHtml().xpath("//ul[@class='articleV4Page']/li/a").all();
		if (hrefs.size() > 0) {
			String repex = "//div[@class='articleV4Body']/p";
			cont = HrefUtils.getOtherHrefForImg(hrefs, cont, repex);
		}

		// Download the images and rewrite their URLs to local copies.
		cont = PicUtiles.replaceAllHrefAndImg(cont);

		System.out.println("cont:  "+cont+"\n");
		tgrid.setFhtml(cont);

		// Store the enriched record back under its URL.
		String cachename = CacheConsts.CACHE_MAGIC_GIRD_NAME;
		String name = cachename + ":" + page.getUrl();
		WZGLUtils.setCache(cachename, name, tgrid);

		// Append only this page's entry to the crawl log (the cached log already
		// contains all earlier entries — re-appending history would duplicate it).
		// NOTE(review): the read key ("爬取日志:"+pic_) presumably matches the write
		// key (CACHE_LOG_MAGIC_NAME+":"+pic_) — verify CACHE_LOG_MAGIC_NAME's value.
		String entry = "读取第"+size+"详情................." + "\n\r";
		String log = (String) WZGLUtils.getCache(CacheConsts.CACHE_LOG_MAGIC_NAME, "爬取日志:"+pic_);
		if (log == null) {
			log = "";
		}
		log += entry;
		WZGLUtils.setCache(CacheConsts.CACHE_LOG_MAGIC_NAME, CacheConsts.CACHE_LOG_MAGIC_NAME + ":" + pic_, log);

		System.out.println("========================================分割线=================================\n");
		size++;
	}

	/**
	 * Parses every entry of a tag/list page. Entries not yet crawled are cached
	 * as {@link Tgrid} records, logged, persisted as finished URLs, and queued
	 * as detail-page requests.
	 */
	private void processListPage(Page page) {
		List<String> items = page.getHtml().xpath("//ul[@id='Tag_list']/li").all();
		int num1 = 1;
		for (String li : items) {
			t = new Tgrid();
			Document docList = Jsoup.parse(li);
			// Detail-page URL
			String href = docList.select("a").attr("href");
			// Cover image shown in the list
			String img = docList.select("a").select("img").attr("src");
			// List title
			String title = docList.select("a").attr("title");
			// Category / summary text
			String desc = docList.getElementsByClass("Article_Info").select("u").select("a").attr("title");
			// Publish time shown in the list
			String time = docList.getElementsByClass("Article_Info").select("em").text();
			String cn = CacheConsts.CACHE_MAGIC_GIRD_NAME;
			String key = CacheConsts.CACHE_MAGIC_GIRD_NAME + ":" + href;
			// Skip entries that were already crawled.
			if (!WZGLUtils.exists(cn, key)) {
				System.out.println("地址："+href);
				System.out.println("标题："+title);
				System.out.println("图片："+img);
				System.out.println("时间："+time);
				System.out.println("类别："+desc+"\n");
				tf = new GameSkyVo();
				tf.setFdescribe("27270-美女");
				tf.setFmzgicId("img100001");
				tf.setFtype("100010");
				// Copy the parsed values onto the grid record.
				// (The original called setFtitle twice; once is enough.)
				t.setFtitle(title);
				t.setFdate(new Date());
				t.setFcoverImg(img);
				t.setFcoverImg2(img);
				t.setFaddress(href);
				t.setFgetDate(time);
				t.setFdescribe(tf.getFdescribe());
				t.setFtype(tf.getFtype());
				t.setFmzgicId(tf.getFmzgicId());
				t.setFstatus(0);
				// Cache the record under its detail URL so the detail pass can enrich it.
				WZGLUtils.setCache(cn, key, t);
				// Build ONLY this entry's log text; the cached log already holds
				// everything earlier (re-appending an accumulator duplicated it).
				StringBuilder entry = new StringBuilder();
				entry.append("第"+num1+"图片开始读取----------------->\r");
				entry.append("第"+num1+"图片名称:"+title).append("\r");
				entry.append("第"+num1+"图片详情URL:"+href).append("\r");
				entry.append("第"+num1+"图片时间:"+time).append("\r");
				entry.append("第"+num1+"图片结束----------------->").append("\r");
				// NOTE(review): the log is read through Redis here but written through
				// WZGLUtils below — assumes both address the same store; verify.
				String log = Redis.use(CacheConsts.CACHE_LOG_MAGIC_NAME).get(pic_);
				if (log == null) {
					log = "";
				}
				log += entry.toString();
				WZGLUtils.setCache(CacheConsts.CACHE_LOG_MAGIC_NAME, CacheConsts.CACHE_LOG_MAGIC_NAME + ":" + pic_, log);
				// Record the finished URL in the database.
				// (Renamed from "tf" — it shadowed the GameSkyVo field above.)
				TfinishUrl finished = new TfinishUrl();
				finished.setFurl(href);
				finished.setFdate(new Date());
				me.save(finished);
				// List pages themselves carry nothing for the pipeline.
				page.setSkip(true);
				// Queue the detail page for crawling.
				page.addTargetRequest(href);
			}
			num1++;
		}
	}

	/** @return the crawler configuration used by the spider. */
	@Override
	public Site getSite() {
		return site;
	}

}
