/**
 *  
 *  
 *   @Description    抓取文章内容并挖掘文章数据,如,时间,来源,出链接等相关信息,
 *   				  实现了Runnable接口,可并发抓取文章内容和挖掘信息
 *   @creator         tangkun
 *   @create-time     2011-7-15
 *   @revision        $Id
 */

package com.scrawler.regex.template;

import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Service;

import com.scrawler.html.template.SecondContentCrawlerPool;
import com.scrawler.main.InitProject;
import com.scrawler.util.FileUtil;
import com.scrawler.util.HttpUtils;
import com.scrawler.util.PathUtil;
import com.scrawler.vo.NewsContent;
import com.scrawler.vo.NewsContentConfig;

@Service()
// Prototype scope: each fetch task gets its own bean instance (non-singleton).
@Scope("prototype")
public class FetchForumContent implements Runnable {
	/** URL of the forum/article page to mine. */
	private String url;

	@Autowired
	private HttpUtils httpUtils;

	/** Per-site regex configuration (title/author/content/... patterns). */
	private NewsContentConfig config;

	/**
	 * Initializes this task; must be called before {@link #run()}.
	 *
	 * @param url    the page URL whose article information is to be mined
	 * @param config the per-site regex configuration used for extraction
	 */
	public void init(String url, NewsContentConfig config) {
		this.url = url;
		this.config = config;
	}

	/**
	 * Fetches the page content, applies the configured regexes, and builds one
	 * {@link NewsContent} per extracted post. All exceptions are caught and
	 * logged so a failed page never kills the crawler pool's worker thread.
	 */
	@Override
	public void run() {
		try {
			System.out.println(url + "," + config.getEncoding());
			// TODO(review): the live HTTP fetch is disabled in favor of a local
			// demo file — restore the httpUtils call for production use.
			// String content = httpUtils.getContentByUrl(url, config.getEncoding());
			String content = FileUtil.readFile(PathUtil.getInstance().getBasePath() + "/demoData/tianyaContent.txt");
			// FileUtil.writeStringToDisk("F:\\tianyaContent.txt", content);
			// Collapse all whitespace so the regexes can match across line breaks.
			content = content.replaceAll("\\s+", " ");

			// Title: keep the last match, as the original loop did.
			String titleStr = null;
			for (String value : extractAll(config.getTitle(), content)) {
				titleStr = value;
			}

			// Extract each field independently; an empty list is returned when a
			// regex is unset, so the five lists always stay aligned by field.
			List<String> authors = extractAll(config.getAuthor(), content);
			List<String> contents = extractAll(config.getContent(), content);
			List<String> pubTimes = extractAll(config.getPub_time(), content);
			List<String> hits = extractAll(config.getHits(), content);
			List<String> comments = extractAll(config.getComments(), content);

			// One NewsContent per post: iterate up to the longest field list.
			// (The original iterated over a freshly created empty list, so no
			// result was ever built — fixed to loop to maxNum and add items.)
			int maxNum = Math.max(authors.size(), Math.max(contents.size(),
					Math.max(pubTimes.size(), Math.max(hits.size(), comments.size()))));
			List<NewsContent> results = new ArrayList<NewsContent>(maxNum);
			for (int i = 0; i < maxNum; i++) {
				NewsContent item = new NewsContent();
				item.setTitle(titleStr);
				item.setWebname(config.getWebsite_name());
				item.setUrl(url);
				item.setType_id(config.getTypeId());
				if (i < authors.size()) {
					item.setAuthor(authors.get(i));
				}
				if (i < contents.size()) {
					item.setContent(contents.get(i));
				}
				// NOTE(review): the original wrote pub-time/hits/comments through
				// setContent, overwriting the article body — assumed copy-paste
				// bug; using the matching setters instead. Confirm against the
				// NewsContent vo class.
				if (i < pubTimes.size()) {
					item.setPub_time(pubTimes.get(i));
				}
				if (i < hits.size()) {
					item.setHits(hits.get(i));
				}
				if (i < comments.size()) {
					item.setComments(comments.get(i));
				}
				results.add(item);
			}
			// TODO(review): results are built but never persisted; restore the
			// insert once the sink is decided.
			// for (NewsContent item : results) { NewsContentSql.insert(item); }
		} catch (Exception e) {
			// Best-effort task: log and swallow so the pool keeps running.
			e.printStackTrace();
		}
	}

	/**
	 * Returns capture group 1 of every match of {@code regex} in
	 * {@code content}; returns an empty list when the regex is null/blank or
	 * matches nothing.
	 *
	 * @param regex   pattern with at least one capture group, may be null/empty
	 * @param content the (whitespace-collapsed) page text to scan
	 * @return the list of group-1 values in match order, never null
	 */
	private static List<String> extractAll(String regex, String content) {
		List<String> items = new ArrayList<String>();
		if (regex == null || regex.length() < 1) {
			return items;
		}
		Matcher matcher = Pattern.compile(regex).matcher(content);
		while (matcher.find()) {
			String value = matcher.group(1);
			System.out.println("regex," + regex + ",value: " + value);
			items.add(value);
		}
		return items;
	}

	/**
	 * Ad-hoc manual test: mines one known thread using the 5th configured
	 * forum template and submits the task to the crawler pool.
	 */
	public static void main(String[] args) {
		InitProject.init();
		NewsContentConfig newsConfig = CompomentUrl.forumContentConfig.get(4);
		FetchForumContent stask = InitProject.getContext().getBean(FetchForumContent.class);
		stask.init("http://bbs.ifeng.com/viewthread.php?tid=16172349&highlight=", newsConfig);
		SecondContentCrawlerPool.getInstance().addTask(stask);
	}
}
