package org.top10.collector;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.StringReader;
import java.net.URL;
import java.net.URLDecoder;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import org.cyberneko.html.parsers.DOMParser;
import org.top10.dealor.FileDealor;
import org.top10.executor.ParseSource;
import org.w3c.dom.Document;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;



/**
 * Static utility for fetching page source (from a URL or the classpath),
 * parsing it into a DOM via NekoHTML, and caching the result per URL.
 * Thread-safe for concurrent lookups via {@link ConcurrentHashMap}.
 */
public class Collector {
	public static final long MAX_RETRY_DELAY = 10 * 60 * 1000;
	// Cache entries are considered stale after 12 hours.
	// NOTE(review): this constant is never applied in collectDoc's expiry check — see below.
	static final long CACHE_OUTDate = 1000 * 60 * 60 * 12;

	/** Utility class: not instantiable. */
	private Collector() {
	}

	/** URL -> parsed page; shared across threads, hence ConcurrentHashMap. */
	private final static Map<String, ParseSource> CACHE = new ConcurrentHashMap<String, ParseSource>();

	/**
	 * Fetches the page at {@code url} and parses it into a {@link ParseSource}.
	 *
	 * @param url      page address
	 * @param encoding character encoding of the page
	 * @return a new ParseSource wrapping the DOM, raw text and URL
	 * @throws SAXException if the HTML cannot be parsed
	 * @throws IOException  if the page cannot be read
	 */
	public static ParseSource createCache(String url, String encoding) throws SAXException, IOException {
		return parse(collectStream(url, encoding), url);
	}

	/**
	 * Reads a classpath resource named {@code url} and parses it into a {@link ParseSource}.
	 *
	 * @param url      classpath resource name
	 * @param encoding character encoding of the resource
	 * @return a new ParseSource wrapping the DOM, raw text and resource name
	 * @throws SAXException if the HTML cannot be parsed
	 * @throws IOException  if the resource cannot be read
	 */
	public static ParseSource createCache2(String url, String encoding) throws SAXException, IOException {
		return parse(readFile(url, encoding), url);
	}

	/** Shared tail of createCache/createCache2: parse HTML text into a DOM and wrap it. */
	private static ParseSource parse(String text, String url) throws SAXException, IOException {
		DOMParser parser = new DOMParser();
		parser.parse(new InputSource(new StringReader(text)));
		Document doc = parser.getDocument();
		return new ParseSource(doc, text, url);
	}

	/**
	 * Drains the given reader into a single string, normalising line endings to '\n'.
	 * The reader is NOT closed (caller owns it).
	 *
	 * @return the reader's full contents, or {@code null} on any read error (logged to stderr)
	 */
	public static String collectStream(BufferedReader fr) {
		try {
			StringBuilder sb = new StringBuilder();
			String line;
			while ((line = fr.readLine()) != null) {
				sb.append(line).append("\n");
			}
			return sb.toString();
		} catch (Exception e) {
			e.printStackTrace();
			return null;
		}
	}

	/**
	 * Downloads the resource at {@code url} and returns its text with '\n' line endings.
	 *
	 * @param url      resource URL
	 * @param encoding character encoding to decode the stream with
	 * @return the page text, or {@code null} on any error (logged to stderr)
	 */
	public static String collectStream(String url, String encoding) {
		try {
			URL u = new URL(url);
			BufferedReader fr = new BufferedReader(new InputStreamReader(u.openStream(), encoding));
			try {
				StringBuilder sb = new StringBuilder();
				String line;
				while ((line = fr.readLine()) != null) {
					sb.append(line).append("\n");
				}
				return sb.toString();
			} finally {
				// Fix: the reader (and underlying URL stream) was never closed in the original.
				fr.close();
			}
		} catch (Exception e) {
			e.printStackTrace();
			return null;
		}
	}

	/**
	 * Returns the parsed page for {@code url}, optionally consulting the cache first.
	 * A freshly fetched page always replaces the cached entry.
	 *
	 * @param url      page address (also the cache key)
	 * @param encoding character encoding of the page
	 * @param cache    if {@code true}, return a non-expired cached entry when available
	 * @return the ParseSource, or {@code null} if fetching/parsing failed (logged to stderr)
	 */
	public static ParseSource collectDoc(String url, String encoding, boolean cache) {
		if (cache) {
			ParseSource cached = CACHE.get(url);
			// NOTE(review): CACHE_OUTDate is never added here, so this hit only occurs if
			// getLastTime() already stores an expiry timestamp rather than a creation time —
			// confirm against ParseSource; if it stores creation time, the cache never hits.
			if (cached != null && cached.getLastTime() > System.currentTimeMillis()) {
				return cached;
			}
		}
		try {
			ParseSource fresh = createCache(url, encoding);
			CACHE.put(url, fresh);
			return fresh;
		} catch (SAXException e) {
			e.printStackTrace();
			return null;
		} catch (IOException e) {
			e.printStackTrace();
			return null;
		}
	}

	/**
	 * Reads a classpath resource into a string with '\n' line endings.
	 *
	 * @param fname    classpath resource name
	 * @param encoding character encoding to decode the resource with
	 * @return the resource text, or {@code null} if the resource is missing or unreadable
	 */
	public static String readFile(String fname, String encoding) {
		InputStream is = FileDealor.class.getClassLoader().getResourceAsStream(fname);
		if (is == null) {
			// Fix: getResourceAsStream returns null for a missing resource;
			// the original dereferenced it and threw a NullPointerException.
			System.err.println("resource not found on classpath: " + fname);
			return null;
		}
		try {
			BufferedReader fr = new BufferedReader(new InputStreamReader(is, encoding));
			try {
				StringBuilder sb = new StringBuilder();
				String line;
				while ((line = fr.readLine()) != null) {
					sb.append(line).append("\n");
				}
				return sb.toString();
			} finally {
				// Fix: the reader (and underlying resource stream) was never closed in the original.
				fr.close();
			}
		} catch (Exception e) {
			e.printStackTrace();
		}
		return null;
	}

	public static void main(String args[]) {
		//Collector.collectStream("http://top.video.sina.com.cn/ws/GetTopDataList.php?top_type=day&top_cat=sphhzpx&top_time=20100725&top_show_num=10&top_order=DESC&js_var=sphhzpx_1_data&chars=utf-8");
	}

}
