package com.jackingod.crawler;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.charset.StandardCharsets;

/**
 * Base class for simple news-page crawlers: downloads the raw HTML of
 * {@link #url} into {@link #htmlContent}; subclasses are expected to parse
 * out {@code newsTitle} / {@code newsContent}.
 */
public abstract class TemplateCrawler {
	/** Target page address; must be set before {@link #startCrawling()} is called. */
	String url;
	/** Raw HTML of the page, populated by {@link #startCrawling()} (null until then or on failure). */
	String htmlContent;
	String newsTitle;
	String newsContent;

	/** Platform line separator appended after each downloaded line. Name kept for subclass compatibility. */
	String NewLine = System.getProperty("line.separator");

	/** @return the parsed article title, or null if not yet extracted */
	public String getNewsTitle() {
		return newsTitle;
	}

	public void setNewsTitle(String newsTitle) {
		this.newsTitle = newsTitle;
	}

	/** @return the parsed article body, or null if not yet extracted */
	public String getNewsContent() {
		return newsContent;
	}

	public void setNewsContent(String newsContent) {
		this.newsContent = newsContent;
	}

	/** Resets all per-page state so this instance can crawl another URL. */
	void init() {
		this.htmlContent = null;
		this.newsTitle = null;
		this.newsContent = null;
	}

	/**
	 * Downloads {@link #url} and stores the whole response body in
	 * {@link #htmlContent}, one line per {@code NewLine}-terminated segment.
	 * On any error, {@code htmlContent} is left untouched and the stack
	 * trace is printed (matching the class's existing error-handling style).
	 */
	void startCrawling() {
		StringBuilder sb = new StringBuilder();

		try {
			URL newsUrl = new URL(url);

			// try-with-resources: closing the BufferedReader also closes the
			// wrapped InputStreamReader and underlying stream, even on error.
			// NOTE(review): UTF-8 assumed for target pages — without it the
			// platform default charset is used (pre-Java 18); confirm encoding.
			try (BufferedReader br = new BufferedReader(
					new InputStreamReader(newsUrl.openStream(), StandardCharsets.UTF_8))) {
				// Read until readLine() returns null (true EOF). The previous
				// ready()-based loops were wrong: ready() only reports whether
				// buffered input is available *now*, so the busy-wait could spin
				// forever and the read loop could silently truncate the page on
				// a momentary network stall.
				String line;
				while ((line = br.readLine()) != null) {
					sb.append(line).append(NewLine);
				}
			}

			this.htmlContent = sb.toString();

		} catch (MalformedURLException e) {
			e.printStackTrace();
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	/** Hook for subclasses: crawl the given URL end-to-end. Default is a no-op. */
	public void start(String url) {}
}
